From 718e8e1edb537aca984216be39b3be5c8af4e6da Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Sun, 16 May 2021 21:39:58 -0500 Subject: [PATCH 01/93] Create NsfwApiPolicy --- config/config.exs | 7 + .../web/activity_pub/mrf/nsfw_api_policy.ex | 185 ++++++++++++++++++ 2 files changed, 192 insertions(+) create mode 100644 lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex diff --git a/config/config.exs b/config/config.exs index 66aee3264..d96dc1646 100644 --- a/config/config.exs +++ b/config/config.exs @@ -404,6 +404,13 @@ threshold: 604_800, actions: [:delist, :strip_followers] +config :pleroma, :mrf_nsfw_api, + url: "http://127.0.0.1:5000/", + threshold: 0.7, + mark_sensitive: true, + unlist: false, + reject: false + config :pleroma, :rich_media, enabled: true, ignore_hosts: [], diff --git a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex new file mode 100644 index 000000000..9ad175b1b --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex @@ -0,0 +1,185 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do + @moduledoc """ + Hide, delete, or mark sensitive NSFW content with artificial intelligence. + + Requires a NSFW API server, configured like so: + + config :pleroma, Pleroma.Web.ActivityPub.MRF.NsfwMRF, + url: "http://127.0.0.1:5000/", + threshold: 0.8, + mark_sensitive: true, + unlist: false, + reject: false + + The NSFW API server must implement an HTTP endpoint like this: + + curl http://localhost:5000/?url=https://fedi.com/images/001.jpg + + Returning a response like this: + + {"score", 0.314} + + Where a score is 0-1, with `1` being definitely NSFW. + + A good API server is here: https://github.com/EugenCepoi/nsfw_api + You can run it with Docker with a one-liner: + + docker run -it -p 127.0.0.1:5000:5000/tcp --env PORT=5000 eugencepoi/nsfw_api:latest + + Options: + + - `url`: Base URL of the API server. Default: "http://127.0.0.1:5000/" + - `threshold`: Lowest score to take action on. Default: `0.7` + - `mark_sensitive`: Mark sensitive all detected NSFW content? Default: `true` + - `unlist`: Unlist all detected NSFW content? Default: `false` + - `reject`: Reject all detected NSFW content (takes precedence)? Default: `false` + """ + alias Pleroma.Config + alias Pleroma.Constants + alias Pleroma.HTTP + alias Pleroma.User + + require Logger + require Pleroma.Constants + + @behaviour Pleroma.Web.ActivityPub.MRF + @policy :mrf_nsfw_api + + defp build_request_url(url) do + Config.get([@policy, :url]) + |> URI.parse() + |> Map.put(:query, "url=#{url}") + |> URI.to_string() + end + + defp parse_url(url) do + request = build_request_url(url) + + with {:ok, %Tesla.Env{body: body}} <- HTTP.get(request) do + Jason.decode(body) + else + error -> + Logger.warn(""" + [NsfwApiPolicy]: The API server failed. Skipping. 
+ #{inspect(error)} + """) + + error + end + end + + defp check_url_nsfw(url) when is_binary(url) do + threshold = Config.get([@policy, :threshold]) + + case parse_url(url) do + {:ok, %{"score" => score}} when score >= threshold -> + {:nsfw, %{url: url, score: score, threshold: threshold}} + + _ -> + {:sfw, url} + end + end + + defp check_url_nsfw(%{"href" => url}) when is_binary(url) do + check_url_nsfw(url) + end + + defp check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do + if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do + {:sfw, attachment} + else + {:nsfw, attachment} + end + end + + defp check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do + if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do + {:sfw, object} + else + {:nsfw, object} + end + end + + defp check_object_nsfw(%{"object" => %{} = child_object} = object) do + case check_object_nsfw(child_object) do + {:sfw, _} -> {:sfw, object} + {:nsfw, _} -> {:nsfw, object} + end + end + + defp check_object_nsfw(object), do: {:sfw, object} + + @impl true + def filter(object) do + with {:sfw, object} <- check_object_nsfw(object) do + {:ok, object} + else + {:nsfw, _data} -> handle_nsfw(object) + _ -> {:reject, "NSFW: Attachment rejected"} + end + end + + defp handle_nsfw(object) do + if Config.get([@policy, :reject]) do + {:reject, object} + else + {:ok, + object + |> maybe_unlist() + |> maybe_mark_sensitive()} + end + end + + defp maybe_unlist(object) do + if Config.get([@policy, :unlist]) do + unlist(object) + else + object + end + end + + defp maybe_mark_sensitive(object) do + if Config.get([@policy, :mark_sensitive]) do + mark_sensitive(object) + else + object + end + end + + defp unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do + with %User{} = user <- User.get_cached_by_ap_id(actor) do + to = + [user.follower_address | to] + |> List.delete(Constants.as_public()) + |> Enum.uniq() + + cc = + [Constants.as_public() | cc] + |> List.delete(user.follower_address) + |> Enum.uniq() + + object + |> Map.put("to", to) + |> Map.put("cc", cc) + end + end + + defp mark_sensitive(%{"object" => child_object} = object) when is_map(child_object) do + Map.put(object, "object", mark_sensitive(child_object)) + end + + defp mark_sensitive(object) when is_map(object) do + tags = (object["tag"] || []) ++ ["nsfw"] + + object + |> Map.put("tag", tags) + |> Map.put("sensitive", true) + end + + @impl true + def describe, do: {:ok, %{}} +end From f15d419062b5f9aba2a2e84257dc2379b44f92e8 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Wed, 16 Jun 2021 22:30:18 -0500 Subject: [PATCH 02/93] NsfwApiPolicy: raise if can't fetch user --- lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex index 9ad175b1b..63e6af0a0 100644 --- a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex @@ -165,6 +165,8 @@ defp unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do object |> Map.put("to", to) |> Map.put("cc", cc) + else + _ -> raise "[NsfwApiPolicy]: Could not fetch user #{actor}" end end From 2b3dfbb42f7ec0c5604876276a81d55a05955416 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Thu, 17 Jun 2021 14:36:51 -0500 Subject: [PATCH 03/93] NsfwApiPolicy: add tests --- .../web/activity_pub/mrf/nsfw_api_policy.ex | 45 ++- 
.../activity_pub/mrf/nsfw_api_policy_test.exs | 267 ++++++++++++++++++ 2 files changed, 299 insertions(+), 13 deletions(-) create mode 100644 test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs diff --git a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex index 63e6af0a0..9dcdf560e 100644 --- a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex @@ -49,14 +49,15 @@ defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do @behaviour Pleroma.Web.ActivityPub.MRF @policy :mrf_nsfw_api - defp build_request_url(url) do + def build_request_url(url) do Config.get([@policy, :url]) |> URI.parse() + |> fix_path() |> Map.put(:query, "url=#{url}") |> URI.to_string() end - defp parse_url(url) do + def parse_url(url) do request = build_request_url(url) with {:ok, %Tesla.Env{body: body}} <- HTTP.get(request) do @@ -72,23 +73,26 @@ defp parse_url(url) do end end - defp check_url_nsfw(url) when is_binary(url) do + def check_url_nsfw(url) when is_binary(url) do threshold = Config.get([@policy, :threshold]) case parse_url(url) do {:ok, %{"score" => score}} when score >= threshold -> {:nsfw, %{url: url, score: score, threshold: threshold}} + {:ok, %{"score" => score}} -> + {:sfw, %{url: url, score: score, threshold: threshold}} + _ -> - {:sfw, url} + {:sfw, %{url: url, score: nil, threshold: threshold}} end end - defp check_url_nsfw(%{"href" => url}) when is_binary(url) do + def check_url_nsfw(%{"href" => url}) when is_binary(url) do check_url_nsfw(url) end - defp check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do + def check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do {:sfw, attachment} else @@ -96,7 +100,14 @@ defp check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do end end - defp check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do + def check_attachment_nsfw(%{"url" => url} = attachment) when is_binary(url) do + case check_url_nsfw(url) do + {:sfw, _} -> {:sfw, attachment} + {:nsfw, _} -> {:nsfw, attachment} + end + end + + def check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do {:sfw, object} else @@ -104,14 +115,14 @@ defp check_object_nsfw(%{"attachment" => attachments} = object) when is_list(att end end - defp check_object_nsfw(%{"object" => %{} = child_object} = object) do + def check_object_nsfw(%{"object" => %{} = child_object} = object) do case check_object_nsfw(child_object) do {:sfw, _} -> {:sfw, object} {:nsfw, _} -> {:nsfw, object} end end - defp check_object_nsfw(object), do: {:sfw, object} + def check_object_nsfw(object), do: {:sfw, object} @impl true def filter(object) do @@ -150,7 +161,7 @@ defp maybe_mark_sensitive(object) do end end - defp unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do + def unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do with %User{} = user <- User.get_cached_by_ap_id(actor) do to = [user.follower_address | to] @@ -166,15 +177,15 @@ defp unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do |> Map.put("to", to) |> Map.put("cc", cc) else - _ -> raise "[NsfwApiPolicy]: Could not fetch user #{actor}" + _ -> raise "[NsfwApiPolicy]: Could not find user #{actor}" end end - defp mark_sensitive(%{"object" => child_object} = object) when 
is_map(child_object) do + def mark_sensitive(%{"object" => child_object} = object) when is_map(child_object) do Map.put(object, "object", mark_sensitive(child_object)) end - defp mark_sensitive(object) when is_map(object) do + def mark_sensitive(object) when is_map(object) do tags = (object["tag"] || []) ++ ["nsfw"] object @@ -182,6 +193,14 @@ defp mark_sensitive(object) when is_map(object) do |> Map.put("sensitive", true) end + # Hackney needs a trailing slash + defp fix_path(%URI{path: path} = uri) when is_binary(path) do + path = String.trim_trailing(path, "/") <> "/" + Map.put(uri, :path, path) + end + + defp fix_path(%URI{path: nil} = uri), do: Map.put(uri, :path, "/") + @impl true def describe, do: {:ok, %{}} end diff --git a/test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs b/test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs new file mode 100644 index 000000000..0beb9c2cb --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs @@ -0,0 +1,267 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicyTest do + use Pleroma.DataCase + + import ExUnit.CaptureLog + import Pleroma.Factory + + alias Pleroma.Constants + alias Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy + + require Pleroma.Constants + + @policy :mrf_nsfw_api + + @sfw_url "https://kittens.co/kitty.gif" + @nsfw_url "https://b00bies.com/nsfw.jpg" + @timeout_url "http://time.out/i.jpg" + + setup_all do + clear_config(@policy, + url: "http://127.0.0.1:5000/", + threshold: 0.7, + mark_sensitive: true, + unlist: false, + reject: false + ) + end + + setup do + Tesla.Mock.mock(fn + # NSFW URL + %{method: :get, url: "http://127.0.0.1:5000/?url=#{@nsfw_url}"} -> + %Tesla.Env{status: 200, body: ~s({"score":0.99772077798843384,"url":"#{@nsfw_url}"})} + + # SFW URL + %{method: :get, url: "http://127.0.0.1:5000/?url=#{@sfw_url}"} -> + %Tesla.Env{status: 200, body: ~s({"score":0.00011714912398019806,"url":"#{@sfw_url}"})} + + # Timeout URL + %{method: :get, url: "http://127.0.0.1:5000/?url=#{@timeout_url}"} -> + {:error, :timeout} + + # Fallback URL + %{method: :get, url: "http://127.0.0.1:5000/?url=" <> url} -> + body = + ~s({"error_code":500,"error_reason":"[Errno -2] Name or service not known","url":"#{url}"}) + + %Tesla.Env{status: 500, body: body} + end) + + :ok + end + + describe "build_request_url/1" do + test "it works" do + expected = "http://127.0.0.1:5000/?url=https://b00bies.com/nsfw.jpg" + assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected + end + + test "it adds a trailing slash" do + clear_config([@policy, :url], "http://localhost:5000") + + expected = "http://localhost:5000/?url=https://b00bies.com/nsfw.jpg" + assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected + end + + test "it adds a trailing slash preserving the path" do + clear_config([@policy, :url], "http://localhost:5000/nsfw_api") + + expected = "http://localhost:5000/nsfw_api/?url=https://b00bies.com/nsfw.jpg" + assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected + end + end + + describe "parse_url/1" do + test "returns decoded JSON from the API server" do + expected = %{"score" => 0.99772077798843384, "url" => @nsfw_url} + assert NsfwApiPolicy.parse_url(@nsfw_url) == {:ok, expected} + end + + test "warns when the API server fails" do + expected = "[NsfwApiPolicy]: The API server failed. Skipping." 
+ assert capture_log(fn -> NsfwApiPolicy.parse_url(@timeout_url) end) =~ expected + end + + test "returns {:error, _} tuple when the API server fails" do + capture_log(fn -> + assert {:error, _} = NsfwApiPolicy.parse_url(@timeout_url) + end) + end + end + + describe "check_url_nsfw/1" do + test "returns {:nsfw, _} tuple" do + expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}} + assert NsfwApiPolicy.check_url_nsfw(@nsfw_url) == expected + end + + test "returns {:sfw, _} tuple" do + expected = {:sfw, %{url: @sfw_url, score: 0.00011714912398019806, threshold: 0.7}} + assert NsfwApiPolicy.check_url_nsfw(@sfw_url) == expected + end + + test "returns {:sfw, _} on failure" do + expected = {:sfw, %{url: @timeout_url, score: nil, threshold: 0.7}} + + capture_log(fn -> + assert NsfwApiPolicy.check_url_nsfw(@timeout_url) == expected + end) + end + + test "works with map URL" do + expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}} + assert NsfwApiPolicy.check_url_nsfw(%{"href" => @nsfw_url}) == expected + end + end + + describe "check_attachment_nsfw/1" do + test "returns {:nsfw, _} if any items are NSFW" do + attachment = %{"url" => [%{"href" => @nsfw_url}, @nsfw_url, @sfw_url]} + assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment} + end + + test "returns {:sfw, _} if all items are SFW" do + attachment = %{"url" => [%{"href" => @sfw_url}, @sfw_url, @sfw_url]} + assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:sfw, attachment} + end + + test "works with binary URL" do + attachment = %{"url" => @nsfw_url} + assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment} + end + end + + describe "check_object_nsfw/1" do + test "returns {:nsfw, _} if any items are NSFW" do + object = %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]} + assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object} + end + + test "returns {:sfw, _} if all items are SFW" do + object = %{"attachment" => [%{"url" => [%{"href" => @sfw_url}, @sfw_url]}]} + assert NsfwApiPolicy.check_object_nsfw(object) == {:sfw, object} + end + + test "works with embedded object" do + object = %{"object" => %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]}} + assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object} + end + end + + describe "unlist/1" do + test "unlist addressing" do + user = insert(:user) + + object = %{ + "to" => [Constants.as_public()], + "cc" => [user.follower_address, "https://hello.world/users/alex"], + "actor" => user.ap_id + } + + expected = %{ + "to" => [user.follower_address], + "cc" => [Constants.as_public(), "https://hello.world/users/alex"], + "actor" => user.ap_id + } + + assert NsfwApiPolicy.unlist(object) == expected + end + + test "raise if user isn't found" do + object = %{ + "to" => [Constants.as_public()], + "cc" => [], + "actor" => "https://hello.world/users/alex" + } + + assert_raise(RuntimeError, fn -> + NsfwApiPolicy.unlist(object) + end) + end + end + + describe "mark_sensitive/1" do + test "adds nsfw tag and marks sensitive" do + object = %{"tag" => ["yolo"]} + expected = %{"tag" => ["yolo", "nsfw"], "sensitive" => true} + assert NsfwApiPolicy.mark_sensitive(object) == expected + end + + test "works with embedded object" do + object = %{"object" => %{"tag" => ["yolo"]}} + expected = %{"object" => %{"tag" => ["yolo", "nsfw"], "sensitive" => true}} + assert NsfwApiPolicy.mark_sensitive(object) == expected + end + end + + describe "filter/1" do + 
setup do + user = insert(:user) + + nsfw_object = %{ + "to" => [Constants.as_public()], + "cc" => [user.follower_address], + "actor" => user.ap_id, + "attachment" => [%{"url" => @nsfw_url}] + } + + sfw_object = %{ + "to" => [Constants.as_public()], + "cc" => [user.follower_address], + "actor" => user.ap_id, + "attachment" => [%{"url" => @sfw_url}] + } + + %{user: user, nsfw_object: nsfw_object, sfw_object: sfw_object} + end + + test "passes SFW object through", %{sfw_object: object} do + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "passes NSFW object through when actions are disabled", %{nsfw_object: object} do + clear_config([@policy, :mark_sensitive], false) + clear_config([@policy, :unlist], false) + clear_config([@policy, :reject], false) + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "passes NSFW object through when :threshold is 1", %{nsfw_object: object} do + clear_config([@policy, :reject], true) + clear_config([@policy, :threshold], 1) + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "rejects SFW object through when :threshold is 0", %{sfw_object: object} do + clear_config([@policy, :reject], true) + clear_config([@policy, :threshold], 0) + {:reject, _} = NsfwApiPolicy.filter(object) + end + + test "rejects NSFW when :reject is enabled", %{nsfw_object: object} do + clear_config([@policy, :reject], true) + {:reject, _} = NsfwApiPolicy.filter(object) + end + + test "passes NSFW through when :reject is disabled", %{nsfw_object: object} do + clear_config([@policy, :reject], false) + {:ok, _} = NsfwApiPolicy.filter(object) + end + + test "unlists NSFW when :unlist is enabled", %{user: user, nsfw_object: object} do + clear_config([@policy, :unlist], true) + {:ok, object} = NsfwApiPolicy.filter(object) + assert object["to"] == [user.follower_address] + end + + test "passes NSFW through when :unlist is disabled", %{nsfw_object: object} do + clear_config([@policy, :unlist], false) + {:ok, object} = NsfwApiPolicy.filter(object) + assert object["to"] == [Constants.as_public()] + end + end +end From b293c14a1b01398029dfa80aea306946efc2f284 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Thu, 17 Jun 2021 14:52:07 -0500 Subject: [PATCH 04/93] NsfwApiPolicy: add describe/0 and config_description/0 --- .../web/activity_pub/mrf/nsfw_api_policy.ex | 56 ++++++++++++++++++- 1 file changed, 54 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex index 9dcdf560e..a1560c584 100644 --- a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do config :pleroma, Pleroma.Web.ActivityPub.MRF.NsfwMRF, url: "http://127.0.0.1:5000/", - threshold: 0.8, + threshold: 0.7, mark_sensitive: true, unlist: false, reject: false @@ -202,5 +202,57 @@ defp fix_path(%URI{path: path} = uri) when is_binary(path) do defp fix_path(%URI{path: nil} = uri), do: Map.put(uri, :path, "/") @impl true - def describe, do: {:ok, %{}} + def describe do + options = %{ + threshold: Config.get([@policy, :threshold]), + mark_sensitive: Config.get([@policy, :mark_sensitive]), + unlist: Config.get([@policy, :unlist]), + reject: Config.get([@policy, :reject]) + } + + {:ok, %{@policy => options}} + end + + @impl true + def config_description do + %{ + key: @policy, + related_policy: to_string(__MODULE__), + label: "NSFW API Policy", + description: + "Hide, delete, or mark sensitive NSFW content 
with artificial intelligence. Requires running an external API server.", + children: [ + %{ + key: :url, + type: :string, + description: "Base URL of the API server.", + suggestions: ["http://127.0.0.1:5000/"] + }, + %{ + key: :threshold, + type: :float, + description: "Lowest score to take action on. Between 0 and 1.", + suggestions: [0.7] + }, + %{ + key: :mark_sensitive, + type: :boolean, + description: "Mark sensitive all detected NSFW content?", + suggestions: [true] + }, + %{ + key: :unlist, + type: :boolean, + description: "Unlist sensitive all detected NSFW content?", + suggestions: [false] + }, + %{ + key: :reject, + type: :boolean, + description: "Reject sensitive all detected NSFW content (takes precedence)?", + suggestions: [false] + } + ] + } + end end From c802c3055ef6c1f763d5df68f9e5308093f7d565 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Thu, 17 Jun 2021 15:04:40 -0500 Subject: [PATCH 05/93] NsfwApiPolicy: add systemd example file --- installation/nsfw-api.service | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 installation/nsfw-api.service diff --git a/installation/nsfw-api.service b/installation/nsfw-api.service new file mode 100644 index 000000000..ec629df67 --- /dev/null +++ b/installation/nsfw-api.service @@ -0,0 +1,15 @@ +[Unit] +Description=NSFW API +After=docker.service +Requires=docker.service + +[Service] +TimeoutStartSec=0 +Restart=always +ExecStartPre=-/usr/bin/docker stop %n +ExecStartPre=-/usr/bin/docker rm %n +ExecStartPre=/usr/bin/docker pull eugencepoi/nsfw_api:latest +ExecStart=/usr/bin/docker run --rm -p 127.0.0.1:5000:5000/tcp --env PORT=5000 --name %n eugencepoi/nsfw_api:latest + +[Install] +WantedBy=multi-user.target From a704d5499c03cb5609ea38a5f2ef06095ced3ef3 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Thu, 17 Jun 2021 15:32:42 -0500 Subject: [PATCH 06/93] NsfwApiPolicy: Fall back more generously when functions don't match --- lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex index a1560c584..920821f38 100644 --- a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex @@ -92,6 +92,11 @@ def check_url_nsfw(%{"href" => url}) when is_binary(url) do check_url_nsfw(url) end + def check_url_nsfw(url) do + threshold = Config.get([@policy, :threshold]) + {:sfw, %{url: url, score: nil, threshold: threshold}} + end + def check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do {:sfw, attachment} @@ -107,6 +112,8 @@ def check_attachment_nsfw(%{"url" => url} = attachment) when is_binary(url) do end end + def check_attachment_nsfw(attachment), do: {:sfw, attachment} + def check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do {:sfw, object} From fa2a6d5d6b24657ddbda4ef11d2e6dbcb59545d3 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Thu, 7 Apr 2022 18:25:02 +0200 Subject: [PATCH 07/93] feat: simple, but not stupid, uploader for IPFS fix: format fix with credo --- config/config.exs | 4 +++ config/description.exs | 24 +++++++++++++ config/dev.exs | 4 +++ lib/pleroma/upload.ex | 13 +++++-- lib/pleroma/uploaders/ipfs.ex | 64 +++++++++++++++++++++++++++++++++++ 5 files changed, 107 insertions(+), 2 deletions(-) create 
mode 100644 lib/pleroma/uploaders/ipfs.ex diff --git a/config/config.exs b/config/config.exs index 6a5acda09..7efad0061 100644 --- a/config/config.exs +++ b/config/config.exs @@ -82,6 +82,10 @@ # region: "us-east-1", # may be required for Amazon AWS scheme: "https://" +config :pleroma, Pleroma.Uploaders.IPFS, + post_gateway_url: nil, + get_gateway_url: nil + config :pleroma, :emoji, shortcode_globs: ["/emoji/custom/**/*.png"], pack_extensions: [".png", ".gif"], diff --git a/config/description.exs b/config/description.exs index 7caad18b4..d87bbb9b8 100644 --- a/config/description.exs +++ b/config/description.exs @@ -136,6 +136,30 @@ } ] }, + %{ + group: :pleroma, + key: Pleroma.Uploaders.IPFS, + type: :group, + description: "IPFS uploader-related settings", + children: [ + %{ + key: :get_gateway_url, + type: :string, + description: "GET Gateway URL", + suggestions: [ + "get_gateway_url" + ] + }, + %{ + key: :post_gateway_url, + type: :string, + description: "POST Gateway URL", + suggestions: [ + "post_gateway_url" + ] + } + ] + }, %{ group: :pleroma, key: Pleroma.Uploaders.S3, diff --git a/config/dev.exs b/config/dev.exs index ab3e83c12..89a84bf05 100644 --- a/config/dev.exs +++ b/config/dev.exs @@ -58,6 +58,10 @@ # https://dashbit.co/blog/speeding-up-re-compilation-of-elixir-projects config :phoenix, :plug_init_mode, :runtime +config :pleroma, Pleroma.Uploaders.IPFS, + post_gateway_url: nil, + get_gateway_url: nil + if File.exists?("./config/dev.secret.exs") do import_config "dev.secret.exs" else diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index db2909276..de39bcd6c 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -235,8 +235,14 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do "" end - [base_url, path] - |> Path.join() + uploader = Config.get([Pleroma.Upload, :uploader]) + + if uploader == Pleroma.Uploaders.IPFS && String.contains?(base_url, "{CID}") do + String.replace(base_url, "{CID}", path) + else + [base_url, path] + |> Path.join() + end end defp url_from_spec(_upload, _base_url, {:url, url}), do: url @@ -273,6 +279,9 @@ def base_url do Path.join([upload_base_url, bucket_with_namespace]) end + Pleroma.Uploaders.IPFS -> + Config.get([Pleroma.Uploaders.IPFS, :get_gateway_url]) + _ -> public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/" end diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex new file mode 100644 index 000000000..b46e9322e --- /dev/null +++ b/lib/pleroma/uploaders/ipfs.ex @@ -0,0 +1,64 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Uploaders.IPFS do + @behaviour Pleroma.Uploaders.Uploader + require Logger + + alias Pleroma.Config + alias Tesla.Multipart + + @impl true + def get_file(file) do + b_url = Pleroma.Upload.base_url() + + if String.contains?(b_url, "{CID}") do + {:ok, {:url, String.replace(b_url, "{CID}", URI.decode(file))}} + else + {:error, "IPFS Get URL doesn't contain '{CID}' placeholder"} + end + end + + @impl true + def put_file(%Pleroma.Upload{} = upload) do + config = Config.get([__MODULE__]) + post_base_url = Keyword.get(config, :post_gateway_url) + + mp = + Multipart.new() + |> Multipart.add_content_type_param("charset=utf-8") + |> Multipart.add_file(upload.tempfile) + + final_url = Path.join([post_base_url, "/api/v0/add"]) + + case Pleroma.HTTP.post(final_url, mp, [], params: ["cid-version": "1"]) do + {:ok, ret} -> + case 
Jason.decode(ret.body) do + {:ok, ret} -> + {:ok, {:file, ret["Hash"]}} + + error -> + Logger.error("#{__MODULE__}: #{inspect(error)}") + {:error, "JSON decode failed"} + end + + error -> + Logger.error("#{__MODULE__}: #{inspect(error)}") + {:error, "IPFS Gateway Upload failed"} + end + end + + @impl true + def delete_file(file) do + config = Config.get([__MODULE__]) + post_base_url = Keyword.get(config, :post_gateway_url) + + final_url = Path.join([post_base_url, "/api/v0/files/rm"]) + + case Pleroma.HTTP.post(final_url, "", [], params: [arg: file]) do + {:ok, %{status_code: 204}} -> :ok + error -> {:error, inspect(error)} + end + end +end From 43dfa58ebda407a0813d398bee8d0ae3e5c9fd5b Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Mon, 11 Apr 2022 15:10:01 +0200 Subject: [PATCH 08/93] added tests for ipfs uploader. adapted changelog.md accordingly. improved ipfs uploader with external suggestions fix lint description.exs --- CHANGELOG.md | 1 + config/description.exs | 5 +- config/dev.exs | 4 -- docs/configuration/cheatsheet.md | 13 ++++ lib/pleroma/upload.ex | 6 +- lib/pleroma/uploaders/ipfs.ex | 14 +++-- test/pleroma/uploaders/ipfs_test.exs | 88 ++++++++++++++++++++++++++++ 7 files changed, 116 insertions(+), 15 deletions(-) create mode 100644 test/pleroma/uploaders/ipfs_test.exs diff --git a/CHANGELOG.md b/CHANGELOG.md index f1beb0cd0..b2185b1ba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - PleromaAPI: Add `GET /api/v1/pleroma/birthdays` API endpoint - Make backend-rendered pages translatable. This includes emails. Pages returned as a HTTP response are translated using the language specified in the `userLanguage` cookie, or the `Accept-Language` header. Emails are translated using the `language` field when registering. This language can be changed by `PATCH /api/v1/accounts/update_credentials` with the `language` field. 
- Uploadfilter `Pleroma.Upload.Filter.Exiftool.ReadDescription` returns description values to the FE so they can pre fill the image description field +- Uploader: Add support for uploading attachments using IPFS ### Fixed - Subscription(Bell) Notifications: Don't create from Pipeline Ingested replies diff --git a/config/description.exs b/config/description.exs index d87bbb9b8..1fe5f01f0 100644 --- a/config/description.exs +++ b/config/description.exs @@ -147,7 +147,8 @@ type: :string, description: "GET Gateway URL", suggestions: [ - "get_gateway_url" + "https://ipfs.mydomain.com/<%= cid %>", + "https://<%= cid %>.ipfs.mydomain.com/" ] }, %{ @@ -155,7 +156,7 @@ type: :string, description: "POST Gateway URL", suggestions: [ - "post_gateway_url" + "http://localhost:5001/" ] } ] diff --git a/config/dev.exs b/config/dev.exs index 89a84bf05..ab3e83c12 100644 --- a/config/dev.exs +++ b/config/dev.exs @@ -58,10 +58,6 @@ # https://dashbit.co/blog/speeding-up-re-compilation-of-elixir-projects config :phoenix, :plug_init_mode, :runtime -config :pleroma, Pleroma.Uploaders.IPFS, - post_gateway_url: nil, - get_gateway_url: nil - if File.exists?("./config/dev.secret.exs") do import_config "dev.secret.exs" else diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 74642397b..7e1f9c934 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -614,6 +614,19 @@ config :ex_aws, :s3, host: "s3.eu-central-1.amazonaws.com" ``` +#### Pleroma.Uploaders.IPFS + +* `post_gateway_url`: URL with port of POST Gateway (unauthenticated) +* `get_gateway_url`: URL of public GET Gateway + +Example: + +```elixir +config :pleroma, Pleroma.Uploaders.IPFS, + post_gateway_url: "http://localhost:5001", + get_gateway_url: "http://<%= cid %>.ipfs.mydomain.com" +``` + ### Upload filters #### Pleroma.Upload.Filter.AnonymizeFilename diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index de39bcd6c..b51d23f9e 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -235,10 +235,8 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do "" end - uploader = Config.get([Pleroma.Upload, :uploader]) - - if uploader == Pleroma.Uploaders.IPFS && String.contains?(base_url, "{CID}") do - String.replace(base_url, "{CID}", path) + if String.contains?(base_url, "<%= cid %>") do + EEx.eval_string(base_url, cid: path) else [base_url, path] |> Path.join() diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex index b46e9322e..722c68fa1 100644 --- a/lib/pleroma/uploaders/ipfs.ex +++ b/lib/pleroma/uploaders/ipfs.ex @@ -13,10 +13,10 @@ defmodule Pleroma.Uploaders.IPFS do def get_file(file) do b_url = Pleroma.Upload.base_url() - if String.contains?(b_url, "{CID}") do - {:ok, {:url, String.replace(b_url, "{CID}", URI.decode(file))}} + if String.contains?(b_url, "<%= cid %>") do + {:ok, {:url, EEx.eval_string(b_url, cid: URI.decode(file))}} else - {:error, "IPFS Get URL doesn't contain '{CID}' placeholder"} + {:error, "IPFS Get URL doesn't contain 'cid' placeholder"} end end @@ -36,7 +36,11 @@ def put_file(%Pleroma.Upload{} = upload) do {:ok, ret} -> case Jason.decode(ret.body) do {:ok, ret} -> - {:ok, {:file, ret["Hash"]}} + if Map.has_key?(ret, "Hash") do + {:ok, {:file, ret["Hash"]}} + else + {:error, "JSON doesn't contain Hash value"} + end error -> Logger.error("#{__MODULE__}: #{inspect(error)}") @@ -45,7 +49,7 @@ def put_file(%Pleroma.Upload{} = upload) do error -> Logger.error("#{__MODULE__}: #{inspect(error)}") - {:error, "IPFS 
Gateway Upload failed"} + {:error, "IPFS Gateway upload failed"} end end diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs new file mode 100644 index 000000000..f9ae046cf --- /dev/null +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -0,0 +1,88 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Uploaders.IPFSTest do + use Pleroma.DataCase + + alias Pleroma.Uploaders.IPFS + alias Tesla.Multipart + + import Mock + import ExUnit.CaptureLog + + setup do + clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.IPFS) + clear_config([Pleroma.Uploaders.IPFS]) + + clear_config( + [Pleroma.Uploaders.IPFS, :get_gateway_url], + "https://<%= cid %>.ipfs.mydomain.com" + ) + + clear_config([Pleroma.Uploaders.IPFS, :post_gateway_url], "http://localhost:5001") + end + + describe "get_file/1" do + test "it returns path to ipfs file with cid as subdomain" do + assert IPFS.get_file("testcid") == { + :ok, + {:url, "https://testcid.ipfs.mydomain.com"} + } + end + + test "it returns path to ipfs file with cid as path" do + clear_config( + [Pleroma.Uploaders.IPFS, :get_gateway_url], + "https://ipfs.mydomain.com/ipfs/<%= cid %>" + ) + + assert IPFS.get_file("testcid") == { + :ok, + {:url, "https://ipfs.mydomain.com/ipfs/testcid"} + } + end + end + + describe "put_file/1" do + setup do + file_upload = %Pleroma.Upload{ + name: "image-tet.jpg", + content_type: "image/jpeg", + path: "test_folder/image-tet.jpg", + tempfile: Path.absname("test/instance_static/add/shortcode.png") + } + + [file_upload: file_upload] + end + + test "save file", %{file_upload: file_upload} do + with_mock Pleroma.HTTP, + post: fn _, _, _, _ -> + {:ok, + %Tesla.Env{ + status: 200, + body: "{\"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}" + }} + end do + assert IPFS.put_file(file_upload) == + {:ok, {:file, "bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi"}} + end + end + + test "returns error", %{file_upload: file_upload} do + with_mock Pleroma.HTTP, post: fn _, _, _, _ -> {:error, "IPFS Gateway upload failed"} end do + assert capture_log(fn -> + assert IPFS.put_file(file_upload) == {:error, "IPFS Gateway upload failed"} + end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}" + end + end + end + + describe "delete_file/1" do + test_with_mock "deletes file", Pleroma.HTTP, + post: fn _, _, _, _ -> {:ok, %{status_code: 204}} end do + assert :ok = IPFS.delete_file("image.jpg") + end + end +end From 44659ecd65fb2251f9130fcecf1732b8931104c1 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Sat, 16 Apr 2022 09:38:49 +0200 Subject: [PATCH 09/93] ipfs: revert to String.replace for cid placeholder ipfs: fix lint --- config/description.exs | 4 ++-- docs/configuration/cheatsheet.md | 2 +- lib/pleroma/upload.ex | 4 ++-- lib/pleroma/uploaders/ipfs.ex | 7 +++++-- test/pleroma/uploaders/ipfs_test.exs | 4 ++-- 5 files changed, 12 insertions(+), 9 deletions(-) diff --git a/config/description.exs b/config/description.exs index 1fe5f01f0..b180e7308 100644 --- a/config/description.exs +++ b/config/description.exs @@ -147,8 +147,8 @@ type: :string, description: "GET Gateway URL", suggestions: [ - "https://ipfs.mydomain.com/<%= cid %>", - "https://<%= cid %>.ipfs.mydomain.com/" + "https://ipfs.mydomain.com/{CID}", + "https://{CID}.ipfs.mydomain.com/" ] }, %{ diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 
7e1f9c934..d35b33574 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -624,7 +624,7 @@ Example: ```elixir config :pleroma, Pleroma.Uploaders.IPFS, post_gateway_url: "http://localhost:5001", - get_gateway_url: "http://<%= cid %>.ipfs.mydomain.com" + get_gateway_url: "http://{CID}.ipfs.mydomain.com" ``` ### Upload filters diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index b51d23f9e..8a01cf613 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -235,8 +235,8 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do "" end - if String.contains?(base_url, "<%= cid %>") do - EEx.eval_string(base_url, cid: path) + if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do + String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path) else [base_url, path] |> Path.join() diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex index 722c68fa1..dde520d8e 100644 --- a/lib/pleroma/uploaders/ipfs.ex +++ b/lib/pleroma/uploaders/ipfs.ex @@ -9,12 +9,15 @@ defmodule Pleroma.Uploaders.IPFS do alias Pleroma.Config alias Tesla.Multipart + @placeholder "{CID}" + def placeholder, do: @placeholder + @impl true def get_file(file) do b_url = Pleroma.Upload.base_url() - if String.contains?(b_url, "<%= cid %>") do - {:ok, {:url, EEx.eval_string(b_url, cid: URI.decode(file))}} + if String.contains?(b_url, @placeholder) do + {:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}} else {:error, "IPFS Get URL doesn't contain 'cid' placeholder"} end diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index f9ae046cf..fc87fa378 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -17,7 +17,7 @@ defmodule Pleroma.Uploaders.IPFSTest do clear_config( [Pleroma.Uploaders.IPFS, :get_gateway_url], - "https://<%= cid %>.ipfs.mydomain.com" + "https://{CID}.ipfs.mydomain.com" ) clear_config([Pleroma.Uploaders.IPFS, :post_gateway_url], "http://localhost:5001") @@ -34,7 +34,7 @@ test "it returns path to ipfs file with cid as subdomain" do test "it returns path to ipfs file with cid as path" do clear_config( [Pleroma.Uploaders.IPFS, :get_gateway_url], - "https://ipfs.mydomain.com/ipfs/<%= cid %>" + "https://ipfs.mydomain.com/ipfs/{CID}" ) assert IPFS.get_file("testcid") == { From 7c1af86f979ecebcd38995e5278fe2d59a36eda5 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Mon, 9 May 2022 12:15:40 +0200 Subject: [PATCH 10/93] ipfs: refactor final_url generation. 
add tests for final_url fix lint --- lib/pleroma/uploaders/ipfs.ex | 19 ++++++++++--------- test/pleroma/uploaders/ipfs_test.exs | 13 ++++++++++++- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex index dde520d8e..7a7481d81 100644 --- a/lib/pleroma/uploaders/ipfs.ex +++ b/lib/pleroma/uploaders/ipfs.ex @@ -12,6 +12,13 @@ defmodule Pleroma.Uploaders.IPFS do @placeholder "{CID}" def placeholder, do: @placeholder + def get_final_url(method) do + config = Config.get([__MODULE__]) + post_base_url = Keyword.get(config, :post_gateway_url) + + Path.join([post_base_url, method]) + end + @impl true def get_file(file) do b_url = Pleroma.Upload.base_url() @@ -25,15 +32,12 @@ def get_file(file) do @impl true def put_file(%Pleroma.Upload{} = upload) do - config = Config.get([__MODULE__]) - post_base_url = Keyword.get(config, :post_gateway_url) - mp = Multipart.new() |> Multipart.add_content_type_param("charset=utf-8") |> Multipart.add_file(upload.tempfile) - final_url = Path.join([post_base_url, "/api/v0/add"]) + final_url = get_final_url("/api/v0/add") case Pleroma.HTTP.post(final_url, mp, [], params: ["cid-version": "1"]) do {:ok, ret} -> @@ -42,7 +46,7 @@ def put_file(%Pleroma.Upload{} = upload) do if Map.has_key?(ret, "Hash") do {:ok, {:file, ret["Hash"]}} else - {:error, "JSON doesn't contain Hash value"} + {:error, "JSON doesn't contain Hash key"} end error -> @@ -58,10 +62,7 @@ def put_file(%Pleroma.Upload{} = upload) do @impl true def delete_file(file) do - config = Config.get([__MODULE__]) - post_base_url = Keyword.get(config, :post_gateway_url) - - final_url = Path.join([post_base_url, "/api/v0/files/rm"]) + final_url = get_final_url("/api/v0/files/rm") case Pleroma.HTTP.post(final_url, "", [], params: [arg: file]) do {:ok, %{status_code: 204}} -> :ok diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index fc87fa378..d567272d2 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -23,6 +23,16 @@ defmodule Pleroma.Uploaders.IPFSTest do clear_config([Pleroma.Uploaders.IPFS, :post_gateway_url], "http://localhost:5001") end + describe "get_final_url" do + test "it returns the final url for put_file" do + assert IPFS.get_final_url("/api/v0/add") == "http://localhost:5001/api/v0/add" + end + + test "it returns the final url for delete_file" do + assert IPFS.get_final_url("/api/v0/files/rm") == "http://localhost:5001/api/v0/files/rm" + end + end + describe "get_file/1" do test "it returns path to ipfs file with cid as subdomain" do assert IPFS.get_file("testcid") == { @@ -62,7 +72,8 @@ test "save file", %{file_upload: file_upload} do {:ok, %Tesla.Env{ status: 200, - body: "{\"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}" + body: + "{\"Name\":\"image-tet.jpg\",\"Size\":\"5000\", \"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}" }} end do assert IPFS.put_file(file_upload) == From 98f268e5ecc5bab98c98270a582f8b3f0e3be4e8 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Thu, 9 Jun 2022 19:24:13 +0200 Subject: [PATCH 11/93] ipfs: small refactor and more tests --- lib/pleroma/uploaders/ipfs.ex | 24 ++++++++++++++---------- test/pleroma/uploaders/ipfs_test.exs | 24 ++++++++++++++++++++++-- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex index 7a7481d81..9f6f26e2e 100644 --- a/lib/pleroma/uploaders/ipfs.ex +++ 
b/lib/pleroma/uploaders/ipfs.ex @@ -9,16 +9,24 @@ defmodule Pleroma.Uploaders.IPFS do alias Pleroma.Config alias Tesla.Multipart - @placeholder "{CID}" - def placeholder, do: @placeholder - - def get_final_url(method) do + defp get_final_url(method) do config = Config.get([__MODULE__]) post_base_url = Keyword.get(config, :post_gateway_url) Path.join([post_base_url, method]) end + def put_file_endpoint() do + get_final_url("/api/v0/add") + end + + def delete_file_endpoint() do + get_final_url("/api/v0/files/rm") + end + + @placeholder "{CID}" + def placeholder, do: @placeholder + @impl true def get_file(file) do b_url = Pleroma.Upload.base_url() @@ -37,9 +45,7 @@ def put_file(%Pleroma.Upload{} = upload) do |> Multipart.add_content_type_param("charset=utf-8") |> Multipart.add_file(upload.tempfile) - final_url = get_final_url("/api/v0/add") - - case Pleroma.HTTP.post(final_url, mp, [], params: ["cid-version": "1"]) do + case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do {:ok, ret} -> case Jason.decode(ret.body) do {:ok, ret} -> @@ -62,9 +68,7 @@ def put_file(%Pleroma.Upload{} = upload) do @impl true def delete_file(file) do - final_url = get_final_url("/api/v0/files/rm") - - case Pleroma.HTTP.post(final_url, "", [], params: [arg: file]) do + case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do {:ok, %{status_code: 204}} -> :ok error -> {:error, inspect(error)} end diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index d567272d2..f2b880d9f 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -25,11 +25,11 @@ defmodule Pleroma.Uploaders.IPFSTest do describe "get_final_url" do test "it returns the final url for put_file" do - assert IPFS.get_final_url("/api/v0/add") == "http://localhost:5001/api/v0/add" + assert IPFS.put_file_endpoint() == "http://localhost:5001/api/v0/add" end test "it returns the final url for delete_file" do - assert IPFS.get_final_url("/api/v0/files/rm") == "http://localhost:5001/api/v0/files/rm" + assert IPFS.delete_file_endpoint() == "http://localhost:5001/api/v0/files/rm" end end @@ -88,6 +88,26 @@ test "returns error", %{file_upload: file_upload} do end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}" end end + + test "returns error if JSON decode fails", %{file_upload: file_upload} do + with_mocks([ + {Pleroma.HTTP, [], [post: fn _, _, _, _ -> {:ok, %Tesla.Env{status: 200, body: ''}} end]}, + {Jason, [], [decode: fn _ -> {:error, "JSON decode failed"} end]} + ]) do + assert capture_log(fn -> + assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"} + end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"JSON decode failed\"}" + end + end + + test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do + with_mocks([ + {Pleroma.HTTP, [], [post: fn _, _, _, _ -> {:ok, %Tesla.Env{status: 200, body: ''}} end]}, + {Jason, [], [decode: fn _ -> {:ok, %{}} end]} + ]) do + assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"} + end + end end describe "delete_file/1" do From 254f2ea85400ebd692fc4a45f5ac22fedd49ec09 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Thu, 9 Jun 2022 23:38:50 +0200 Subject: [PATCH 12/93] ipfs: remove unused alias fix analysis job --- lib/pleroma/uploaders/ipfs.ex | 4 ++-- test/pleroma/uploaders/ipfs_test.exs | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/uploaders/ipfs.ex 
b/lib/pleroma/uploaders/ipfs.ex index 9f6f26e2e..32e06c5cf 100644 --- a/lib/pleroma/uploaders/ipfs.ex +++ b/lib/pleroma/uploaders/ipfs.ex @@ -16,11 +16,11 @@ defp get_final_url(method) do Path.join([post_base_url, method]) end - def put_file_endpoint() do + def put_file_endpoint do get_final_url("/api/v0/add") end - def delete_file_endpoint() do + def delete_file_endpoint do get_final_url("/api/v0/files/rm") end diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index f2b880d9f..5edb6266b 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -6,7 +6,6 @@ defmodule Pleroma.Uploaders.IPFSTest do use Pleroma.DataCase alias Pleroma.Uploaders.IPFS - alias Tesla.Multipart import Mock import ExUnit.CaptureLog From 5e097eb91def0efd3cd0008309fd524fcfd88e15 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Tue, 28 Jun 2022 17:53:44 +0200 Subject: [PATCH 13/93] ipfs: better tests with @ilja suggestions --- test/pleroma/uploaders/ipfs_test.exs | 38 ++++++++++++++++++---------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index 5edb6266b..9df3f239c 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -6,6 +6,7 @@ defmodule Pleroma.Uploaders.IPFSTest do use Pleroma.DataCase alias Pleroma.Uploaders.IPFS + alias Tesla.Multipart import Mock import ExUnit.CaptureLog @@ -62,12 +63,17 @@ test "it returns path to ipfs file with cid as path" do tempfile: Path.absname("test/instance_static/add/shortcode.png") } - [file_upload: file_upload] + mp = + Multipart.new() + |> Multipart.add_content_type_param("charset=utf-8") + |> Multipart.add_file(file_upload.tempfile) + + [file_upload: file_upload, mp: mp] end test "save file", %{file_upload: file_upload} do with_mock Pleroma.HTTP, - post: fn _, _, _, _ -> + post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> {:ok, %Tesla.Env{ status: 200, @@ -81,7 +87,10 @@ test "save file", %{file_upload: file_upload} do end test "returns error", %{file_upload: file_upload} do - with_mock Pleroma.HTTP, post: fn _, _, _, _ -> {:error, "IPFS Gateway upload failed"} end do + with_mock Pleroma.HTTP, + post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + {:error, "IPFS Gateway upload failed"} + end do assert capture_log(fn -> assert IPFS.put_file(file_upload) == {:error, "IPFS Gateway upload failed"} end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}" @@ -89,21 +98,22 @@ test "returns error", %{file_upload: file_upload} do end test "returns error if JSON decode fails", %{file_upload: file_upload} do - with_mocks([ - {Pleroma.HTTP, [], [post: fn _, _, _, _ -> {:ok, %Tesla.Env{status: 200, body: ''}} end]}, - {Jason, [], [decode: fn _ -> {:error, "JSON decode failed"} end]} - ]) do + with_mock Pleroma.HTTP, [], + post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + {:ok, %Tesla.Env{status: 200, body: 'invalid'}} + end do assert capture_log(fn -> assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"} - end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"JSON decode failed\"}" + end) =~ + "Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError{data: \"invalid\", position: 0, token: nil}}" end end test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do - with_mocks([ - {Pleroma.HTTP, [], [post: 
fn _, _, _, _ -> {:ok, %Tesla.Env{status: 200, body: ''}} end]}, - {Jason, [], [decode: fn _ -> {:ok, %{}} end]} - ]) do + with_mock Pleroma.HTTP, [], + post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + {:ok, %Tesla.Env{status: 200, body: '{"key": "value"}'}} + end do assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"} end end @@ -111,7 +121,9 @@ test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_ describe "delete_file/1" do test_with_mock "deletes file", Pleroma.HTTP, - post: fn _, _, _, _ -> {:ok, %{status_code: 204}} end do + post: fn "http://localhost:5001/api/v0/files/rm", "", [], params: [arg: "image.jpg"] -> + {:ok, %{status_code: 204}} + end do assert :ok = IPFS.delete_file("image.jpg") end end From 21d9091f5e422493ff69fe59db9c965e0d511369 Mon Sep 17 00:00:00 2001 From: Claudio Maradonna Date: Fri, 8 Jul 2022 10:06:46 +0200 Subject: [PATCH 14/93] ipfs: replacing single quotes with double quotes --- test/pleroma/uploaders/ipfs_test.exs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index 9df3f239c..853d185e5 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -100,7 +100,7 @@ test "returns error", %{file_upload: file_upload} do test "returns error if JSON decode fails", %{file_upload: file_upload} do with_mock Pleroma.HTTP, [], post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> - {:ok, %Tesla.Env{status: 200, body: 'invalid'}} + {:ok, %Tesla.Env{status: 200, body: "invalid"}} end do assert capture_log(fn -> assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"} @@ -112,7 +112,7 @@ test "returns error if JSON decode fails", %{file_upload: file_upload} do test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do with_mock Pleroma.HTTP, [], post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> - {:ok, %Tesla.Env{status: 200, body: '{"key": "value"}'}} + {:ok, %Tesla.Env{status: 200, body: "{\"key\": \"value\"}"}} end do assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"} end From c899af1d6acad1895240a0247e9b91eca5db08df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?marcin=20miko=C5=82ajczak?= Date: Thu, 14 Apr 2022 20:09:43 +0200 Subject: [PATCH 15/93] Reject requests from specified instances if `authorized_fetch_mode` is enabled MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: marcin mikołajczak --- config/config.exs | 1 + config/description.exs | 12 ++++ docs/configuration/cheatsheet.md | 1 + lib/pleroma/signature.ex | 16 +++-- .../web/mastodon_api/views/instance_view.ex | 7 +++ lib/pleroma/web/plugs/http_signature_plug.ex | 40 ++++++++++++ test/pleroma/signature_test.exs | 8 +++ .../web/plugs/http_signature_plug_test.exs | 63 +++++++++++++++++-- 8 files changed, 140 insertions(+), 8 deletions(-) diff --git a/config/config.exs b/config/config.exs index 0fc959807..cfa16f766 100644 --- a/config/config.exs +++ b/config/config.exs @@ -216,6 +216,7 @@ allow_relay: true, public: true, quarantined_instances: [], + rejected_instances: [], static_dir: "instance/static/", allowed_post_formats: [ "text/plain", diff --git a/config/description.exs b/config/description.exs index b29348edf..a75f40929 100644 --- a/config/description.exs +++ b/config/description.exs @@ -714,6 +714,18 
@@ {"*.quarantined.com", "Reason"} ] }, + %{ + key: :rejected_instances, + type: {:list, :tuple}, + key_placeholder: "instance", + value_placeholder: "reason", + description: + "List of ActivityPub instances to reject requests from if authorized_fetch_mode is enabled", + suggestions: [ + {"rejected.com", "Reason"}, + {"*.rejected.com", "Reason"} + ] + }, %{ key: :static_dir, type: :string, diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 6e13b9622..84a5bdb98 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -41,6 +41,7 @@ To add configuration to your config file, you can copy it from the base config. * `allow_relay`: Permits remote instances to subscribe to all public posts of your instance. This may increase the visibility of your instance. * `public`: Makes the client API in authenticated mode-only except for user-profiles. Useful for disabling the Local Timeline and The Whole Known Network. Note that there is a dependent setting restricting or allowing unauthenticated access to specific resources, see `restrict_unauthenticated` for more details. * `quarantined_instances`: ActivityPub instances where private (DMs, followers-only) activities will not be send. +* `rejected_instances`: ActivityPub instances to reject requests from if authorized_fetch_mode is enabled. * `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML). * `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with older software for theses nicknames. diff --git a/lib/pleroma/signature.ex b/lib/pleroma/signature.ex index dbe6fd209..d5ba5c4fb 100644 --- a/lib/pleroma/signature.ex +++ b/lib/pleroma/signature.ex @@ -37,8 +37,7 @@ def key_id_to_actor_id(key_id) do end def fetch_public_key(conn) do - with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn), - {:ok, actor_id} <- key_id_to_actor_id(kid), + with {:ok, actor_id} <- get_actor_id(conn), {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do {:ok, public_key} else @@ -48,8 +47,7 @@ def fetch_public_key(conn) do end def refetch_public_key(conn) do - with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn), - {:ok, actor_id} <- key_id_to_actor_id(kid), + with {:ok, actor_id} <- get_actor_id(conn), {:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id), {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do {:ok, public_key} @@ -59,6 +57,16 @@ def refetch_public_key(conn) do end end + def get_actor_id(conn) do + with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn), + {:ok, actor_id} <- key_id_to_actor_id(kid) do + {:ok, actor_id} + else + e -> + {:error, e} + end + end + def sign(%User{} = user, headers) do with {:ok, %{keys: keys}} <- User.ensure_keys_present(user), {:ok, private_key, _} <- Keys.keys_from_pem(keys) do diff --git a/lib/pleroma/web/mastodon_api/views/instance_view.ex b/lib/pleroma/web/mastodon_api/views/instance_view.ex index 62931bd41..017bd62e2 100644 --- a/lib/pleroma/web/mastodon_api/views/instance_view.ex +++ b/lib/pleroma/web/mastodon_api/views/instance_view.ex @@ -105,6 +105,7 @@ def features do def federation do quarantined = Config.get([:instance, :quarantined_instances], []) + rejected = Config.get([:instance, :rejected_instances], []) if Config.get([:mrf, :transparency]) do {:ok, data} = MRF.describe() @@ -124,6 +125,12 @@ def federation do |> Enum.map(fn {instance, reason} -> {instance, 
%{"reason" => reason}} end) |> Map.new() }) + |> Map.put( + :rejected_instances, + rejected + |> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end) + |> Map.new() + ) else %{} end diff --git a/lib/pleroma/web/plugs/http_signature_plug.ex b/lib/pleroma/web/plugs/http_signature_plug.ex index d023754a6..cf80b9b14 100644 --- a/lib/pleroma/web/plugs/http_signature_plug.ex +++ b/lib/pleroma/web/plugs/http_signature_plug.ex @@ -5,6 +5,10 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do import Plug.Conn import Phoenix.Controller, only: [get_format: 1, text: 2] + + alias Pleroma.Config + alias Pleroma.Web.ActivityPub.MRF + require Logger def init(options) do @@ -19,7 +23,9 @@ def call(conn, _opts) do if get_format(conn) == "activity+json" do conn |> maybe_assign_valid_signature() + |> maybe_assign_actor_id() |> maybe_require_signature() + |> maybe_filter_requests() else conn end @@ -46,6 +52,16 @@ defp maybe_assign_valid_signature(conn) do end end + defp maybe_assign_actor_id(%{assigns: %{valid_signature: true}} = conn) do + adapter = Application.get_env(:http_signatures, :adapter) + + {:ok, actor_id} = adapter.get_actor_id(conn) + + assign(conn, :actor_id, actor_id) + end + + defp maybe_assign_actor_id(conn), do: conn + defp has_signature_header?(conn) do conn |> get_req_header("signature") |> Enum.at(0, false) end @@ -62,4 +78,28 @@ defp maybe_require_signature(conn) do conn end end + + defp maybe_filter_requests(%{halted: true} = conn), do: conn + + defp maybe_filter_requests(conn) do + if Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do + %{host: host} = URI.parse(conn.assigns.actor_id) + + if MRF.subdomain_match?(rejected_domains(), host) do + conn + |> put_status(:unauthorized) + |> halt() + else + conn + end + else + conn + end + end + + defp rejected_domains do + Config.get([:instance, :rejected_instances]) + |> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples() + |> Pleroma.Web.ActivityPub.MRF.subdomains_regex() + end end diff --git a/test/pleroma/signature_test.exs b/test/pleroma/signature_test.exs index 92d05f26c..8f94efdc3 100644 --- a/test/pleroma/signature_test.exs +++ b/test/pleroma/signature_test.exs @@ -70,6 +70,14 @@ test "it returns error when not found user" do end end + describe "get_actor_id/1" do + test "it returns actor id" do + ap_id = "https://mastodon.social/users/lambadalambda" + + assert Signature.get_actor_id(make_fake_conn(ap_id)) == {:ok, ap_id} + end + end + describe "sign/2" do test "it returns signature headers" do user = diff --git a/test/pleroma/web/plugs/http_signature_plug_test.exs b/test/pleroma/web/plugs/http_signature_plug_test.exs index 2d8fba3cd..de68e8823 100644 --- a/test/pleroma/web/plugs/http_signature_plug_test.exs +++ b/test/pleroma/web/plugs/http_signature_plug_test.exs @@ -10,11 +10,15 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do import Phoenix.Controller, only: [put_format: 2] import Mock - test "it call HTTPSignatures to check validity if the actor sighed it" do + test "it call HTTPSignatures to check validity if the actor signed it" do params = %{"actor" => "http://mastodon.example.org/users/admin"} conn = build_conn(:get, "/doesntmattter", params) - with_mock HTTPSignatures, validate_conn: fn _ -> true end do + with_mock HTTPSignatures, + validate_conn: fn _ -> true end, + signature_for_conn: fn _ -> + %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} + end do conn = conn |> put_req_header( @@ -41,7 +45,11 @@ test "it call HTTPSignatures to check validity if the actor 
sighed it" do end test "when signature header is present", %{conn: conn} do - with_mock HTTPSignatures, validate_conn: fn _ -> false end do + with_mock HTTPSignatures, + validate_conn: fn _ -> false end, + signature_for_conn: fn _ -> + %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} + end do conn = conn |> put_req_header( @@ -58,7 +66,11 @@ test "when signature header is present", %{conn: conn} do assert called(HTTPSignatures.validate_conn(:_)) end - with_mock HTTPSignatures, validate_conn: fn _ -> true end do + with_mock HTTPSignatures, + validate_conn: fn _ -> true end, + signature_for_conn: fn _ -> + %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} + end do conn = conn |> put_req_header( @@ -82,4 +94,47 @@ test "halts the connection when `signature` header is not present", %{conn: conn assert conn.resp_body == "Request not signed" end end + + test "rejects requests from `rejected_instances` when `authorized_fetch_mode` is enabled" do + clear_config([:activitypub, :authorized_fetch_mode], true) + clear_config([:instance, :rejected_instances], [{"mastodon.example.org", "no reason"}]) + + with_mock HTTPSignatures, + validate_conn: fn _ -> true end, + signature_for_conn: fn _ -> + %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} + end do + conn = + build_conn(:get, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"}) + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == true + assert called(HTTPSignatures.validate_conn(:_)) + end + + with_mock HTTPSignatures, + validate_conn: fn _ -> true end, + signature_for_conn: fn _ -> + %{"keyId" => "http://allowed.example.org/users/admin#main-key"} + end do + conn = + build_conn(:get, "/doesntmattter", %{"actor" => "http://allowed.example.org/users/admin"}) + |> put_req_header( + "signature", + "keyId=\"http://allowed.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == false + assert called(HTTPSignatures.validate_conn(:_)) + end + end end From 2ae1b802f260e9ad8eaa585907d9505545ceb872 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" Date: Thu, 9 Mar 2023 10:21:11 +0100 Subject: [PATCH 16/93] AttachmentValidator: Add support for Honk "summary" + "name" As used by Honk and supported by Mastodon --- .../object_validators/attachment_validator.ex | 3 ++- .../object_validators/attachment_validator_test.exs | 11 +++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex index 398020bff..766421e60 100644 --- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex @@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do field(:type, :string) field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream") field(:name, :string) + field(:summary, :string) field(:blurhash, :string) embeds_many :url, UrlObjectValidator, primary_key: false do @@ -44,7 +45,7 @@ def changeset(struct, data) do |> fix_url() struct - |> cast(data, [:id, :type, :mediaType, :name, :blurhash]) + |> cast(data, 
[:id, :type, :mediaType, :name, :summary, :blurhash]) |> cast_embed(:url, with: &url_changeset/2, required: true) |> validate_inclusion(:type, ~w[Link Document Audio Image Video]) |> validate_required([:type, :mediaType]) diff --git a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs index 77f2044e9..8d561603c 100644 --- a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs +++ b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs @@ -25,19 +25,22 @@ test "fails without url" do end test "works with honkerific attachments" do - attachment = %{ + honk = %{ "mediaType" => "", - "name" => "", - "summary" => "298p3RG7j27tfsZ9RQ.jpg", + "summary" => "Select your spirit chonk", + "name" => "298p3RG7j27tfsZ9RQ.jpg", "type" => "Document", "url" => "https://honk.tedunangst.com/d/298p3RG7j27tfsZ9RQ.jpg" } assert {:ok, attachment} = - AttachmentValidator.cast_and_validate(attachment) + honk + |> AttachmentValidator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) assert attachment.mediaType == "application/octet-stream" + assert attachment.summary == "Select your spirit chonk" + assert attachment.name == "298p3RG7j27tfsZ9RQ.jpg" end test "works with an unknown but valid mime type" do From 197647a04e66c1af3ae691a4507612fdbee9c48c Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" Date: Thu, 9 Mar 2023 10:35:57 +0100 Subject: [PATCH 17/93] MastoAPI Attachment: Use "summary" for descriptions if present --- .../web/api_spec/schemas/attachment.ex | 6 +- .../web/mastodon_api/views/status_view.ex | 17 ++- .../mastodon_api/views/status_view_test.exs | 101 ++++++++++++------ 3 files changed, 87 insertions(+), 37 deletions(-) diff --git a/lib/pleroma/web/api_spec/schemas/attachment.ex b/lib/pleroma/web/api_spec/schemas/attachment.ex index 48634a14f..e89f2ddd0 100644 --- a/lib/pleroma/web/api_spec/schemas/attachment.ex +++ b/lib/pleroma/web/api_spec/schemas/attachment.ex @@ -50,7 +50,11 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do pleroma: %Schema{ type: :object, properties: %{ - mime_type: %Schema{type: :string, description: "mime type of the attachment"} + mime_type: %Schema{type: :string, description: "mime type of the attachment"}, + name: %Schema{ + type: :string, + description: "Name of the attachment, typically the filename" + } } } }, diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 0a8c98b44..7a3af8acb 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -571,6 +571,19 @@ def render("attachment.json", %{attachment: attachment}) do to_string(attachment["id"] || hash_id) end + description = + if attachment["summary"] do + HTML.strip_tags(attachment["summary"]) + else + attachment["name"] + end + + name = if attachment["summary"], do: attachment["name"] + + pleroma = + %{mime_type: media_type} + |> Maps.put_if_present(:name, name) + %{ id: attachment_id, url: href, @@ -578,8 +591,8 @@ def render("attachment.json", %{attachment: attachment}) do preview_url: href_preview, text_url: href, type: type, - description: attachment["name"], - pleroma: %{mime_type: media_type}, + description: description, + pleroma: pleroma, blurhash: attachment["blurhash"] } |> Maps.put_if_present(:meta, meta) diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs 
b/test/pleroma/web/mastodon_api/views/status_view_test.exs index f76b115b7..4580da75b 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -456,45 +456,78 @@ test "create mentions from the 'tag' field" do assert mention.url == recipient.ap_id end - test "attachments" do - object = %{ - "type" => "Image", - "url" => [ - %{ - "mediaType" => "image/png", - "href" => "someurl", - "width" => 200, - "height" => 100 - } - ], - "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn", - "uuid" => 6 - } + describe "attachments" do + test "Complete Mastodon style" do + object = %{ + "type" => "Image", + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "someurl", + "width" => 200, + "height" => 100 + } + ], + "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn", + "uuid" => 6 + } - expected = %{ - id: "1638338801", - type: "image", - url: "someurl", - remote_url: "someurl", - preview_url: "someurl", - text_url: "someurl", - description: nil, - pleroma: %{mime_type: "image/png"}, - meta: %{original: %{width: 200, height: 100, aspect: 2}}, - blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn" - } + expected = %{ + id: "1638338801", + type: "image", + url: "someurl", + remote_url: "someurl", + preview_url: "someurl", + text_url: "someurl", + description: nil, + pleroma: %{mime_type: "image/png"}, + meta: %{original: %{width: 200, height: 100, aspect: 2}}, + blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn" + } - api_spec = Pleroma.Web.ApiSpec.spec() + api_spec = Pleroma.Web.ApiSpec.spec() - assert expected == StatusView.render("attachment.json", %{attachment: object}) - assert_schema(expected, "Attachment", api_spec) + assert expected == StatusView.render("attachment.json", %{attachment: object}) + assert_schema(expected, "Attachment", api_spec) - # If theres a "id", use that instead of the generated one - object = Map.put(object, "id", 2) - result = StatusView.render("attachment.json", %{attachment: object}) + # If theres a "id", use that instead of the generated one + object = Map.put(object, "id", 2) + result = StatusView.render("attachment.json", %{attachment: object}) - assert %{id: "2"} = result - assert_schema(result, "Attachment", api_spec) + assert %{id: "2"} = result + assert_schema(result, "Attachment", api_spec) + end + + test "Honkerific" do + object = %{ + "type" => "Image", + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "someurl" + } + ], + "name" => "fool.jpeg", + "summary" => "they have played us for absolute fools." 
+ } + + expected = %{ + blurhash: nil, + description: "they have played us for absolute fools.", + id: "1638338801", + pleroma: %{mime_type: "image/png", name: "fool.jpeg"}, + preview_url: "someurl", + remote_url: "someurl", + text_url: "someurl", + type: "image", + url: "someurl" + } + + api_spec = Pleroma.Web.ApiSpec.spec() + + assert expected == StatusView.render("attachment.json", %{attachment: object}) + assert_schema(expected, "Attachment", api_spec) + end end test "put the url advertised in the Activity in to the url attribute" do From 1ab4ab8d38687634735e1415f395b072718ab1ab Mon Sep 17 00:00:00 2001 From: tusooa Date: Tue, 18 Jul 2023 18:24:30 -0400 Subject: [PATCH 18/93] Extract translatable strings --- changelog.d/3907.skip | 0 priv/gettext/config_descriptions.pot | 84 ++++++++++++++++++++++++++++ priv/gettext/errors.pot | 36 +++++++++--- priv/gettext/oauth_scopes.pot | 40 +++++++++++++ 4 files changed, 152 insertions(+), 8 deletions(-) create mode 100644 changelog.d/3907.skip diff --git a/changelog.d/3907.skip b/changelog.d/3907.skip new file mode 100644 index 000000000..e69de29bb diff --git a/priv/gettext/config_descriptions.pot b/priv/gettext/config_descriptions.pot index 4f60e1c85..b4792868b 100644 --- a/priv/gettext/config_descriptions.pot +++ b/priv/gettext/config_descriptions.pot @@ -5973,3 +5973,87 @@ msgstr "" msgctxt "config label at :pleroma-:instance > :languages" msgid "Languages" msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-:mrf_emoji" +msgid "Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html)." +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-:mrf_emoji > :federated_timeline_removal_shortcode" +msgid " A list of patterns which result in message with emojis whose shortcodes match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-:mrf_emoji > :federated_timeline_removal_url" +msgid " A list of patterns which result in message with emojis whose URLs match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-:mrf_emoji > :remove_shortcode" +msgid " A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-:mrf_emoji > :remove_url" +msgid " A list of patterns which result in emoji whose URL matches being removed from the message. 
This will apply to statuses, emoji reactions, and user profiles.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-Pleroma.User.Backup > :process_chunk_size" +msgid "The number of activities to fetch in the backup job for each chunk." +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config description at :pleroma-Pleroma.User.Backup > :process_wait_time" +msgid "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed." +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-:mrf_emoji" +msgid "MRF Emoji" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-:mrf_emoji > :federated_timeline_removal_shortcode" +msgid "Federated timeline removal shortcode" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-:mrf_emoji > :federated_timeline_removal_url" +msgid "Federated timeline removal url" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-:mrf_emoji > :remove_shortcode" +msgid "Remove shortcode" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-:mrf_emoji > :remove_url" +msgid "Remove url" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-Pleroma.User.Backup > :process_chunk_size" +msgid "Process Chunk Size" +msgstr "" + +#: lib/pleroma/docs/translator.ex:5 +#, elixir-autogen, elixir-format +msgctxt "config label at :pleroma-Pleroma.User.Backup > :process_wait_time" +msgid "Process Wait Time" +msgstr "" diff --git a/priv/gettext/errors.pot b/priv/gettext/errors.pot index d320ee1bd..aca77f8fa 100644 --- a/priv/gettext/errors.pot +++ b/priv/gettext/errors.pot @@ -110,7 +110,7 @@ msgstr "" msgid "Can't display this activity" msgstr "" -#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:334 +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:346 #, elixir-autogen, elixir-format msgid "Can't find user" msgstr "" @@ -198,7 +198,7 @@ msgstr "" msgid "Invalid password." 
msgstr "" -#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:267 +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:279 #, elixir-autogen, elixir-format msgid "Invalid request" msgstr "" @@ -225,7 +225,7 @@ msgstr "" #: lib/pleroma/web/feed/tag_controller.ex:16 #: lib/pleroma/web/feed/user_controller.ex:69 #: lib/pleroma/web/o_status/o_status_controller.ex:132 -#: lib/pleroma/web/plugs/uploaded_media.ex:104 +#: lib/pleroma/web/plugs/uploaded_media.ex:84 #, elixir-autogen, elixir-format msgid "Not found" msgstr "" @@ -235,7 +235,7 @@ msgstr "" msgid "Poll's author can't vote" msgstr "" -#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:499 +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:511 #: lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex:20 #: lib/pleroma/web/mastodon_api/controllers/poll_controller.ex:39 #: lib/pleroma/web/mastodon_api/controllers/poll_controller.ex:51 @@ -341,7 +341,7 @@ msgstr "" msgid "CAPTCHA expired" msgstr "" -#: lib/pleroma/web/plugs/uploaded_media.ex:77 +#: lib/pleroma/web/plugs/uploaded_media.ex:57 #, elixir-autogen, elixir-format msgid "Failed" msgstr "" @@ -361,7 +361,7 @@ msgstr "" msgid "Insufficient permissions: %{permissions}." msgstr "" -#: lib/pleroma/web/plugs/uploaded_media.ex:131 +#: lib/pleroma/web/plugs/uploaded_media.ex:111 #, elixir-autogen, elixir-format msgid "Internal Error" msgstr "" @@ -557,7 +557,7 @@ msgstr "" msgid "Access denied" msgstr "" -#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:331 +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:343 #, elixir-autogen, elixir-format msgid "This API requires an authenticated user" msgstr "" @@ -567,7 +567,7 @@ msgstr "" msgid "User is not an admin." msgstr "" -#: lib/pleroma/user/backup.ex:73 +#: lib/pleroma/user/backup.ex:78 #, elixir-format msgid "Last export was less than a day ago" msgid_plural "Last export was less than %{days} days ago" @@ -607,3 +607,23 @@ msgstr "" #, elixir-autogen, elixir-format msgid "User isn't privileged." 
msgstr "" + +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:267 +#, elixir-autogen, elixir-format +msgid "Bio is too long" +msgstr "" + +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:270 +#, elixir-autogen, elixir-format +msgid "Name is too long" +msgstr "" + +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:273 +#, elixir-autogen, elixir-format +msgid "One or more field entries are too long" +msgstr "" + +#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:276 +#, elixir-autogen, elixir-format +msgid "Too many field entries" +msgstr "" diff --git a/priv/gettext/oauth_scopes.pot b/priv/gettext/oauth_scopes.pot index 50ad0dd9e..83328770e 100644 --- a/priv/gettext/oauth_scopes.pot +++ b/priv/gettext/oauth_scopes.pot @@ -219,3 +219,43 @@ msgstr "" #, elixir-autogen, elixir-format msgid "read:mutes" msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "push" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "read:backups" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "read:chats" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "read:media" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "read:reports" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "write:chats" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "write:follow" +msgstr "" + +#: lib/pleroma/web/api_spec/scopes/translator.ex:5 +#, elixir-autogen, elixir-format +msgid "write:reports" +msgstr "" From b6a9d87f16a4806eab7a6da874d6f75b65d4f214 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?marcin=20miko=C5=82ajczak?= Date: Wed, 15 Mar 2023 19:44:42 +0100 Subject: [PATCH 19/93] Display reposted replies with exclude_replies: true MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: marcin mikołajczak --- changelog.d/show-reposter-replies.add | 1 + lib/pleroma/web/activity_pub/activity_pub.ex | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelog.d/show-reposter-replies.add diff --git a/changelog.d/show-reposter-replies.add b/changelog.d/show-reposter-replies.add new file mode 100644 index 000000000..3b852ec3b --- /dev/null +++ b/changelog.d/show-reposter-replies.add @@ -0,0 +1 @@ +Display reposted replies with exclude_replies: true \ No newline at end of file diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 3979d418e..4b956c680 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -964,8 +964,9 @@ defp restrict_media(query, _), do: query defp restrict_replies(query, %{exclude_replies: true}) do from( - [_activity, object] in query, - where: fragment("?->>'inReplyTo' is null", object.data) + [activity, object] in query, + where: + fragment("?->>'inReplyTo' is null or ?->>'type' = 'Announce'", object.data, activity.data) ) end From f271ea6e432d685c113582e5944d79e12c153016 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" Date: Sat, 16 Dec 2023 18:22:32 +0100 Subject: [PATCH 20/93] Move Plugs.RemoteIP.maybe_add_cidr/1 to InetHelper.parse_cidr/1 --- 
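The helper extracted in the diff below normalizes a bare IPv4 or IPv6 address to a `/32` or `/128` range before parsing, so every configured entry can be treated as a CIDR. The snippet that follows is a rough sketch, not part of the patch, showing how the parsed ranges combine with `InetCidr.contains?/2`, the same check used by the fetch-mode exceptions added later in this series; the addresses are placeholders.

```elixir
# Illustrative only: parse a mixed list of bare IPs and CIDR ranges, then check
# whether a peer address falls inside any of them.
exceptions =
  ["127.0.0.1", "10.0.0.0/8"]
  |> Enum.map(&Pleroma.Helpers.InetHelper.parse_cidr/1)

Enum.any?(exceptions, fn range -> InetCidr.contains?(range, {127, 0, 0, 1}) end)
#=> true
```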
lib/pleroma/helpers/inet_helper.ex | 11 +++++++++++ lib/pleroma/web/plugs/remote_ip.ex | 14 ++------------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/pleroma/helpers/inet_helper.ex b/lib/pleroma/helpers/inet_helper.ex index 704d37f8a..3500fc679 100644 --- a/lib/pleroma/helpers/inet_helper.ex +++ b/lib/pleroma/helpers/inet_helper.ex @@ -16,4 +16,15 @@ def parse_address(ip) when is_binary(ip) do def parse_address(ip) do :inet.parse_address(ip) end + + def parse_cidr(proxy) when is_binary(proxy) do + proxy = + cond do + "/" in String.codepoints(proxy) -> proxy + InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32" + InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128" + end + + InetCidr.parse(proxy, true) + end end diff --git a/lib/pleroma/web/plugs/remote_ip.ex b/lib/pleroma/web/plugs/remote_ip.ex index f207d9fef..3a4bffb50 100644 --- a/lib/pleroma/web/plugs/remote_ip.ex +++ b/lib/pleroma/web/plugs/remote_ip.ex @@ -8,6 +8,7 @@ defmodule Pleroma.Web.Plugs.RemoteIp do """ alias Pleroma.Config + alias Pleroma.Helpers.InetHelper import Plug.Conn @behaviour Plug @@ -30,19 +31,8 @@ defp remote_ip_opts do proxies = Config.get([__MODULE__, :proxies], []) |> Enum.concat(reserved) - |> Enum.map(&maybe_add_cidr/1) + |> Enum.map(&InetHelper.parse_cidr/1) {headers, proxies} end - - defp maybe_add_cidr(proxy) when is_binary(proxy) do - proxy = - cond do - "/" in String.codepoints(proxy) -> proxy - InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32" - InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128" - end - - InetCidr.parse(proxy, true) - end end From 086ba59d0346be870dc7df2660fbb55666bf0af7 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" Date: Sat, 16 Dec 2023 18:56:46 +0100 Subject: [PATCH 21/93] HTTPSignaturePlug: Add :authorized_fetch_mode_exceptions --- changelog.d/auth-fetch-exception.add | 1 + config/description.exs | 6 ++++++ docs/configuration/cheatsheet.md | 1 + lib/pleroma/web/plugs/http_signature_plug.ex | 20 ++++++++++++++----- .../web/plugs/http_signature_plug_test.exs | 19 ++++++++++++++++++ 5 files changed, 42 insertions(+), 5 deletions(-) create mode 100644 changelog.d/auth-fetch-exception.add diff --git a/changelog.d/auth-fetch-exception.add b/changelog.d/auth-fetch-exception.add new file mode 100644 index 000000000..98efb903e --- /dev/null +++ b/changelog.d/auth-fetch-exception.add @@ -0,0 +1 @@ +HTTPSignaturePlug: Add :authorized_fetch_mode_exceptions configuration \ No newline at end of file diff --git a/config/description.exs b/config/description.exs index b152981c4..2fed1a152 100644 --- a/config/description.exs +++ b/config/description.exs @@ -1771,6 +1771,12 @@ type: :boolean, description: "Require HTTP signatures for AP fetches" }, + %{ + key: :authorized_fetch_mode_exceptions, + type: {:list, :string}, + description: + "List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)" + }, %{ key: :note_replies_output_limit, type: :integer, diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index a4cae4dbb..06933ba76 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -279,6 +279,7 @@ Notes: * `deny_follow_blocked`: Whether to disallow following an account that has blocked the user in question * `sign_object_fetches`: Sign object fetches with HTTP signatures * `authorized_fetch_mode`: Require HTTP signatures for AP fetches +* 
`authorized_fetch_mode_exceptions`: List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this) ## Pleroma.User diff --git a/lib/pleroma/web/plugs/http_signature_plug.ex b/lib/pleroma/web/plugs/http_signature_plug.ex index e814efc2c..7ec202662 100644 --- a/lib/pleroma/web/plugs/http_signature_plug.ex +++ b/lib/pleroma/web/plugs/http_signature_plug.ex @@ -3,6 +3,8 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do + alias Pleroma.Helpers.InetHelper + import Plug.Conn import Phoenix.Controller, only: [get_format: 1, text: 2] require Logger @@ -89,12 +91,20 @@ defp has_signature_header?(conn) do defp maybe_require_signature(%{assigns: %{valid_signature: true}} = conn), do: conn - defp maybe_require_signature(conn) do + defp maybe_require_signature(%{remote_ip: remote_ip} = conn) do if Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do - conn - |> put_status(:unauthorized) - |> text("Request not signed") - |> halt() + exceptions = + Pleroma.Config.get([:activitypub, :authorized_fetch_mode_exceptions], []) + |> Enum.map(&InetHelper.parse_cidr/1) + + if Enum.any?(exceptions, fn x -> InetCidr.contains?(x, remote_ip) end) do + conn + else + conn + |> put_status(:unauthorized) + |> text("Request not signed") + |> halt() + end else conn end diff --git a/test/pleroma/web/plugs/http_signature_plug_test.exs b/test/pleroma/web/plugs/http_signature_plug_test.exs index 2d8fba3cd..deb7e4a23 100644 --- a/test/pleroma/web/plugs/http_signature_plug_test.exs +++ b/test/pleroma/web/plugs/http_signature_plug_test.exs @@ -81,5 +81,24 @@ test "halts the connection when `signature` header is not present", %{conn: conn assert conn.state == :sent assert conn.resp_body == "Request not signed" end + + test "exempts specific IPs from `authorized_fetch_mode_exceptions`", %{conn: conn} do + clear_config([:activitypub, :authorized_fetch_mode_exceptions], ["192.168.0.0/24"]) + + with_mock HTTPSignatures, validate_conn: fn _ -> false end do + conn = + conn + |> Map.put(:remote_ip, {192, 168, 0, 1}) + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> HTTPSignaturePlug.call(%{}) + + assert conn.remote_ip == {192, 168, 0, 1} + assert conn.halted == false + assert called(HTTPSignatures.validate_conn(:_)) + end + end end end From 7e3bbdded5dea73a0bad3a8905839e42d476e506 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 20 Dec 2023 23:39:12 +0000 Subject: [PATCH 22/93] Elixir 1.13 is the minimum required version --- .gitlab-ci.yml | 2 +- Dockerfile | 6 +++--- changelog.d/bump-elixir.change | 1 + mix.exs | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 changelog.d/bump-elixir.change diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index eb31a8086..8f10790da 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,7 +2,7 @@ image: git.pleroma.social:5050/pleroma/pleroma/ci-base variables: &global_variables # Only used for the release - ELIXIR_VER: 1.12.3 + ELIXIR_VER: 1.13.4 POSTGRES_DB: pleroma_test POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres diff --git a/Dockerfile b/Dockerfile index 69c3509de..72461305c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ ARG ELIXIR_IMG=hexpm/elixir -ARG ELIXIR_VER=1.12.3 -ARG ERLANG_VER=24.2.1 -ARG ALPINE_VER=3.17.0 +ARG ELIXIR_VER=1.13.4 +ARG ERLANG_VER=24.3.4.15 +ARG ALPINE_VER=3.17.5 FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as 
build diff --git a/changelog.d/bump-elixir.change b/changelog.d/bump-elixir.change new file mode 100644 index 000000000..afb25d4e7 --- /dev/null +++ b/changelog.d/bump-elixir.change @@ -0,0 +1 @@ +Elixir 1.13 is the minimum required version. diff --git a/mix.exs b/mix.exs index b4b77b161..2056e591d 100644 --- a/mix.exs +++ b/mix.exs @@ -5,7 +5,7 @@ def project do [ app: :pleroma, version: version("2.6.51"), - elixir: "~> 1.11", + elixir: "~> 1.13", elixirc_paths: elixirc_paths(Mix.env()), compilers: Mix.compilers(), elixirc_options: [warnings_as_errors: warnings_as_errors()], From ddb9e90c405369496fdf9e6dfed593eff8d5dc5c Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Thu, 28 Dec 2023 15:59:25 -0500 Subject: [PATCH 23/93] Update minimum elixir version found in various docs --- docs/installation/debian_based_jp.md | 2 +- docs/installation/generic_dependencies.include | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/installation/debian_based_jp.md b/docs/installation/debian_based_jp.md index 1424ad7f4..502eefaf8 100644 --- a/docs/installation/debian_based_jp.md +++ b/docs/installation/debian_based_jp.md @@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have - PostgreSQL 9.6以上 (Ubuntu16.04では9.5しか提供されていないので,[](https://www.postgresql.org/download/linux/ubuntu/)こちらから新しいバージョンを入手してください) - `postgresql-contrib` 9.6以上 (同上) -- Elixir 1.8 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください) +- Elixir 1.13 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください) - `erlang-dev` - `erlang-nox` - `git` diff --git a/docs/installation/generic_dependencies.include b/docs/installation/generic_dependencies.include index 3365a36a8..e0cfd3264 100644 --- a/docs/installation/generic_dependencies.include +++ b/docs/installation/generic_dependencies.include @@ -1,7 +1,7 @@ ## Required dependencies * PostgreSQL >=9.6 -* Elixir >=1.11.0 <1.15 +* Elixir >=1.13.0 <1.15 * Erlang OTP >=22.2.0 <26 * git * file / libmagic From 1bf3ae07b6719a762310615811a317035175ad2e Mon Sep 17 00:00:00 2001 From: faried nawaz Date: Mon, 18 Sep 2023 02:13:01 +0500 Subject: [PATCH 24/93] add options to mix pleroma.database prune_objects to delete more activities --- lib/mix/tasks/pleroma/database.ex | 165 +++++++- test/mix/tasks/pleroma/database_test.exs | 487 ++++++++++++++++++++++- 2 files changed, 625 insertions(+), 27 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 93ee57dc3..13ac6536c 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -67,43 +67,168 @@ def run(["prune_objects" | args]) do OptionParser.parse( args, strict: [ - vacuum: :boolean + vacuum: :boolean, + keep_threads: :boolean, + keep_non_public: :boolean, + prune_orphaned_activities: :boolean ] ) start_pleroma() deadline = Pleroma.Config.get([:instance, :remote_post_retention_days]) + time_deadline = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(deadline * 86_400)) - Logger.info("Pruning objects older than #{deadline} days") + log_message = "Pruning objects older than #{deadline} days" - time_deadline = - NaiveDateTime.utc_now() - |> NaiveDateTime.add(-(deadline * 86_400)) + log_message = + if Keyword.get(options, :keep_non_public) do + log_message <> ", keeping non public 
posts" + else + log_message + end - from(o in Object, - where: - fragment( - "?->'to' \\? ? OR ?->'cc' \\? ?", - o.data, - ^Pleroma.Constants.as_public(), - o.data, - ^Pleroma.Constants.as_public() - ), - where: o.inserted_at < ^time_deadline, - where: + log_message = + if Keyword.get(options, :keep_threads) do + log_message <> ", keeping threads intact" + else + log_message + end + + log_message = + if Keyword.get(options, :prune_orphaned_activities) do + log_message <> ", pruning orphaned activities" + else + log_message + end + + log_message = + if Keyword.get(options, :vacuum) do + log_message <> + ", doing a full vacuum (you shouldn't do this as a recurring maintanance task)" + else + log_message + end + + Logger.info(log_message) + + if Keyword.get(options, :keep_threads) do + # We want to delete objects from threads where + # 1. the newest post is still old + # 2. none of the activities is local + # 3. none of the activities is bookmarked + # 4. optionally none of the posts is non-public + deletable_context = + if Keyword.get(options, :keep_non_public) do + Pleroma.Activity + |> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id) + |> group_by([a], fragment("? ->> 'context'::text", a.data)) + |> having( + [a], + not fragment( + # Posts (checked on Create Activity) is non-public + "bool_or((not(?->'to' \\? ? OR ?->'cc' \\? ?)) and ? ->> 'type' = 'Create')", + a.data, + ^Pleroma.Constants.as_public(), + a.data, + ^Pleroma.Constants.as_public(), + a.data + ) + ) + else + Pleroma.Activity + |> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id) + |> group_by([a], fragment("? ->> 'context'::text", a.data)) + end + |> having([a], max(a.updated_at) < ^time_deadline) + |> having([a], not fragment("bool_or(?)", a.local)) + |> having([_, b], fragment("max(?::text) is null", b.id)) + |> select([a], fragment("? ->> 'context'::text", a.data)) + + Pleroma.Object + |> where([o], fragment("? ->> 'context'::text", o.data) in subquery(deletable_context)) + else + if Keyword.get(options, :keep_non_public) do + Pleroma.Object + |> where( + [o], + fragment( + "?->'to' \\? ? OR ?->'cc' \\? ?", + o.data, + ^Pleroma.Constants.as_public(), + o.data, + ^Pleroma.Constants.as_public() + ) + ) + else + Pleroma.Object + end + |> where([o], o.updated_at < ^time_deadline) + |> where( + [o], fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host()) - ) + ) + end |> Repo.delete_all(timeout: :infinity) - prune_hashtags_query = """ + if !Keyword.get(options, :keep_threads) do + # Without the --keep-threads option, it's possible that bookmarked + # objects have been deleted. We remove the corresponding bookmarks. 
+ """ + delete from public.bookmarks + where id in ( + select b.id from public.bookmarks b + left join public.activities a on b.activity_id = a.id + left join public.objects o on a."data" ->> 'object' = o.data ->> 'id' + where o.id is null + ) + """ + |> Repo.query([], timeout: :infinity) + end + + if Keyword.get(options, :prune_orphaned_activities) do + # Prune activities who link to a single object + """ + delete from public.activities + where id in ( + select a.id from public.activities a + left join public.objects o on a.data ->> 'object' = o.data ->> 'id' + left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id' + left join public.users u on a.data ->> 'object' = u.ap_id + where not a.local + and jsonb_typeof(a."data" -> 'object') = 'string' + and o.id is null + and a2.id is null + and u.id is null + ) + """ + |> Repo.query([], timeout: :infinity) + + # Prune activities who link to an array of objects + """ + delete from public.activities + where id in ( + select a.id from public.activities a + join json_array_elements_text((a."data" -> 'object')::json) as j on jsonb_typeof(a."data" -> 'object') = 'array' + left join public.objects o on j.value = o.data ->> 'id' + left join public.activities a2 on j.value = a2.data ->> 'id' + left join public.users u on j.value = u.ap_id + group by a.id + having max(o.data ->> 'id') is null + and max(a2.data ->> 'id') is null + and max(u.ap_id) is null + ) + """ + |> Repo.query([], timeout: :infinity) + end + + """ DELETE FROM hashtags AS ht WHERE NOT EXISTS ( SELECT 1 FROM hashtags_objects hto WHERE ht.id = hto.hashtag_id) """ - - Repo.query(prune_hashtags_query) + |> Repo.query() if Keyword.get(options, :vacuum) do Maintenance.vacuum("full") diff --git a/test/mix/tasks/pleroma/database_test.exs b/test/mix/tasks/pleroma/database_test.exs index fbc939171..01fd97e2d 100644 --- a/test/mix/tasks/pleroma/database_test.exs +++ b/test/mix/tasks/pleroma/database_test.exs @@ -7,6 +7,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do use Oban.Testing, repo: Pleroma.Repo alias Pleroma.Activity + alias Pleroma.Bookmark alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User @@ -45,28 +46,500 @@ test "it replaces objects with references" do end describe "prune_objects" do - test "it prunes old objects from the database" do - insert(:note) + setup do deadline = Pleroma.Config.get([:instance, :remote_post_retention_days]) + 1 - date = + old_insert_date = Timex.now() |> Timex.shift(days: -deadline) |> Timex.to_naive_datetime() |> NaiveDateTime.truncate(:second) - %{id: id} = + %{old_insert_date: old_insert_date} + end + + test "it prunes old objects from the database", %{old_insert_date: old_insert_date} do + insert(:note) + + %{id: note_remote_public_id} = :note |> insert() - |> Ecto.Changeset.change(%{inserted_at: date}) + |> Ecto.Changeset.change(%{updated_at: old_insert_date}) |> Repo.update!() - assert length(Repo.all(Object)) == 2 + note_remote_non_public = + %{id: note_remote_non_public_id, data: note_remote_non_public_data} = + :note + |> insert() + + note_remote_non_public + |> Ecto.Changeset.change(%{ + updated_at: old_insert_date, + data: note_remote_non_public_data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + assert length(Repo.all(Object)) == 3 Mix.Tasks.Pleroma.Database.run(["prune_objects"]) assert length(Repo.all(Object)) == 1 - refute Object.get_by_id(id) + refute Object.get_by_id(note_remote_public_id) + refute Object.get_by_id(note_remote_non_public_id) + end + + test "it cleans up bookmarks", 
%{old_insert_date: old_insert_date} do + user = insert(:user) + {:ok, old_object_activity} = CommonAPI.post(user, %{status: "yadayada"}) + + Repo.one(Object) + |> Ecto.Changeset.change(%{updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, new_object_activity} = CommonAPI.post(user, %{status: "yadayada"}) + + {:ok, _} = Bookmark.create(user.id, old_object_activity.id) + {:ok, _} = Bookmark.create(user.id, new_object_activity.id) + + assert length(Repo.all(Object)) == 2 + assert length(Repo.all(Bookmark)) == 2 + + Mix.Tasks.Pleroma.Database.run(["prune_objects"]) + + assert length(Repo.all(Object)) == 1 + assert length(Repo.all(Bookmark)) == 1 + refute Bookmark.get(user.id, old_object_activity.id) + end + + test "with the --keep-non-public option it still keeps non-public posts even if they are not local", + %{old_insert_date: old_insert_date} do + insert(:note) + + %{id: note_remote_id} = + :note + |> insert() + |> Ecto.Changeset.change(%{updated_at: old_insert_date}) + |> Repo.update!() + + note_remote_non_public = + %{data: note_remote_non_public_data} = + :note + |> insert() + + note_remote_non_public + |> Ecto.Changeset.change(%{ + updated_at: old_insert_date, + data: note_remote_non_public_data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + assert length(Repo.all(Object)) == 3 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-non-public"]) + + assert length(Repo.all(Object)) == 2 + refute Object.get_by_id(note_remote_id) + end + + test "with the --keep-threads and --keep-non-public option it keeps old threads with non-public replies even if the interaction is not local", + %{old_insert_date: old_insert_date} do + # For non-public we only check Create Activities because only these are relevant for threads + # Flags are always non-public, Announces from relays can be non-public... 
+ + remote_user1 = insert(:user, local: false) + remote_user2 = insert(:user, local: false) + + # Old remote non-public reply (should be kept) + {:ok, old_remote_post1_activity} = + CommonAPI.post(remote_user1, %{status: "some thing", local: false}) + + old_remote_post1_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_remote_non_public_reply_activity} = + CommonAPI.post(remote_user2, %{ + status: "some reply", + in_reply_to_status_id: old_remote_post1_activity.id + }) + + old_remote_non_public_reply_activity + |> Ecto.Changeset.change(%{ + local: false, + updated_at: old_insert_date, + data: old_remote_non_public_reply_activity.data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + # Old remote non-public Announce (should be removed) + {:ok, old_remote_post2_activity = %{data: %{"object" => old_remote_post2_id}}} = + CommonAPI.post(remote_user1, %{status: "some thing", local: false}) + + old_remote_post2_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_remote_non_public_repeat_activity} = + CommonAPI.repeat(old_remote_post2_activity.id, remote_user2) + + old_remote_non_public_repeat_activity + |> Ecto.Changeset.change(%{ + local: false, + updated_at: old_insert_date, + data: old_remote_non_public_repeat_activity.data |> update_in(["to"], fn _ -> [] end) + }) + |> Repo.update!() + + assert length(Repo.all(Object)) == 3 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads", "--keep-non-public"]) + + Repo.all(Pleroma.Activity) + assert length(Repo.all(Object)) == 2 + refute Object.get_by_ap_id(old_remote_post2_id) + end + + test "with the --keep-threads option it still keeps non-old threads even with no local interactions" do + remote_user = insert(:user, local: false) + remote_user2 = insert(:user, local: false) + + {:ok, remote_post_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + {:ok, remote_post_reply_activity} = + CommonAPI.post(remote_user2, %{ + status: "some reply", + in_reply_to_status_id: remote_post_activity.id + }) + + remote_post_activity + |> Ecto.Changeset.change(%{local: false}) + |> Repo.update!() + + remote_post_reply_activity + |> Ecto.Changeset.change(%{local: false}) + |> Repo.update!() + + assert length(Repo.all(Object)) == 2 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 2 + end + + test "with the --keep-threads option it deletes old threads with no local interaction", %{ + old_insert_date: old_insert_date + } do + remote_user = insert(:user, local: false) + remote_user2 = insert(:user, local: false) + + {:ok, old_remote_post_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_remote_post_reply_activity} = + CommonAPI.post(remote_user2, %{ + status: "some reply", + in_reply_to_status_id: old_remote_post_activity.id + }) + + old_remote_post_reply_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_favourite_activity} = + CommonAPI.favorite(remote_user2, old_remote_post_activity.id) + + old_favourite_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_repeat_activity} = CommonAPI.repeat(old_remote_post_activity.id, 
remote_user2) + + old_repeat_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + assert length(Repo.all(Object)) == 2 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 0 + end + + test "with the --keep-threads option it keeps old threads with local interaction", %{ + old_insert_date: old_insert_date + } do + remote_user = insert(:user, local: false) + local_user = insert(:user, local: true) + + # local reply + {:ok, old_remote_post1_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post1_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_local_post2_reply_activity} = + CommonAPI.post(local_user, %{ + status: "some reply", + in_reply_to_status_id: old_remote_post1_activity.id + }) + + old_local_post2_reply_activity + |> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date}) + |> Repo.update!() + + # local Like + {:ok, old_remote_post3_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post3_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_favourite_activity} = CommonAPI.favorite(local_user, old_remote_post3_activity.id) + + old_favourite_activity + |> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date}) + |> Repo.update!() + + # local Announce + {:ok, old_remote_post4_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post4_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + {:ok, old_repeat_activity} = CommonAPI.repeat(old_remote_post4_activity.id, local_user) + + old_repeat_activity + |> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date}) + |> Repo.update!() + + assert length(Repo.all(Object)) == 4 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 4 + end + + test "with the --keep-threads option it keeps old threads with bookmarked posts", %{ + old_insert_date: old_insert_date + } do + remote_user = insert(:user, local: false) + local_user = insert(:user, local: true) + + {:ok, old_remote_post_activity} = + CommonAPI.post(remote_user, %{status: "some thing", local: false}) + + old_remote_post_activity + |> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date}) + |> Repo.update!() + + Pleroma.Bookmark.create(local_user.id, old_remote_post_activity.id) + + assert length(Repo.all(Object)) == 1 + + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--keep-threads"]) + + assert length(Repo.all(Object)) == 1 + end + + test "We don't have unexpected tables which may contain objects that are referenced by activities" do + # We can delete orphaned activities. For that we look for the objects they reference in the 'objects', 'activities', and 'users' table. + # If someone adds another table with objects (idk, maybe with separate relations, or collections or w/e), then we need to make sure we + # add logic for that in the 'prune_objects' task so that we don't wrongly delete their corresponding activities. + # So when someone adds (or removes) a table, this test will fail. 
+ # Either the table contains objects which can be referenced from the activities table + # => in that case the prune_objects job should be adapted so we don't delete activities who still have the referenced object. + # Or it doesn't contain objects which can be referenced from the activities table + # => in that case you can add/remove the table to/from this (sorted) list. + + assert Repo.query!( + "SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE';" + ).rows + |> Enum.sort() == [ + ["activities"], + ["announcement_read_relationships"], + ["announcements"], + ["apps"], + ["backups"], + ["bookmarks"], + ["chat_message_references"], + ["chats"], + ["config"], + ["conversation_participation_recipient_ships"], + ["conversation_participations"], + ["conversations"], + ["counter_cache"], + ["data_migration_failed_ids"], + ["data_migrations"], + ["deliveries"], + ["filters"], + ["following_relationships"], + ["hashtags"], + ["hashtags_objects"], + ["instances"], + ["lists"], + ["markers"], + ["mfa_tokens"], + ["moderation_log"], + ["notifications"], + ["oauth_authorizations"], + ["oauth_tokens"], + ["oban_jobs"], + ["oban_peers"], + ["objects"], + ["password_reset_tokens"], + ["push_subscriptions"], + ["registrations"], + ["report_notes"], + ["scheduled_activities"], + ["schema_migrations"], + ["thread_mutes"], + # ["user_follows_hashtag"], # not in pleroma + # ["user_frontend_setting_profiles"], # not in pleroma + ["user_invite_tokens"], + ["user_notes"], + ["user_relationships"], + ["users"] + ] + end + + test "it prunes orphaned activities with the --prune-orphaned-activities" do + # Add a remote activity which references an Object + %Object{} |> Map.merge(%{data: %{"id" => "object_for_activity"}}) |> Repo.insert() + + %Activity{} + |> Map.merge(%{ + local: false, + data: %{"id" => "remote_activity_with_object", "object" => "object_for_activity"} + }) + |> Repo.insert() + + # Add a remote activity which references an activity + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_with_activity", + "object" => "remote_activity_with_object" + } + }) + |> Repo.insert() + + # Add a remote activity which references an Actor + %User{} |> Map.merge(%{ap_id: "actor"}) |> Repo.insert() + + %Activity{} + |> Map.merge(%{ + local: false, + data: %{"id" => "remote_activity_with_actor", "object" => "actor"} + }) + |> Repo.insert() + + # Add a remote activity without existing referenced object, activity or actor + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_without_existing_referenced_object", + "object" => "non_existing" + } + }) + |> Repo.insert() + + # Add a local activity without existing referenced object, activity or actor + %Activity{} + |> Map.merge(%{ + local: true, + data: %{"id" => "local_activity_with_actor", "object" => "non_existing"} + }) + |> Repo.insert() + + # The remote activities without existing reference, and only the remote activities without existing reference, are deleted + # if, and only if, we provide the --prune-orphaned-activities option + assert length(Repo.all(Activity)) == 5 + Mix.Tasks.Pleroma.Database.run(["prune_objects"]) + assert length(Repo.all(Activity)) == 5 + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--prune-orphaned-activities"]) + activities = Repo.all(Activity) + + assert "remote_activity_without_existing_referenced_object" not in Enum.map( + activities, + fn a -> a.data["id"] end + ) + + assert length(activities) == 4 + end + + 
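The test above exercises the single-object criteria: a remote activity is only considered orphaned when its string `object` reference resolves to no row in the objects, activities, or users tables. Before running `--prune-orphaned-activities` on a large instance, it can help to estimate the impact first. The query below is a read-only sketch, not part of the patch, that counts candidates for the single string `object` case only; it mirrors the delete used by the task and can be run from an attached console.

```elixir
# Dry-run estimate; mirrors the single-object criteria of the prune task.
# The array-of-objects case covered by the next test is not included here.
{:ok, %{rows: [[count]]}} =
  Pleroma.Repo.query(
    """
    select count(*) from public.activities a
    left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
    left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
    left join public.users u on a.data ->> 'object' = u.ap_id
    where not a.local
      and jsonb_typeof(a."data" -> 'object') = 'string'
      and o.id is null
      and a2.id is null
      and u.id is null
    """,
    [],
    timeout: :infinity
  )

IO.puts("orphaned activity candidates: #{count}")
```

The full prune can then be invoked the way these tests do, for example `Mix.Tasks.Pleroma.Database.run(["prune_objects", "--prune-orphaned-activities"])`.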
test "it prunes orphaned activities with the --prune-orphaned-activities when the objects are referenced from an array" do + %Object{} |> Map.merge(%{data: %{"id" => "existing_object"}}) |> Repo.insert() + %User{} |> Map.merge(%{ap_id: "existing_actor"}) |> Repo.insert() + + # Multiple objects, one object exists (keep) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_existing_object", + "object" => ["non_ existing_object", "existing_object"] + } + }) + |> Repo.insert() + + # Multiple objects, one actor exists (keep) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_existing_actor", + "object" => ["non_ existing_object", "existing_actor"] + } + }) + |> Repo.insert() + + # Multiple objects, one activity exists (keep) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_existing_activity", + "object" => ["non_ existing_object", "remote_activity_existing_actor"] + } + }) + |> Repo.insert() + + # Multiple objects none exist (prune) + %Activity{} + |> Map.merge(%{ + local: false, + data: %{ + "id" => "remote_activity_without_existing_referenced_object", + "object" => ["owo", "whats_this"] + } + }) + |> Repo.insert() + + assert length(Repo.all(Activity)) == 4 + Mix.Tasks.Pleroma.Database.run(["prune_objects"]) + assert length(Repo.all(Activity)) == 4 + Mix.Tasks.Pleroma.Database.run(["prune_objects", "--prune-orphaned-activities"]) + activities = Repo.all(Activity) + assert length(activities) == 3 + + assert "remote_activity_without_existing_referenced_object" not in Enum.map( + activities, + fn a -> a.data["id"] end + ) + + assert length(activities) == 3 end end From fdc3cbb8cbefad2161cc408b4440420d6e4b2a88 Mon Sep 17 00:00:00 2001 From: faried nawaz Date: Mon, 18 Sep 2023 02:13:52 +0500 Subject: [PATCH 25/93] add documentation for the prune_objects mix task options --- changelog.d/akkoma-prune-options.add | 1 + docs/administration/CLI_tasks/database.md | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 4 deletions(-) create mode 100644 changelog.d/akkoma-prune-options.add diff --git a/changelog.d/akkoma-prune-options.add b/changelog.d/akkoma-prune-options.add new file mode 100644 index 000000000..6bc5e7f92 --- /dev/null +++ b/changelog.d/akkoma-prune-options.add @@ -0,0 +1 @@ +Add options to the mix prune_objects task diff --git a/docs/administration/CLI_tasks/database.md b/docs/administration/CLI_tasks/database.md index c53c49921..c5e51e555 100644 --- a/docs/administration/CLI_tasks/database.md +++ b/docs/administration/CLI_tasks/database.md @@ -21,16 +21,18 @@ Replaces embedded objects with references to them in the `objects` table. Only n mix pleroma.database remove_embedded_objects [option ...] ``` - ### Options - `--vacuum` - run `VACUUM FULL` after the embedded objects are replaced with their references ## Prune old remote posts from the database -This will prune remote posts older than 90 days (configurable with [`config :pleroma, :instance, remote_post_retention_days`](../../configuration/cheatsheet.md#instance)) from the database, they will be refetched from source when accessed. +This will prune remote posts older than 90 days (configurable with [`config :pleroma, :instance, remote_post_retention_days`](../../configuration/cheatsheet.md#instance)) from the database. Pruned posts may be refetched in some cases. + +!!! note + The disk space will only be reclaimed after a proper vacuum. 
By default Postgresql does this for you on a regular basis, but if your instance has been running for a long time and there are many rows deleted, it may be advantageous to use `VACUUM FULL` (e.g. by using the `--vacuum` option). !!! danger - The disk space will only be reclaimed after `VACUUM FULL`. You may run out of disk space during the execution of the task or vacuuming if you don't have about 1/3rds of the database size free. + You may run out of disk space during the execution of the task or vacuuming if you don't have about 1/3rds of the database size free. Vacuum causes a substantial increase in I/O traffic, and may lead to a degraded experience while it is running. === "OTP" @@ -45,7 +47,11 @@ This will prune remote posts older than 90 days (configurable with [`config :ple ``` ### Options -- `--vacuum` - run `VACUUM FULL` after the objects are pruned + +- `--keep-threads` - Don't prune posts when they are part of a thread where at least one post has seen local interaction (e.g. one of the posts is a local post, or is favourited by a local user, or has been repeated by a local user...). It also won't delete posts when at least one of the posts in that thread is kept (e.g. because one of the posts has seen recent activity). +- `--keep-non-public` - Keep non-public posts like DM's and followers-only, even if they are remote. +- `--prune-orphaned-activities` - Also prune orphaned activities afterwards. Activities are things like Like, Create, Announce, Flag (aka reports). They can significantly help reduce the database size. Note: this can take a very long time. +- `--vacuum` - Run `VACUUM FULL` after the objects are pruned. This should not be used on a regular basis, but is useful if your instance has been running for a long time before pruning. ## Create a conversation for all existing DMs @@ -93,6 +99,9 @@ Can be safely re-run ## Vacuum the database +!!! note + By default Postgresql has an autovacuum deamon running. While the tasks described here can help in some cases, they shouldn't be needed on a regular basis. See [the Postgresql docs on vacuuming](https://www.postgresql.org/docs/current/sql-vacuum.html) for more information on this. + ### Analyze Running an `analyze` vacuum job can improve performance by updating statistics used by the query planner. 
**It is safe to cancel this.** From 226874c9d603be72699d5aa5434616efffe3f239 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 20 May 2024 13:12:12 +0400 Subject: [PATCH 26/93] CI: Add new builders for base images --- .gitlab-ci.yml | 6 +++--- ci/elixir-1.13/Dockerfile | 8 ++++++++ ci/elixir-1.13/build_and_push.sh | 1 + ci/elixir-1.15-otp25/build_and_push.sh | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 ci/elixir-1.13/Dockerfile create mode 100755 ci/elixir-1.13/build_and_push.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2e321c978..eba769af8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: git.pleroma.social:5050/pleroma/pleroma/ci-base +image: git.pleroma.social:5050/pleroma/pleroma/ci-base:1.13.4-otp24 variables: &global_variables # Only used for the release @@ -72,7 +72,7 @@ check-changelog: tags: - amd64 -build-1.12.3: +build-1.13.4: extends: - .build_changes_policy - .using-ci-base @@ -85,7 +85,7 @@ build-1.15.7-otp-25: - .build_changes_policy - .using-ci-base stage: build - image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15 + image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 allow_failure: true script: - mix compile --force diff --git a/ci/elixir-1.13/Dockerfile b/ci/elixir-1.13/Dockerfile new file mode 100644 index 000000000..b8bceb3d9 --- /dev/null +++ b/ci/elixir-1.13/Dockerfile @@ -0,0 +1,8 @@ +FROM elixir:1.13.4-otp-24 + +# Single RUN statement, otherwise intermediate images are created +# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run +RUN apt-get update &&\ + apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\ + mix local.hex --force &&\ + mix local.rebar --force diff --git a/ci/elixir-1.13/build_and_push.sh b/ci/elixir-1.13/build_and_push.sh new file mode 100755 index 000000000..53af4245f --- /dev/null +++ b/ci/elixir-1.13/build_and_push.sh @@ -0,0 +1 @@ +docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13-otp24 --push . diff --git a/ci/elixir-1.15-otp25/build_and_push.sh b/ci/elixir-1.15-otp25/build_and_push.sh index 06fe74f34..a28e0d33c 100755 --- a/ci/elixir-1.15-otp25/build_and_push.sh +++ b/ci/elixir-1.15-otp25/build_and_push.sh @@ -1 +1 @@ -docker buildx build --platform linux/amd64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push . +docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push . From f8411a351de07f14fdc9c9eca30109feaadf6f93 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 20 May 2024 13:30:31 +0400 Subject: [PATCH 27/93] CI: Specify version fully in base image tag --- ci/elixir-1.13/build_and_push.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/elixir-1.13/build_and_push.sh b/ci/elixir-1.13/build_and_push.sh index 53af4245f..d848344a3 100755 --- a/ci/elixir-1.13/build_and_push.sh +++ b/ci/elixir-1.13/build_and_push.sh @@ -1 +1 @@ -docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13-otp24 --push . +docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp24 --push . From f5c029524752e1820ea29f6557647823ae89ecf1 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 20 May 2024 13:32:25 +0400 Subject: [PATCH 28/93] CI: Specify correct image name. 
--- .gitlab-ci.yml | 2 +- ci/elixir-1.13/build_and_push.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index eba769af8..21d7b2242 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: git.pleroma.social:5050/pleroma/pleroma/ci-base:1.13.4-otp24 +image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 variables: &global_variables # Only used for the release diff --git a/ci/elixir-1.13/build_and_push.sh b/ci/elixir-1.13/build_and_push.sh index d848344a3..64e1856db 100755 --- a/ci/elixir-1.13/build_and_push.sh +++ b/ci/elixir-1.13/build_and_push.sh @@ -1 +1 @@ -docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp24 --push . +docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push . From 818712f99f165011aaaad5fd82c40304004ace23 Mon Sep 17 00:00:00 2001 From: "Haelwenn (lanodan) Monnier" Date: Thu, 23 May 2024 00:35:38 +0200 Subject: [PATCH 29/93] pleroma_ctl: Use realpath(1) instead of readlink(1) From realpath(1) in POSIX 202x Draft 4.1: > If file does not name a symbolic link, readlink shall write a diagnostic > message to standard error and exit with non-zero status. Which also doesn't includes `-f`, in preference of `realpath`. --- changelog.d/realpath-over-readlink.fix | 1 + rel/files/bin/pleroma_ctl | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/realpath-over-readlink.fix diff --git a/changelog.d/realpath-over-readlink.fix b/changelog.d/realpath-over-readlink.fix new file mode 100644 index 000000000..479561b95 --- /dev/null +++ b/changelog.d/realpath-over-readlink.fix @@ -0,0 +1 @@ +pleroma_ctl: Use realpath(1) instead of readlink(1) diff --git a/rel/files/bin/pleroma_ctl b/rel/files/bin/pleroma_ctl index 87c486514..6f0dba3a8 100755 --- a/rel/files/bin/pleroma_ctl +++ b/rel/files/bin/pleroma_ctl @@ -134,7 +134,7 @@ if [ -z "$1" ] || [ "$1" = "help" ]; then " else - SCRIPT=$(readlink -f "$0") + SCRIPT=$(realpath "$0") SCRIPTPATH=$(dirname "$SCRIPT") FULL_ARGS="$*" From 618b77071afb480b763a493bfcd9b376effedaaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?marcin=20miko=C5=82ajczak?= Date: Sat, 25 May 2024 09:10:11 +0200 Subject: [PATCH 30/93] Update pleroma_api.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: marcin mikołajczak --- changelog.d/api-docs-2.skip | 0 docs/development/API/pleroma_api.md | 4 +--- 2 files changed, 1 insertion(+), 3 deletions(-) create mode 100644 changelog.d/api-docs-2.skip diff --git a/changelog.d/api-docs-2.skip b/changelog.d/api-docs-2.skip new file mode 100644 index 000000000..e69de29bb diff --git a/docs/development/API/pleroma_api.md b/docs/development/API/pleroma_api.md index 267dfc1ec..57d333ffe 100644 --- a/docs/development/API/pleroma_api.md +++ b/docs/development/API/pleroma_api.md @@ -295,9 +295,7 @@ See [Admin-API](admin_api.md) "id": "9umDrYheeY451cQnEe", "name": "Read later", "emoji": "🕓", - "source": { - "emoji": "🕓" - } + "emoji_url": null } ] ``` From 61a3b793165e92d6f23a2e59fb9b95e06737cf25 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 25 May 2024 14:20:47 -0400 Subject: [PATCH 31/93] Search backend healthcheck process --- changelog.d/search-healthcheck.add | 1 + config/config.exs | 2 +- lib/pleroma/application.ex | 3 +- lib/pleroma/search.ex | 5 ++ lib/pleroma/search/database_search.ex | 3 + 
lib/pleroma/search/healthcheck.ex | 85 +++++++++++++++++++++++++++ lib/pleroma/search/meilisearch.ex | 11 ++++ lib/pleroma/search/search_backend.ex | 8 +++ 8 files changed, 116 insertions(+), 2 deletions(-) create mode 100644 changelog.d/search-healthcheck.add create mode 100644 lib/pleroma/search/healthcheck.ex diff --git a/changelog.d/search-healthcheck.add b/changelog.d/search-healthcheck.add new file mode 100644 index 000000000..4974925e7 --- /dev/null +++ b/changelog.d/search-healthcheck.add @@ -0,0 +1 @@ +Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue diff --git a/config/config.exs b/config/config.exs index b69044a2b..8b9a588b7 100644 --- a/config/config.exs +++ b/config/config.exs @@ -579,7 +579,7 @@ attachments_cleanup: 1, new_users_digest: 1, mute_expire: 5, - search_indexing: 10, + search_indexing: [limit: 10, paused: true], rich_media_expiration: 2 ], plugins: [Oban.Plugins.Pruner], diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index 649bb11c8..d266d1836 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -109,7 +109,8 @@ def start(_type, _args) do streamer_registry() ++ background_migrators() ++ shout_child(shout_enabled?()) ++ - [Pleroma.Gopher.Server] + [Pleroma.Gopher.Server] ++ + [Pleroma.Search.Healthcheck] # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html # for other strategies and supported options diff --git a/lib/pleroma/search.ex b/lib/pleroma/search.ex index 3b266e59b..e8dbcca1f 100644 --- a/lib/pleroma/search.ex +++ b/lib/pleroma/search.ex @@ -14,4 +14,9 @@ def search(query, options) do search_module.search(options[:for_user], query, options) end + + def healthcheck_endpoints do + search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity) + search_module.healthcheck_endpoints + end end diff --git a/lib/pleroma/search/database_search.ex b/lib/pleroma/search/database_search.ex index 31bfc7e33..11e99e7f1 100644 --- a/lib/pleroma/search/database_search.ex +++ b/lib/pleroma/search/database_search.ex @@ -48,6 +48,9 @@ def add_to_index(_activity), do: :ok @impl true def remove_from_index(_object), do: :ok + @impl true + def healthcheck_endpoints, do: nil + def maybe_restrict_author(query, %User{} = author) do Activity.Queries.by_author(query, author) end diff --git a/lib/pleroma/search/healthcheck.ex b/lib/pleroma/search/healthcheck.ex new file mode 100644 index 000000000..495aee930 --- /dev/null +++ b/lib/pleroma/search/healthcheck.ex @@ -0,0 +1,85 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2024 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only +defmodule Pleroma.Search.Healthcheck do + @doc """ + Monitors health of search backend to control processing of events based on health and availability. 
+ """ + use GenServer + require Logger + + @tick :timer.seconds(60) + @queue :search_indexing + + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + @impl true + def init(_) do + state = %{healthy: false} + {:ok, state, {:continue, :start}} + end + + @impl true + def handle_continue(:start, state) do + tick() + {:noreply, state} + end + + @impl true + def handle_info(:check, state) do + urls = Pleroma.Search.healthcheck_endpoints() + + new_state = + if healthy?(urls) do + Oban.resume_queue(queue: @queue) + Map.put(state, :healthy, true) + else + Oban.pause_queue(queue: @queue) + Map.put(state, :healthy, false) + end + + maybe_log_state_change(state, new_state) + + tick() + {:noreply, new_state} + end + + @impl true + def handle_call(:check, _from, state) do + status = Map.get(state, :healthy) + + {:reply, status, state, :hibernate} + end + + defp healthy?([]), do: true + + defp healthy?(urls) when is_list(urls) do + Enum.all?( + urls, + fn url -> + case Pleroma.HTTP.get(url) do + {:ok, %{status: 200}} -> true + _ -> false + end + end + ) + end + + defp healthy?(_), do: true + + defp tick do + Process.send_after(self(), :check, @tick) + end + + defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do + Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure") + end + + defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do + Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass") + end + + defp maybe_log_state_change(_, _), do: :ok +end diff --git a/lib/pleroma/search/meilisearch.ex b/lib/pleroma/search/meilisearch.ex index 2bff663e8..08c2f3d86 100644 --- a/lib/pleroma/search/meilisearch.ex +++ b/lib/pleroma/search/meilisearch.ex @@ -178,4 +178,15 @@ def add_to_index(activity) do def remove_from_index(object) do meili_delete("/indexes/objects/documents/#{object.id}") end + + @impl true + def healthcheck_endpoints do + endpoint = + Config.get([Pleroma.Search.Meilisearch, :url]) + |> URI.parse() + |> Map.put(:path, "/health") + |> URI.to_string() + + [endpoint] + end end diff --git a/lib/pleroma/search/search_backend.ex b/lib/pleroma/search/search_backend.ex index 68bc48cec..13c887bc2 100644 --- a/lib/pleroma/search/search_backend.ex +++ b/lib/pleroma/search/search_backend.ex @@ -21,4 +21,12 @@ defmodule Pleroma.Search.SearchBackend do from index. """ @callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()} + + @doc """ + Healthcheck endpoints of search backend infrastructure to monitor for controlling + processing of jobs in the Oban queue. + + It is expected a 200 response is healthy and other responses are unhealthy. 
+ """ + @callback healthcheck_endpoints :: list() | nil end From 3474b42ce396150b21f26ed35bea46ad61f57d5f Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 25 May 2024 16:55:29 -0400 Subject: [PATCH 32/93] Drop TTL to 5 seconds --- lib/pleroma/search/healthcheck.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/search/healthcheck.ex b/lib/pleroma/search/healthcheck.ex index 495aee930..9a2d9fdd6 100644 --- a/lib/pleroma/search/healthcheck.ex +++ b/lib/pleroma/search/healthcheck.ex @@ -8,7 +8,7 @@ defmodule Pleroma.Search.Healthcheck do use GenServer require Logger - @tick :timer.seconds(60) + @tick :timer.seconds(5) @queue :search_indexing def start_link(_) do From 354b700bedf8ad6e9187245977165ebd7bc2fa1c Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sun, 26 May 2024 14:01:00 -0400 Subject: [PATCH 33/93] Assert that AWS URLs without query parameters do not crash --- .../web/rich_media/parser/ttl/aws_signed_url_test.exs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs b/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs index cd8be8675..cc28aa7f3 100644 --- a/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs +++ b/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs @@ -10,6 +10,7 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrlTest do alias Pleroma.UnstubbedConfigMock, as: ConfigMock alias Pleroma.Web.RichMedia.Card + alias Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl setup do ConfigMock @@ -82,6 +83,12 @@ test "s3 signed url is parsed and correct ttl is set for rich media" do assert DateTime.diff(scheduled_at, timestamp_dt) == valid_till end + test "AWS URL for an image without expiration works" do + og_data = %{"image" => "https://amazonaws.com/image.png"} + + assert is_nil(AwsSignedUrl.ttl(og_data, "")) + end + defp construct_s3_url(timestamp, valid_till) do "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{timestamp}&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host" end From 807782b7f96ee0e053ad59b464766d750f8a8800 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 25 May 2024 16:27:59 -0400 Subject: [PATCH 34/93] Fix rich media parsing some Amazon URLs --- changelog.d/richmediattl.fix | 1 + lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/richmediattl.fix diff --git a/changelog.d/richmediattl.fix b/changelog.d/richmediattl.fix new file mode 100644 index 000000000..98de63015 --- /dev/null +++ b/changelog.d/richmediattl.fix @@ -0,0 +1 @@ +Parsing of RichMedia TTLs for Amazon URLs when query parameters are nil diff --git a/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex b/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex index 948c727e1..1172a120a 100644 --- a/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex +++ b/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex @@ -23,7 +23,7 @@ defp aws_signed_url?(image) when is_binary(image) and image != "" do %URI{host: host, query: query} = URI.parse(image) is_binary(host) and String.contains?(host, "amazonaws.com") and - String.contains?(query, "X-Amz-Expires") + is_binary(query) and 
String.contains?(query, "X-Amz-Expires") end defp aws_signed_url?(_), do: nil From f2b0d5f1d02e243a7a1a6f339b59e5abcb8e1bd8 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sun, 26 May 2024 14:11:41 -0400 Subject: [PATCH 35/93] Make it easier to read the state for debugging purposes and expose functions for testing --- lib/pleroma/search/healthcheck.ex | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/pleroma/search/healthcheck.ex b/lib/pleroma/search/healthcheck.ex index 9a2d9fdd6..170f29344 100644 --- a/lib/pleroma/search/healthcheck.ex +++ b/lib/pleroma/search/healthcheck.ex @@ -32,7 +32,7 @@ def handle_info(:check, state) do urls = Pleroma.Search.healthcheck_endpoints() new_state = - if healthy?(urls) do + if check(urls) do Oban.resume_queue(queue: @queue) Map.put(state, :healthy, true) else @@ -47,15 +47,15 @@ def handle_info(:check, state) do end @impl true - def handle_call(:check, _from, state) do - status = Map.get(state, :healthy) - - {:reply, status, state, :hibernate} + def handle_call(:state, _from, state) do + {:reply, state, state, :hibernate} end - defp healthy?([]), do: true + def state, do: GenServer.call(__MODULE__, :state) - defp healthy?(urls) when is_list(urls) do + def check([]), do: true + + def check(urls) when is_list(urls) do Enum.all?( urls, fn url -> @@ -67,7 +67,7 @@ defp healthy?(urls) when is_list(urls) do ) end - defp healthy?(_), do: true + def check(_), do: true defp tick do Process.send_after(self(), :check, @tick) From 03f4b461895802259c895c81462a3e9d0d31c1e5 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sun, 26 May 2024 14:21:24 -0400 Subject: [PATCH 36/93] Test that healthchecks behave correctly for the expected HTTP responses --- test/pleroma/search/healthcheck_test.exs | 49 ++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 test/pleroma/search/healthcheck_test.exs diff --git a/test/pleroma/search/healthcheck_test.exs b/test/pleroma/search/healthcheck_test.exs new file mode 100644 index 000000000..e7649d949 --- /dev/null +++ b/test/pleroma/search/healthcheck_test.exs @@ -0,0 +1,49 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2024 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Search.HealthcheckTest do + use Pleroma.DataCase + + import Tesla.Mock + + alias Pleroma.Search.Healthcheck + + @good1 "http://good1.example.com/healthz" + @good2 "http://good2.example.com/health" + @bad "http://bad.example.com/healthy" + + setup do + mock(fn + %{method: :get, url: @good1} -> + %Tesla.Env{ + status: 200, + body: "" + } + + %{method: :get, url: @good2} -> + %Tesla.Env{ + status: 200, + body: "" + } + + %{method: :get, url: @bad} -> + %Tesla.Env{ + status: 503, + body: "" + } + end) + + :ok + end + + test "true for 200 responses" do + assert Healthcheck.check([@good1]) + assert Healthcheck.check([@good1, @good2]) + end + + test "false if any response is not a 200" do + refute Healthcheck.check([@bad]) + refute Healthcheck.check([@good1, @bad]) + end +end From d4769b076a95ce2281dba5673c410eb098445bba Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sun, 26 May 2024 15:13:59 -0400 Subject: [PATCH 37/93] Return a 422 when trying to reply to a deleted status --- changelog.d/reply-to-deleted.change | 1 + lib/pleroma/web/common_api/activity_draft.ex | 18 ++++++++++++++++-- .../controllers/status_controller_test.exs | 10 ++++++++++ 3 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 changelog.d/reply-to-deleted.change diff --git 
a/changelog.d/reply-to-deleted.change b/changelog.d/reply-to-deleted.change new file mode 100644 index 000000000..8b952ee7a --- /dev/null +++ b/changelog.d/reply-to-deleted.change @@ -0,0 +1 @@ +A 422 error is returned when attempting to reply to a deleted status diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index bc46a8a36..8aa1e258d 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -129,8 +129,22 @@ defp attachments(%{params: params} = draft) do defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft - defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do - %__MODULE__{draft | in_reply_to: Activity.get_by_id(id)} + defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do + add_error(draft, dgettext("errors", "Cannot reply to a deleted status")) + end + + defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do + activity = Activity.get_by_id(id) + + params = + if is_nil(activity) do + # Deleted activities are returned as nil + Map.put(params, :in_reply_to_status_id, :deleted) + else + Map.put(params, :in_reply_to_status_id, activity) + end + + in_reply_to(%{draft | params: params}) end defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index 80c1ed099..f34911e5b 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -235,6 +235,16 @@ test "replying to a status", %{user: user, conn: conn} do assert Activity.get_in_reply_to_activity(activity).id == replied_to.id end + test "replying to a deleted status", %{user: user, conn: conn} do + {:ok, status} = CommonAPI.post(user, %{status: "cofe"}) + {:ok, _deleted_status} = CommonAPI.delete(status.id, user) + + conn + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses", %{"status" => "xD", "in_reply_to_id" => status.id}) + |> json_response_and_validate_schema(422) + end + test "replying to a direct message with visibility other than direct", %{ user: user, conn: conn From d9b82255b9cf49176f8ef1d5a87abf7d80769a47 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sun, 26 May 2024 15:23:12 -0400 Subject: [PATCH 38/93] Add an HTTP timeout for the healthcheck --- lib/pleroma/search/healthcheck.ex | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/search/healthcheck.ex b/lib/pleroma/search/healthcheck.ex index 170f29344..e562c8478 100644 --- a/lib/pleroma/search/healthcheck.ex +++ b/lib/pleroma/search/healthcheck.ex @@ -8,8 +8,9 @@ defmodule Pleroma.Search.Healthcheck do use GenServer require Logger - @tick :timer.seconds(5) @queue :search_indexing + @tick :timer.seconds(5) + @timeout :timer.seconds(2) def start_link(_) do GenServer.start_link(__MODULE__, [], name: __MODULE__) @@ -59,7 +60,7 @@ def check(urls) when is_list(urls) do Enum.all?( urls, fn url -> - case Pleroma.HTTP.get(url) do + case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do {:ok, %{status: 200}} -> true _ -> false end From d35b69d2686e62cc5076bd7a33449f98f8a11a85 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 13:18:02 +0400 Subject: [PATCH 39/93] Pleroma.Search: Remove wrong 
(but irrelevant) results --- lib/pleroma/search.ex | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/search.ex b/lib/pleroma/search.ex index e8dbcca1f..fd0218cb8 100644 --- a/lib/pleroma/search.ex +++ b/lib/pleroma/search.ex @@ -10,13 +10,12 @@ def remove_from_index(%Pleroma.Object{id: object_id}) do end def search(query, options) do - search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity) - + search_module = Pleroma.Config.get([Pleroma.Search, :module]) search_module.search(options[:for_user], query, options) end def healthcheck_endpoints do - search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity) + search_module = Pleroma.Config.get([Pleroma.Search, :module]) search_module.healthcheck_endpoints end end From 8b76f56050a609bf562053cb7201a9204901490e Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 13:57:42 +0400 Subject: [PATCH 40/93] QdrantSearch: Add healthcheck for qdrant --- lib/pleroma/search/qdrant_search.ex | 11 +++++++++++ test/pleroma/search/qdrant_search_test.exs | 12 ++++++++++++ 2 files changed, 23 insertions(+) diff --git a/lib/pleroma/search/qdrant_search.ex b/lib/pleroma/search/qdrant_search.ex index 19e8cd4bf..3c3ffce16 100644 --- a/lib/pleroma/search/qdrant_search.ex +++ b/lib/pleroma/search/qdrant_search.ex @@ -139,6 +139,17 @@ def search(_user, query, options) do [] end end + + @impl true + def healthcheck_endpoints do + qdrant_health = + Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]) + |> URI.parse() + |> Map.put(:path, "/healthz") + |> URI.to_string() + + [qdrant_health] + end end defmodule Pleroma.Search.QdrantSearch.OpenAIClient do diff --git a/test/pleroma/search/qdrant_search_test.exs b/test/pleroma/search/qdrant_search_test.exs index 46485392e..b389aa816 100644 --- a/test/pleroma/search/qdrant_search_test.exs +++ b/test/pleroma/search/qdrant_search_test.exs @@ -15,6 +15,18 @@ defmodule Pleroma.Search.QdrantSearchTest do alias Pleroma.Workers.SearchIndexingWorker describe "Qdrant search" do + test "returns the correct healthcheck endpoints" do + Config + |> expect(:get, 1, fn + [Pleroma.Search.QdrantSearch, key], nil -> + %{qdrant_url: "https://qdrant.url"}[key] + end) + + health_endpoints = QdrantSearch.healthcheck_endpoints() + + assert "https://qdrant.url/healthz" in health_endpoints + end + test "searches for a term by encoding it and sending it to qdrant" do user = insert(:user) From ec3f3fef7798111641f08020d5fd7ae16e407b89 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 14:15:04 +0400 Subject: [PATCH 41/93] Fastembed Server: Add health check endpoint --- supplemental/search/fastembed-api/fastembed-server.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/supplemental/search/fastembed-api/fastembed-server.py b/supplemental/search/fastembed-api/fastembed-server.py index dd4a7a9c8..02da69db2 100644 --- a/supplemental/search/fastembed-api/fastembed-server.py +++ b/supplemental/search/fastembed-api/fastembed-server.py @@ -17,6 +17,10 @@ def embeddings(request: EmbeddingRequest): embeddings = next(model.embed(request.input)).tolist() return {"data": [{"embedding": embeddings}]} +@app.get("/health") +def health(): + return {"status": "ok"} + if __name__ == "__main__": import uvicorn From f4c04e6b2dce6d75d148ca520aaef27005ecaa82 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 14:21:55 +0400 Subject: [PATCH 42/93] QdrantSearch: Add health checks. 
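
For illustration only (not part of the diff below): with the new keys set, the
healthcheck endpoint list is derived as sketched here. The values mirror the
defaults added to config.exs; the call itself is an assumed usage sketch.

```
config :pleroma, Pleroma.Search.QdrantSearch,
  qdrant_url: "http://127.0.0.1:6333/",
  openai_healthcheck_url: "http://127.0.0.1:11345/health"

# Pleroma.Search.QdrantSearch.healthcheck_endpoints()
# => ["http://127.0.0.1:6333/healthz", "http://127.0.0.1:11345/health"]
```

Leaving openai_healthcheck_url unset (as with the hosted OpenAI API) simply
drops that entry from the list.
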
--- config/config.exs | 3 +++ lib/pleroma/search/qdrant_search.ex | 4 +++- test/pleroma/search/qdrant_search_test.exs | 20 +++++++++++++++++--- 3 files changed, 23 insertions(+), 4 deletions(-) diff --git a/config/config.exs b/config/config.exs index d891a5218..f388dfe52 100644 --- a/config/config.exs +++ b/config/config.exs @@ -919,6 +919,9 @@ qdrant_url: "http://127.0.0.1:6333/", qdrant_api_key: "", openai_url: "http://127.0.0.1:11345", + # The healthcheck url has to be set to nil when used with the real openai + # API, as it doesn't have a healthcheck endpoint. + openai_healthcheck_url: "http://127.0.0.1:11345/health", openai_model: "snowflake/snowflake-arctic-embed-xs", openai_api_key: "", qdrant_index_configuration: %{ diff --git a/lib/pleroma/search/qdrant_search.ex b/lib/pleroma/search/qdrant_search.ex index 3c3ffce16..429ae05b8 100644 --- a/lib/pleroma/search/qdrant_search.ex +++ b/lib/pleroma/search/qdrant_search.ex @@ -148,7 +148,9 @@ def healthcheck_endpoints do |> Map.put(:path, "/healthz") |> URI.to_string() - [qdrant_health] + openai_health = Config.get([Pleroma.Search.QdrantSearch, :openai_healthcheck_url]) + + [qdrant_health, openai_health] |> Enum.filter(& &1) end end diff --git a/test/pleroma/search/qdrant_search_test.exs b/test/pleroma/search/qdrant_search_test.exs index b389aa816..47a77a391 100644 --- a/test/pleroma/search/qdrant_search_test.exs +++ b/test/pleroma/search/qdrant_search_test.exs @@ -16,15 +16,29 @@ defmodule Pleroma.Search.QdrantSearchTest do describe "Qdrant search" do test "returns the correct healthcheck endpoints" do + # No openai healthcheck URL Config - |> expect(:get, 1, fn + |> expect(:get, 2, fn [Pleroma.Search.QdrantSearch, key], nil -> %{qdrant_url: "https://qdrant.url"}[key] end) - health_endpoints = QdrantSearch.healthcheck_endpoints() + [health_endpoint] = QdrantSearch.healthcheck_endpoints() - assert "https://qdrant.url/healthz" in health_endpoints + assert "https://qdrant.url/healthz" == health_endpoint + + # Set openai healthcheck URL + Config + |> expect(:get, 2, fn + [Pleroma.Search.QdrantSearch, key], nil -> + %{qdrant_url: "https://qdrant.url", openai_healthcheck_url: "https://openai.url/health"}[ + key + ] + end) + + [_, health_endpoint] = QdrantSearch.healthcheck_endpoints() + + assert "https://openai.url/health" == health_endpoint end test "searches for a term by encoding it and sending it to qdrant" do From ddf103eca04c9571ba8310915556cc51cd4a9af8 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 14:35:08 +0400 Subject: [PATCH 43/93] QdrantSearch: Fetch a post in search if possible. 
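
The assumed intent of reusing DatabaseSearch.maybe_fetch/3: when the search
term is a direct post URL, the post can be fetched on the fly and included in
the results even if it has not been indexed yet, which is why the unprefixed
original_query is threaded through. A rough usage sketch (the URL is a
placeholder):

```
Pleroma.Search.QdrantSearch.search(user, "https://remote.example/objects/1234", %{})
# may now include the fetched post itself, not only vector-search hits
```
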
--- lib/pleroma/search/qdrant_search.ex | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/search/qdrant_search.ex b/lib/pleroma/search/qdrant_search.ex index 429ae05b8..b659bb682 100644 --- a/lib/pleroma/search/qdrant_search.ex +++ b/lib/pleroma/search/qdrant_search.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Search.QdrantSearch do alias __MODULE__.QdrantClient import Pleroma.Search.Meilisearch, only: [object_to_search_data: 1] + import Pleroma.Search.DatabaseSearch, only: [maybe_fetch: 3] @impl true def create_index do @@ -115,8 +116,8 @@ def remove_from_index(object) do end @impl true - def search(_user, query, options) do - query = "Represent this sentence for searching relevant passages: #{query}" + def search(user, original_query, options) do + query = "Represent this sentence for searching relevant passages: #{original_query}" with {:ok, embedding} <- get_embedding(query), {:ok, %{body: %{"result" => result}}} <- @@ -134,6 +135,7 @@ def search(_user, query, options) do |> Activity.restrict_deactivated_users() |> Ecto.Query.order_by([a], fragment("array_position(?, ?)", ^ids, a.id)) |> Pleroma.Repo.all() + |> maybe_fetch(user, original_query) else _ -> [] From f214c2cdac4a94fae51e7679223df9557c6a1827 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 15:23:33 +0400 Subject: [PATCH 44/93] NotificationTest: Remove impossible case. --- test/pleroma/notification_test.exs | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/test/pleroma/notification_test.exs b/test/pleroma/notification_test.exs index ecdb32e32..2c582c708 100644 --- a/test/pleroma/notification_test.exs +++ b/test/pleroma/notification_test.exs @@ -859,22 +859,6 @@ test "repeating an activity which is already deleted does not generate a notific assert Enum.empty?(Notification.for_user(user)) end - test "replying to a deleted post without tagging does not generate a notification" do - user = insert(:user) - other_user = insert(:user) - - {:ok, activity} = CommonAPI.post(user, %{status: "test post"}) - {:ok, _deletion_activity} = CommonAPI.delete(activity.id, user) - - {:ok, _reply_activity} = - CommonAPI.post(other_user, %{ - status: "test reply", - in_reply_to_status_id: activity.id - }) - - assert Enum.empty?(Notification.for_user(user)) - end - test "notifications are deleted if a local user is deleted" do user = insert(:user) other_user = insert(:user) From 3055c1598b43ee9460b88880e2752c68e9cf6edb Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 17:22:18 +0400 Subject: [PATCH 45/93] IPFSTest: Fix configuration mocking --- config/test.exs | 1 + lib/pleroma/upload.ex | 2 +- lib/pleroma/uploaders/ipfs.ex | 7 +-- test/pleroma/uploaders/ipfs_test.exs | 70 +++++++++++++++++++--------- 4 files changed, 55 insertions(+), 25 deletions(-) diff --git a/config/test.exs b/config/test.exs index 9b4113dd5..3345bb3a9 100644 --- a/config/test.exs +++ b/config/test.exs @@ -153,6 +153,7 @@ config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock +config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock peer_module = if String.to_integer(System.otp_release()) >= 25 do diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index 2c6b23c39..35c7c02a5 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -282,7 +282,7 @@ def base_url do end 
Pleroma.Uploaders.IPFS -> - Config.get([Pleroma.Uploaders.IPFS, :get_gateway_url]) + @config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url]) _ -> public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/" diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex index 32e06c5cf..d171e4652 100644 --- a/lib/pleroma/uploaders/ipfs.ex +++ b/lib/pleroma/uploaders/ipfs.ex @@ -6,11 +6,12 @@ defmodule Pleroma.Uploaders.IPFS do @behaviour Pleroma.Uploaders.Uploader require Logger - alias Pleroma.Config alias Tesla.Multipart + @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) + defp get_final_url(method) do - config = Config.get([__MODULE__]) + config = @config_impl.get([__MODULE__]) post_base_url = Keyword.get(config, :post_gateway_url) Path.join([post_base_url, method]) @@ -69,7 +70,7 @@ def put_file(%Pleroma.Upload{} = upload) do @impl true def delete_file(file) do case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do - {:ok, %{status_code: 204}} -> :ok + {:ok, %{status: 204}} -> :ok error -> {:error, inspect(error)} end end diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs index 853d185e5..cf325b54f 100644 --- a/test/pleroma/uploaders/ipfs_test.exs +++ b/test/pleroma/uploaders/ipfs_test.exs @@ -8,22 +8,22 @@ defmodule Pleroma.Uploaders.IPFSTest do alias Pleroma.Uploaders.IPFS alias Tesla.Multipart - import Mock import ExUnit.CaptureLog + import Mock + import Mox - setup do - clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.IPFS) - clear_config([Pleroma.Uploaders.IPFS]) - - clear_config( - [Pleroma.Uploaders.IPFS, :get_gateway_url], - "https://{CID}.ipfs.mydomain.com" - ) - - clear_config([Pleroma.Uploaders.IPFS, :post_gateway_url], "http://localhost:5001") - end + alias Pleroma.UnstubbedConfigMock, as: Config describe "get_final_url" do + setup do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS] -> + [post_gateway_url: "http://localhost:5001"] + end) + + :ok + end + test "it returns the final url for put_file" do assert IPFS.put_file_endpoint() == "http://localhost:5001/api/v0/add" end @@ -34,7 +34,21 @@ test "it returns the final url for delete_file" do end describe "get_file/1" do + setup do + Config + |> expect(:get, fn [Pleroma.Upload, :uploader] -> Pleroma.Uploaders.IPFS end) + |> expect(:get, fn [Pleroma.Upload, :base_url] -> nil end) + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :public_endpoint] -> nil end) + + :ok + end + test "it returns path to ipfs file with cid as subdomain" do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] -> + "https://{CID}.ipfs.mydomain.com" + end) + assert IPFS.get_file("testcid") == { :ok, {:url, "https://testcid.ipfs.mydomain.com"} @@ -42,10 +56,10 @@ test "it returns path to ipfs file with cid as subdomain" do end test "it returns path to ipfs file with cid as path" do - clear_config( - [Pleroma.Uploaders.IPFS, :get_gateway_url], + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] -> "https://ipfs.mydomain.com/ipfs/{CID}" - ) + end) assert IPFS.get_file("testcid") == { :ok, @@ -56,6 +70,11 @@ test "it returns path to ipfs file with cid as path" do describe "put_file/1" do setup do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS] -> + [post_gateway_url: "http://localhost:5001"] + end) + file_upload = %Pleroma.Upload{ name: "image-tet.jpg", content_type: "image/jpeg", @@ -73,7 +92,7 @@ test "it returns path to ipfs file with cid as path" do test 
"save file", %{file_upload: file_upload} do with_mock Pleroma.HTTP, - post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] -> {:ok, %Tesla.Env{ status: 200, @@ -88,7 +107,7 @@ test "save file", %{file_upload: file_upload} do test "returns error", %{file_upload: file_upload} do with_mock Pleroma.HTTP, - post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] -> {:error, "IPFS Gateway upload failed"} end do assert capture_log(fn -> @@ -99,19 +118,19 @@ test "returns error", %{file_upload: file_upload} do test "returns error if JSON decode fails", %{file_upload: file_upload} do with_mock Pleroma.HTTP, [], - post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] -> {:ok, %Tesla.Env{status: 200, body: "invalid"}} end do assert capture_log(fn -> assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"} end) =~ - "Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError{data: \"invalid\", position: 0, token: nil}}" + "Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError" end end test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do with_mock Pleroma.HTTP, [], - post: fn "http://localhost:5001/api/v0/add", mp, [], params: ["cid-version": "1"] -> + post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] -> {:ok, %Tesla.Env{status: 200, body: "{\"key\": \"value\"}"}} end do assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"} @@ -120,9 +139,18 @@ test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_ end describe "delete_file/1" do + setup do + Config + |> expect(:get, fn [Pleroma.Uploaders.IPFS] -> + [post_gateway_url: "http://localhost:5001"] + end) + + :ok + end + test_with_mock "deletes file", Pleroma.HTTP, post: fn "http://localhost:5001/api/v0/files/rm", "", [], params: [arg: "image.jpg"] -> - {:ok, %{status_code: 204}} + {:ok, %{status: 204}} end do assert :ok = IPFS.delete_file("image.jpg") end From ed93af64e14e1e82cf4840b1a160df8eddecc55c Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 17:50:34 +0400 Subject: [PATCH 46/93] Add changelog --- changelog.d/add-nsfw-mrf.add | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/add-nsfw-mrf.add diff --git a/changelog.d/add-nsfw-mrf.add b/changelog.d/add-nsfw-mrf.add new file mode 100644 index 000000000..ce62c7ed0 --- /dev/null +++ b/changelog.d/add-nsfw-mrf.add @@ -0,0 +1 @@ +Add NSFW-detecting MRF From a50c657427a2dfe9d48c25529a179fe634d30e48 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 11:17:02 -0400 Subject: [PATCH 47/93] Add a dedicated connection pool for Rich Media Sharing this pool with regular Media is problematic as Rich Media will connect to many different domains and thrash the pool, but regular Media will have predictable connections to the webservers hosting media for the fediverse servers you peer with. 
--- config/config.exs | 9 +++++++++ lib/pleroma/web/rich_media/helpers.ex | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/config/config.exs b/config/config.exs index 8b9a588b7..b8030651f 100644 --- a/config/config.exs +++ b/config/config.exs @@ -827,6 +827,11 @@ max_waiting: 20, recv_timeout: 15_000 ], + rich_media: [ + size: 25, + max_waiting: 20, + recv_timeout: 15_000 + ], upload: [ size: 25, max_waiting: 5, @@ -847,6 +852,10 @@ max_connections: 50, timeout: 150_000 ], + rich_media: [ + max_connections: 50, + timeout: 150_000 + ], upload: [ max_connections: 25, timeout: 300_000 diff --git a/lib/pleroma/web/rich_media/helpers.ex b/lib/pleroma/web/rich_media/helpers.ex index 119994458..ea41bd285 100644 --- a/lib/pleroma/web/rich_media/helpers.ex +++ b/lib/pleroma/web/rich_media/helpers.ex @@ -58,7 +58,7 @@ defp check_content_length(headers) do defp http_options do [ - pool: :media, + pool: :rich_media, max_body: Config.get([:rich_media, :max_body], 5_000_000) ] end From 6708f154a4f7ad46b4637d4d566b8cf81e3ebb7b Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 11:18:58 -0400 Subject: [PATCH 48/93] Rework Gun connection pool sizes to make better use of the default 250 connections --- config/config.exs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/config/config.exs b/config/config.exs index b8030651f..a025defeb 100644 --- a/config/config.exs +++ b/config/config.exs @@ -818,27 +818,27 @@ config :pleroma, :pools, federation: [ - size: 50, - max_waiting: 10, + size: 75, + max_waiting: 20, recv_timeout: 10_000 ], media: [ - size: 50, + size: 75, max_waiting: 20, recv_timeout: 15_000 ], rich_media: [ size: 25, max_waiting: 20, - recv_timeout: 15_000 - ], + recv_timeout: 15_000 + ], upload: [ size: 25, - max_waiting: 5, + max_waiting: 20, recv_timeout: 15_000 ], default: [ - size: 10, + size: 50, max_waiting: 2, recv_timeout: 5_000 ] From d272eb62cd4da45e5ef82fcc0126d2cf799d292a Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 11:22:45 -0400 Subject: [PATCH 49/93] Trust the connection pools to enforce the concurrency limitations --- .../web/activity_pub/mrf/media_proxy_warming_policy.ex | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex index c95d35bb9..f10dc3ce5 100644 --- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex @@ -30,9 +30,7 @@ defp prefetch(url) do if Pleroma.Config.get(:env) == :test do fetch(prefetch_url) else - ConcurrentLimiter.limit(__MODULE__, fn -> - Task.start(fn -> fetch(prefetch_url) end) - end) + Task.start(fn -> fetch(prefetch_url) end) end end end From 37d79b76bb770cb294c0a54777b435dc49c042ab Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 11:24:54 -0400 Subject: [PATCH 50/93] Use the configured http client options for mediaproxy --- .../web/activity_pub/mrf/media_proxy_warming_policy.ex | 10 ++++------ lib/pleroma/web/media_proxy/media_proxy_controller.ex | 3 ++- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex index f10dc3ce5..e5eb6896a 100644 --- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex @@ -11,11 +11,6 @@ 
defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do require Logger - @adapter_options [ - pool: :media, - recv_timeout: 10_000 - ] - @impl true def history_awareness, do: :auto @@ -35,7 +30,10 @@ defp prefetch(url) do end end - defp fetch(url), do: HTTP.get(url, [], @adapter_options) + defp fetch(url) do + http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media) + HTTP.get(url, [], http_client_opts) + end defp preload(%{"object" => %{"attachment" => attachments}} = _message) do Enum.each(attachments, fn diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex index c11484ecb..0b446e0a6 100644 --- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex +++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex @@ -54,9 +54,10 @@ def preview(%Conn{} = conn, %{"sig" => sig64, "url" => url64}) do defp handle_preview(conn, url) do media_proxy_url = MediaProxy.url(url) + http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media) with {:ok, %{status: status} = head_response} when status in 200..299 <- - Pleroma.HTTP.request(:head, media_proxy_url, "", [], pool: :media) do + Pleroma.HTTP.request(:head, media_proxy_url, "", [], http_client_opts) do content_type = Tesla.get_header(head_response, "content-type") content_length = Tesla.get_header(head_response, "content-length") content_length = content_length && String.to_integer(content_length) From 8b61d4e3e124c4fafeaabe58a8c7673f767b2871 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 11:28:31 -0400 Subject: [PATCH 51/93] Changelogs --- changelog.d/mediaproxy-http.fix | 1 + changelog.d/pools.change | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/mediaproxy-http.fix create mode 100644 changelog.d/pools.change diff --git a/changelog.d/mediaproxy-http.fix b/changelog.d/mediaproxy-http.fix new file mode 100644 index 000000000..4ff6430e0 --- /dev/null +++ b/changelog.d/mediaproxy-http.fix @@ -0,0 +1 @@ +Ensure MediaProxy HTTP requests obey all the defined connection settings diff --git a/changelog.d/pools.change b/changelog.d/pools.change new file mode 100644 index 000000000..3c689195a --- /dev/null +++ b/changelog.d/pools.change @@ -0,0 +1 @@ +HTTP connection pool adjustments From e4f1325f78e9be9fb200358d73794f15794c39bd Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 19:44:41 +0400 Subject: [PATCH 52/93] InetHelper: Don't use deprecated function. 
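
Before/after sketch of the swapped call; the deprecation is in the inet_cidr
library, and the arguments and return value are assumed to be unchanged:

```
# before (deprecated):
InetCidr.parse("127.0.0.1/32", true)

# after:
InetCidr.parse_cidr!("127.0.0.1/32", true)
```
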
--- lib/pleroma/helpers/inet_helper.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/helpers/inet_helper.ex b/lib/pleroma/helpers/inet_helper.ex index 3500fc679..00e18649e 100644 --- a/lib/pleroma/helpers/inet_helper.ex +++ b/lib/pleroma/helpers/inet_helper.ex @@ -25,6 +25,6 @@ def parse_cidr(proxy) when is_binary(proxy) do InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128" end - InetCidr.parse(proxy, true) + InetCidr.parse_cidr!(proxy, true) end end From 284cd0abe5fd34d0bb31281614a7dc9249731b40 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 20:04:12 +0400 Subject: [PATCH 53/93] Add changelog --- changelog.d/support-honk-image-summaries.add | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/support-honk-image-summaries.add diff --git a/changelog.d/support-honk-image-summaries.add b/changelog.d/support-honk-image-summaries.add new file mode 100644 index 000000000..052c03f95 --- /dev/null +++ b/changelog.d/support-honk-image-summaries.add @@ -0,0 +1 @@ +Support honk-style attachment summaries as alt-text. From f4693dc6710c8c8ac878c2845793c7d138f90c04 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 27 Dec 2023 22:32:42 -0500 Subject: [PATCH 54/93] Update Prometheus/Grafana docs for PromEx --- changelog.d/prometheus-docs.change | 1 + docs/development/API/prometheus.md | 73 ++++++++++++++++-------------- 2 files changed, 39 insertions(+), 35 deletions(-) create mode 100644 changelog.d/prometheus-docs.change diff --git a/changelog.d/prometheus-docs.change b/changelog.d/prometheus-docs.change new file mode 100644 index 000000000..a9bd1e2e9 --- /dev/null +++ b/changelog.d/prometheus-docs.change @@ -0,0 +1 @@ +Update the documentation for configuring Prometheus metrics. diff --git a/docs/development/API/prometheus.md b/docs/development/API/prometheus.md index a5158d905..140291fe0 100644 --- a/docs/development/API/prometheus.md +++ b/docs/development/API/prometheus.md @@ -1,44 +1,47 @@ -# Prometheus Metrics +# Prometheus / OpenTelemetry Metrics -Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library. +Pleroma includes support for exporting metrics via the [prom_ex](https://github.com/akoutmos/prom_ex) library. +The metrics are exposed by a dedicated webserver/port to improve privacy and security. 
Config example: ``` -config :prometheus, Pleroma.Web.Endpoint.MetricsExporter, - enabled: true, - auth: {:basic, "myusername", "mypassword"}, - ip_whitelist: ["127.0.0.1"], - path: "/api/pleroma/app_metrics", - format: :text -``` - -* `enabled` (Pleroma extension) enables the endpoint -* `ip_whitelist` (Pleroma extension) could be used to restrict access only to specified IPs -* `auth` sets the authentication (`false` for no auth; configurable to HTTP Basic Auth, see [prometheus-plugs](https://github.com/deadtrickster/prometheus-plugs#exporting) documentation) -* `format` sets the output format (`:text` or `:protobuf`) -* `path` sets the path to app metrics page - - -## `/api/pleroma/app_metrics` - -### Exports Prometheus application metrics - -* Method: `GET` -* Authentication: not required by default (see configuration options above) -* Params: none -* Response: text - -## Grafana - -### Config example - -The following is a config example to use with [Grafana](https://grafana.com) +config :pleroma, Pleroma.PromEx, + disabled: false, + manual_metrics_start_delay: :no_delay, + drop_metrics_groups: [], + grafana: [ + host: System.get_env("GRAFANA_HOST", "http://localhost:3000"), + auth_token: System.get_env("GRAFANA_TOKEN"), + upload_dashboards_on_start: false, + folder_name: "BEAM", + annotate_app_lifecycle: true + ], + metrics_server: [ + port: 4021, + path: "/metrics", + protocol: :http, + pool_size: 5, + cowboy_opts: [], + auth_strategy: :none + ], + datasource: "Prometheus" ``` - - job_name: 'beam' - metrics_path: /api/pleroma/app_metrics - scheme: https + +PromEx supports the ability to automatically publish dashboards to your Grafana server as well as register Annotations. If you do not wish to configure this capability you must generate the dashboard JSON files and import them directly. You can find the mix commands in the upstream [documentation](https://hexdocs.pm/prom_ex/Mix.Tasks.PromEx.Dashboard.Export.html). You can find the list of modules enabled in Pleroma for which you should generate dashboards for by examining the contents of the `lib/pleroma/prom_ex.ex` module. + +## prometheus.yml + +The following is a bare minimum config example to use with [Prometheus](https://prometheus.io) or Prometheus-compatible software like [VictoriaMetrics](https://victoriametrics.com). 
+
+```
+global:
+  scrape_interval: 15s
+
+scrape_configs:
+  - job_name: 'pleroma'
+    scheme: http
     static_configs:
-      - targets: ['pleroma.soykaf.com']
+      - targets: ['pleroma.soykaf.com:4021']
 ```

From 7258ab1aed53da796e24bdab81f39ea1d358a549 Mon Sep 17 00:00:00 2001
From: Mark Felder
Date: Mon, 27 May 2024 12:20:00 -0400
Subject: [PATCH 55/93] Changelog

---
 changelog.d/promexdocs.add | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/promexdocs.add

diff --git a/changelog.d/promexdocs.add b/changelog.d/promexdocs.add
new file mode 100644
index 000000000..dda972994
--- /dev/null
+++ b/changelog.d/promexdocs.add
@@ -0,0 +1 @@
+PromEx documentation

From 0bddca361d12f347ca9907c5ddb5d1464a17b32a Mon Sep 17 00:00:00 2001
From: Mark Felder
Date: Sun, 24 Jan 2021 14:56:45 -0600
Subject: [PATCH 56/93] DNSRBL in an MRF

---
 changelog.d/add-rbl-mrf.add | 1 +
 config/config.exs | 5 +
 .../web/activity_pub/mrf/dnsrbl_policy.ex | 142 ++++++++++++++++++
 3 files changed, 148 insertions(+)
 create mode 100644 changelog.d/add-rbl-mrf.add
 create mode 100644 lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex

diff --git a/changelog.d/add-rbl-mrf.add b/changelog.d/add-rbl-mrf.add
new file mode 100644
index 000000000..363270fb9
--- /dev/null
+++ b/changelog.d/add-rbl-mrf.add
@@ -0,0 +1 @@
+Add DNSRBL MRF
diff --git a/config/config.exs b/config/config.exs
index b93de52e1..1fb0f3911 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -410,6 +410,11 @@
   accept: [],
   reject: []
 
+config :pleroma, :mrf_dnsrbl,
+  nameserver: "127.0.0.1",
+  port: 53,
+  zone: "bl.pleroma.com"
+
 # threshold of 7 days
 config :pleroma, :mrf_object_age,
   threshold: 604_800,
diff --git a/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex
new file mode 100644
index 000000000..9543cc545
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex
@@ -0,0 +1,142 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
+  @moduledoc """
+  Dynamic activity filtering based on an RBL database
+
+  This MRF makes queries to a custom DNS server which will
+  respond with values indicating the classification of the domain
+  the activity originated from. This method has been widely used
+  in the email anti-spam industry for very fast reputation checks.
+
+  e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK.
+  Other values such as 127.0.0.2 may be used for specific classifications.
+
+  Information about why the host is blocked can be stored in a corresponding TXT record.
+
+  This method is fail-open, so if the queries fail the activities are accepted.
+
+  An example of software meant for this purpose is rbldnsd, which can be found
+  at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
+  https://git.pleroma.social/feld/rbldnsd
+
+  It is highly recommended that you run your own copy of rbldnsd and use an
+  external mechanism to sync/share the contents of the zone file. This is
+  important to keep the latency on the queries as low as possible and prevent
+  your DNS server from being attacked and made to fail, which would cause content to be permitted.
+ """ + + @behaviour Pleroma.Web.ActivityPub.MRF.Policy + + alias Pleroma.Config + + require Logger + + @query_retries 1 + @query_timeout 500 + + @impl true + def filter(%{"actor" => actor} = object) do + actor_info = URI.parse(actor) + + with {:ok, object} <- check_rbl(actor_info, object) do + {:ok, object} + else + _ -> {:reject, "[DNSRBLPolicy]"} + end + end + + @impl true + def filter(object), do: {:ok, object} + + @impl true + def describe do + mrf_dnsrbl = + Config.get(:mrf_dnsrbl) + |> Enum.into(%{}) + + {:ok, %{mrf_dnsrbl: mrf_dnsrbl}} + end + + @impl true + def config_description do + %{ + key: :mrf_dnsrbl, + related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy", + label: "MRF DNSRBL", + description: "DNS RealTime Blackhole Policy", + children: [ + %{ + key: :nameserver, + type: {:string}, + description: "DNSRBL Nameserver to Query (IP or hostame)", + suggestions: ["127.0.0.1"] + }, + %{ + key: :port, + type: {:string}, + description: "Nameserver port", + suggestions: ["53"] + }, + %{ + key: :zone, + type: {:string}, + description: "Root zone for querying", + suggestions: ["bl.pleroma.com"] + } + ] + } + end + + defp check_rbl(%{host: actor_host}, object) do + with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()), + zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do + query = + Enum.join([actor_host, zone], ".") + |> String.to_charlist() + + rbl_response = rblquery(query) + + if Enum.empty?(rbl_response) do + {:ok, object} + else + Task.start(fn -> + reason = rblquery(query, :txt) || "undefined" + + Logger.warning( + "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}" + ) + end) + + :error + end + else + _ -> {:ok, object} + end + end + + defp get_rblhost_ip(rblhost) do + case rblhost |> String.to_charlist() |> :inet_parse.address() do + {:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address() + _ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()} + end + end + + defp rblquery(query, type \\ :a) do + config = Config.get([:mrf_dnsrbl]) + + case get_rblhost_ip(config[:nameserver]) do + {:ok, rblnsip} -> + :inet_res.lookup(query, :in, type, + nameservers: [{rblnsip, config[:port]}], + timeout: @query_timeout, + retry: @query_retries + ) + + _ -> + [] + end + end +end From 5e963736cee55aa8f4bb9d9fba451ff3864ddaa8 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Sun, 21 May 2023 15:26:02 -0500 Subject: [PATCH 57/93] Add AntiMentionSpamPolicy --- .../mrf/anti_mention_spam_policy.ex | 87 +++++++++++++++++++ .../mrf/anti_mention_spam_policy_test.exs | 65 ++++++++++++++ 2 files changed, 152 insertions(+) create mode 100644 lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex create mode 100644 test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs diff --git a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex new file mode 100644 index 000000000..ad97a1552 --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex @@ -0,0 +1,87 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do + alias Pleroma.User + require Pleroma.Constants + + @behaviour Pleroma.Web.ActivityPub.MRF.Policy + + defp user_has_followers?(%User{} = u), do: u.follower_count > 0 + defp user_has_posted?(%User{} = u), do: u.note_count > 0 + + defp 
user_has_age?(%User{} = u) do + now = NaiveDateTime.utc_now() + diff = u.inserted_at |> NaiveDateTime.diff(now, :second) + diff > :timer.seconds(30) + end + + defp good_reputation?(%User{} = u) do + user_has_age?(u) and user_has_followers?(u) and user_has_posted?(u) + end + + # copied from HellthreadPolicy + defp get_recipient_count(message) do + recipients = (message["to"] || []) ++ (message["cc"] || []) + + follower_collection = + User.get_cached_by_ap_id(message["actor"] || message["attributedTo"]).follower_address + + if Enum.member?(recipients, Pleroma.Constants.as_public()) do + recipients = + recipients + |> List.delete(Pleroma.Constants.as_public()) + |> List.delete(follower_collection) + + {:public, length(recipients)} + else + recipients = + recipients + |> List.delete(follower_collection) + + {:not_public, length(recipients)} + end + end + + defp object_has_recipients?(%{"object" => object} = activity) do + {_, object_count} = get_recipient_count(object) + {_, activity_count} = get_recipient_count(activity) + object_count + activity_count > 0 + end + + defp object_has_recipients?(object) do + {_, count} = get_recipient_count(object) + count > 0 + end + + @impl true + def filter(%{"type" => "Create", "actor" => actor} = activity) do + with {:ok, %User{local: false} = u} <- User.get_or_fetch_by_ap_id(actor), + {:has_mentions, true} <- {:has_mentions, object_has_recipients?(activity)}, + {:good_reputation, true} <- {:good_reputation, good_reputation?(u)} do + {:ok, activity} + else + {:ok, %User{local: true}} -> + {:ok, activity} + + {:has_mentions, false} -> + {:ok, activity} + + {:good_reputation, false} -> + {:reject, "[AntiMentionSpamPolicy] User rejected"} + + {:error, _} -> + {:reject, "[AntiMentionSpamPolicy] Failed to get or fetch user by ap_id"} + + e -> + {:reject, "[AntiMentionSpamPolicy] Unhandled error #{inspect(e)}"} + end + end + + # in all other cases, pass through + def filter(message), do: {:ok, message} + + @impl true + def describe, do: {:ok, %{}} +end diff --git a/test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs b/test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs new file mode 100644 index 000000000..63947858c --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/anti_mention_spam_policy_test.exs @@ -0,0 +1,65 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicyTest do + use Pleroma.DataCase + import Pleroma.Factory + alias Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy + + test "it allows posts without mentions" do + user = insert(:user, local: false) + assert user.note_count == 0 + + message = %{ + "type" => "Create", + "actor" => user.ap_id + } + + {:ok, _message} = AntiMentionSpamPolicy.filter(message) + end + + test "it allows posts from users with followers, posts, and age" do + user = + insert(:user, + local: false, + follower_count: 1, + note_count: 1, + inserted_at: ~N[1970-01-01 00:00:00] + ) + + message = %{ + "type" => "Create", + "actor" => user.ap_id + } + + {:ok, _message} = AntiMentionSpamPolicy.filter(message) + end + + test "it allows posts from local users" do + user = insert(:user, local: true) + + message = %{ + "type" => "Create", + "actor" => user.ap_id + } + + {:ok, _message} = AntiMentionSpamPolicy.filter(message) + end + + test "it rejects posts with mentions from users without followers" do + user = insert(:user, local: false, follower_count: 0) + + 
message = %{ + "type" => "Create", + "actor" => user.ap_id, + "object" => %{ + "to" => ["https://pleroma.soykaf.com/users/1"], + "cc" => ["https://pleroma.soykaf.com/users/1"], + "actor" => user.ap_id + } + } + + {:reject, _message} = AntiMentionSpamPolicy.filter(message) + end +end From 64cacc3694c0441d3f3f5886b301bbf93f590cb6 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Sun, 21 May 2023 19:31:56 -0500 Subject: [PATCH 58/93] AntiMentionSpamPolicy: fix user age check --- lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex index ad97a1552..0cb3313b2 100644 --- a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex @@ -12,9 +12,8 @@ defp user_has_followers?(%User{} = u), do: u.follower_count > 0 defp user_has_posted?(%User{} = u), do: u.note_count > 0 defp user_has_age?(%User{} = u) do - now = NaiveDateTime.utc_now() - diff = u.inserted_at |> NaiveDateTime.diff(now, :second) - diff > :timer.seconds(30) + diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :second) + diff >= :timer.seconds(30) end defp good_reputation?(%User{} = u) do From 02d8ce8f0ba615fa0946064052113fb05dd0b6a2 Mon Sep 17 00:00:00 2001 From: Alex Gleason Date: Mon, 22 May 2023 15:50:34 -0500 Subject: [PATCH 59/93] AntiMentionSpamPolicy: remove followers check --- lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex index 0cb3313b2..9cdb2077f 100644 --- a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex @@ -8,7 +8,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do @behaviour Pleroma.Web.ActivityPub.MRF.Policy - defp user_has_followers?(%User{} = u), do: u.follower_count > 0 defp user_has_posted?(%User{} = u), do: u.note_count > 0 defp user_has_age?(%User{} = u) do @@ -17,7 +16,7 @@ defp user_has_age?(%User{} = u) do end defp good_reputation?(%User{} = u) do - user_has_age?(u) and user_has_followers?(u) and user_has_posted?(u) + user_has_age?(u) and user_has_posted?(u) end # copied from HellthreadPolicy From 0d092a3d4fd89a7f8df30f080087bd24ce53c597 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 12:26:55 -0400 Subject: [PATCH 60/93] Changelog --- changelog.d/anti-mentionspam-mrf.add | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/anti-mentionspam-mrf.add diff --git a/changelog.d/anti-mentionspam-mrf.add b/changelog.d/anti-mentionspam-mrf.add new file mode 100644 index 000000000..9466f85f4 --- /dev/null +++ b/changelog.d/anti-mentionspam-mrf.add @@ -0,0 +1 @@ +Add Anti-mention Spam MRF backported from Rebased From cab6372d7a1bdf50436eff1b4023fd6e05586dbc Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 12:31:29 -0400 Subject: [PATCH 61/93] Make user age limit configurable Switch to milliseconds for consistency with other configuration options in codebase --- config/config.exs | 2 ++ .../web/activity_pub/mrf/anti_mention_spam_policy.ex | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/config/config.exs b/config/config.exs index b93de52e1..51773d830 100644 --- 
a/config/config.exs +++ b/config/config.exs @@ -430,6 +430,8 @@ mention_parent: true, mention_quoted: true +config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000 + config :pleroma, :rich_media, enabled: true, ignore_hosts: [], diff --git a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex index 9cdb2077f..531e75ce8 100644 --- a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do + alias Pleroma.Config alias Pleroma.User require Pleroma.Constants @@ -11,8 +12,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do defp user_has_posted?(%User{} = u), do: u.note_count > 0 defp user_has_age?(%User{} = u) do - diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :second) - diff >= :timer.seconds(30) + user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000) + diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond) + diff >= user_age_limit end defp good_reputation?(%User{} = u) do From 1c699144d23aa4a86ff8b6ebef7d760ce9e3a4e2 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 21:26:40 +0400 Subject: [PATCH 62/93] HttpSecurityPlug: Don't allow unsafe-eval by default --- config/config.exs | 3 +- config/test.exs | 1 + lib/pleroma/application.ex | 7 +- lib/pleroma/web/plugs/http_security_plug.ex | 49 +++-- .../web/plugs/http_security_plug_test.exs | 208 ++++++++++++++---- 5 files changed, 204 insertions(+), 64 deletions(-) diff --git a/config/config.exs b/config/config.exs index 4752bbbde..f861daf04 100644 --- a/config/config.exs +++ b/config/config.exs @@ -519,7 +519,8 @@ sts: false, sts_max_age: 31_536_000, ct_max_age: 2_592_000, - referrer_policy: "same-origin" + referrer_policy: "same-origin", + allow_unsafe_eval: false config :cors_plug, max_age: 86_400, diff --git a/config/test.exs b/config/test.exs index 3345bb3a9..b5c9c6e4a 100644 --- a/config/test.exs +++ b/config/test.exs @@ -154,6 +154,7 @@ config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock +config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.UnstubbedConfigMock peer_module = if String.to_integer(System.otp_release()) >= 25 do diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index d266d1836..0d9757b44 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -14,6 +14,7 @@ defmodule Pleroma.Application do @name Mix.Project.config()[:name] @version Mix.Project.config()[:version] @repository Mix.Project.config()[:source_url] + @compile_env Mix.env() def name, do: @name def version, do: @version @@ -51,7 +52,11 @@ def start(_type, _args) do Pleroma.HTML.compile_scrubbers() Pleroma.Config.Oban.warn() Config.DeprecationWarnings.warn() - Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled() + + if @compile_env != :test do + Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled() + end + Pleroma.ApplicationRequirements.verify!() load_custom_modules() Pleroma.Docs.JSON.compile() diff --git a/lib/pleroma/web/plugs/http_security_plug.ex b/lib/pleroma/web/plugs/http_security_plug.ex index a27dcd0ab..a1dc6c02a 100644 --- 
a/lib/pleroma/web/plugs/http_security_plug.ex +++ b/lib/pleroma/web/plugs/http_security_plug.ex @@ -3,26 +3,27 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do - alias Pleroma.Config import Plug.Conn require Logger + @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) + def init(opts), do: opts def call(conn, _options) do - if Config.get([:http_security, :enabled]) do + if @config_impl.get([:http_security, :enabled]) do conn |> merge_resp_headers(headers()) - |> maybe_send_sts_header(Config.get([:http_security, :sts])) + |> maybe_send_sts_header(@config_impl.get([:http_security, :sts])) else conn end end def primary_frontend do - with %{"name" => frontend} <- Config.get([:frontends, :primary]), - available <- Config.get([:frontends, :available]), + with %{"name" => frontend} <- @config_impl.get([:frontends, :primary]), + available <- @config_impl.get([:frontends, :available]), %{} = primary_frontend <- Map.get(available, frontend) do {:ok, primary_frontend} end @@ -37,8 +38,8 @@ def custom_http_frontend_headers do end def headers do - referrer_policy = Config.get([:http_security, :referrer_policy]) - report_uri = Config.get([:http_security, :report_uri]) + referrer_policy = @config_impl.get([:http_security, :referrer_policy]) + report_uri = @config_impl.get([:http_security, :report_uri]) custom_http_frontend_headers = custom_http_frontend_headers() headers = [ @@ -86,10 +87,10 @@ def headers do @csp_start [Enum.join(static_csp_rules, ";") <> ";"] defp csp_string do - scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme] + scheme = @config_impl.get([Pleroma.Web.Endpoint, :url])[:scheme] static_url = Pleroma.Web.Endpoint.static_url() websocket_url = Pleroma.Web.Endpoint.websocket_url() - report_uri = Config.get([:http_security, :report_uri]) + report_uri = @config_impl.get([:http_security, :report_uri]) img_src = "img-src 'self' data: blob:" media_src = "media-src 'self'" @@ -97,8 +98,8 @@ defp csp_string do # Strict multimedia CSP enforcement only when MediaProxy is enabled {img_src, media_src, connect_src} = - if Config.get([:media_proxy, :enabled]) && - !Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do + if @config_impl.get([:media_proxy, :enabled]) && + !@config_impl.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do sources = build_csp_multimedia_source_list() { @@ -115,17 +116,21 @@ defp csp_string do end connect_src = - if Config.get(:env) == :dev do + if @config_impl.get(:env) == :dev do [connect_src, " http://localhost:3035/"] else connect_src end script_src = - if Config.get(:env) == :dev do - "script-src 'self' 'unsafe-eval'" + if @config_impl.get([:http_security, :allow_unsafe_eval]) do + if @config_impl.get(:env) == :dev do + "script-src 'self' 'unsafe-eval'" + else + "script-src 'self' 'wasm-unsafe-eval'" + end else - "script-src 'self' 'wasm-unsafe-eval'" + "script-src 'self'" end report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"] @@ -161,11 +166,11 @@ defp build_csp_param_from_whitelist(url), do: url defp build_csp_multimedia_source_list do media_proxy_whitelist = [:media_proxy, :whitelist] - |> Config.get() + |> @config_impl.get() |> build_csp_from_whitelist([]) - captcha_method = Config.get([Pleroma.Captcha, :method]) - captcha_endpoint = Config.get([captcha_method, :endpoint]) + captcha_method = @config_impl.get([Pleroma.Captcha, :method]) + captcha_endpoint = @config_impl.get([captcha_method, :endpoint]) base_endpoints = [ @@ -173,7 
+178,7 @@ defp build_csp_multimedia_source_list do [Pleroma.Upload, :base_url], [Pleroma.Uploaders.S3, :public_endpoint] ] - |> Enum.map(&Config.get/1) + |> Enum.map(&@config_impl.get/1) [captcha_endpoint | base_endpoints] |> Enum.map(&build_csp_param/1) @@ -200,7 +205,7 @@ defp build_csp_param(url) when is_binary(url) do end def warn_if_disabled do - unless Config.get([:http_security, :enabled]) do + unless Pleroma.Config.get([:http_security, :enabled]) do Logger.warning(" .i;;;;i. iYcviii;vXY: @@ -245,8 +250,8 @@ def warn_if_disabled do end defp maybe_send_sts_header(conn, true) do - max_age_sts = Config.get([:http_security, :sts_max_age]) - max_age_ct = Config.get([:http_security, :ct_max_age]) + max_age_sts = @config_impl.get([:http_security, :sts_max_age]) + max_age_ct = @config_impl.get([:http_security, :ct_max_age]) merge_resp_headers(conn, [ {"strict-transport-security", "max-age=#{max_age_sts}; includeSubDomains"}, diff --git a/test/pleroma/web/plugs/http_security_plug_test.exs b/test/pleroma/web/plugs/http_security_plug_test.exs index c79170382..80ad1fa7d 100644 --- a/test/pleroma/web/plugs/http_security_plug_test.exs +++ b/test/pleroma/web/plugs/http_security_plug_test.exs @@ -3,14 +3,52 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do - use Pleroma.Web.ConnCase + use Pleroma.Web.ConnCase, async: true alias Plug.Conn - describe "http security enabled" do - setup do: clear_config([:http_security, :enabled], true) + import Mox - test "it sends CSP headers when enabled", %{conn: conn} do + setup do + base_config = Pleroma.Config.get([:http_security]) + %{base_config: base_config} + end + + defp mock_config(config, additional \\ %{}) do + Pleroma.UnstubbedConfigMock + |> stub(:get, fn + [:http_security, key] -> config[key] + key -> additional[key] + end) + end + + describe "http security enabled" do + setup %{base_config: base_config} do + %{base_config: Keyword.put(base_config, :enabled, true)} + end + + test "it does not contain unsafe-eval", %{conn: conn, base_config: base_config} do + mock_config(base_config) + + conn = get(conn, "/api/v1/instance") + [header] = Conn.get_resp_header(conn, "content-security-policy") + refute header =~ ~r/unsafe-eval/ + end + + test "with allow_unsafe_eval set, it does contain it", %{conn: conn, base_config: base_config} do + base_config = + base_config + |> Keyword.put(:allow_unsafe_eval, true) + + mock_config(base_config) + + conn = get(conn, "/api/v1/instance") + [header] = Conn.get_resp_header(conn, "content-security-policy") + assert header =~ ~r/unsafe-eval/ + end + + test "it sends CSP headers when enabled", %{conn: conn, base_config: base_config} do + mock_config(base_config) conn = get(conn, "/api/v1/instance") refute Conn.get_resp_header(conn, "x-xss-protection") == [] @@ -22,8 +60,10 @@ test "it sends CSP headers when enabled", %{conn: conn} do refute Conn.get_resp_header(conn, "content-security-policy") == [] end - test "it sends STS headers when enabled", %{conn: conn} do - clear_config([:http_security, :sts], true) + test "it sends STS headers when enabled", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:sts, true) + |> mock_config() conn = get(conn, "/api/v1/instance") @@ -31,8 +71,10 @@ test "it sends STS headers when enabled", %{conn: conn} do refute Conn.get_resp_header(conn, "expect-ct") == [] end - test "it does not send STS headers when disabled", %{conn: conn} do - clear_config([:http_security, :sts], false) + test "it does not send STS headers when 
disabled", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:sts, false) + |> mock_config() conn = get(conn, "/api/v1/instance") @@ -40,19 +82,30 @@ test "it does not send STS headers when disabled", %{conn: conn} do assert Conn.get_resp_header(conn, "expect-ct") == [] end - test "referrer-policy header reflects configured value", %{conn: conn} do - resp = get(conn, "/api/v1/instance") + test "referrer-policy header reflects configured value", %{ + conn: conn, + base_config: base_config + } do + mock_config(base_config) + resp = get(conn, "/api/v1/instance") assert Conn.get_resp_header(resp, "referrer-policy") == ["same-origin"] - clear_config([:http_security, :referrer_policy], "no-referrer") + base_config + |> Keyword.put(:referrer_policy, "no-referrer") + |> mock_config resp = get(conn, "/api/v1/instance") assert Conn.get_resp_header(resp, "referrer-policy") == ["no-referrer"] end - test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} do + test "it sends `report-to` & `report-uri` CSP response headers", %{ + conn: conn, + base_config: base_config + } do + mock_config(base_config) + conn = get(conn, "/api/v1/instance") [csp] = Conn.get_resp_header(conn, "content-security-policy") @@ -65,7 +118,11 @@ test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} d "{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}" end - test "default values for img-src and media-src with disabled media proxy", %{conn: conn} do + test "default values for img-src and media-src with disabled media proxy", %{ + conn: conn, + base_config: base_config + } do + mock_config(base_config) conn = get(conn, "/api/v1/instance") [csp] = Conn.get_resp_header(conn, "content-security-policy") @@ -73,60 +130,129 @@ test "default values for img-src and media-src with disabled media proxy", %{con assert csp =~ "img-src 'self' data: blob: https:;" end - test "it sets the Service-Worker-Allowed header", %{conn: conn} do - clear_config([:http_security, :enabled], true) - clear_config([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"}) + test "it sets the Service-Worker-Allowed header", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:enabled, true) - clear_config([:frontends, :available], %{ - "fedi-fe" => %{ - "name" => "fedi-fe", - "custom-http-headers" => [{"service-worker-allowed", "/"}] - } - }) + additional_config = + %{} + |> Map.put([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"}) + |> Map.put( + [:frontends, :available], + %{ + "fedi-fe" => %{ + "name" => "fedi-fe", + "custom-http-headers" => [{"service-worker-allowed", "/"}] + } + } + ) + mock_config(base_config, additional_config) conn = get(conn, "/api/v1/instance") assert Conn.get_resp_header(conn, "service-worker-allowed") == ["/"] end end describe "img-src and media-src" do - setup do - clear_config([:http_security, :enabled], true) - clear_config([:media_proxy, :enabled], true) - clear_config([:media_proxy, :proxy_opts, :redirect_on_failure], false) + setup %{base_config: base_config} do + base_config = + base_config + |> Keyword.put(:enabled, true) + + additional_config = + %{} + |> Map.put([:media_proxy, :enabled], true) + |> Map.put([:media_proxy, :proxy_opts, :redirect_on_failure], false) + |> Map.put([:media_proxy, :whitelist], []) + + %{base_config: base_config, additional_config: additional_config} end - test "media_proxy with base_url", %{conn: conn} do + test "media_proxy with 
base_url", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do url = "https://example.com" - clear_config([:media_proxy, :base_url], url) + + additional_config = + additional_config + |> Map.put([:media_proxy, :base_url], url) + + mock_config(base_config, additional_config) + assert_media_img_src(conn, url) end - test "upload with base url", %{conn: conn} do + test "upload with base url", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do url = "https://example2.com" - clear_config([Pleroma.Upload, :base_url], url) + + additional_config = + additional_config + |> Map.put([Pleroma.Upload, :base_url], url) + + mock_config(base_config, additional_config) + assert_media_img_src(conn, url) end - test "with S3 public endpoint", %{conn: conn} do + test "with S3 public endpoint", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do url = "https://example3.com" - clear_config([Pleroma.Uploaders.S3, :public_endpoint], url) + + additional_config = + additional_config + |> Map.put([Pleroma.Uploaders.S3, :public_endpoint], url) + + mock_config(base_config, additional_config) assert_media_img_src(conn, url) end - test "with captcha endpoint", %{conn: conn} do - clear_config([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com") + test "with captcha endpoint", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do + additional_config = + additional_config + |> Map.put([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com") + |> Map.put([Pleroma.Captcha, :method], Pleroma.Captcha.Mock) + + mock_config(base_config, additional_config) assert_media_img_src(conn, "https://captcha.com") end - test "with media_proxy whitelist", %{conn: conn} do - clear_config([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"]) + test "with media_proxy whitelist", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do + additional_config = + additional_config + |> Map.put([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"]) + + mock_config(base_config, additional_config) assert_media_img_src(conn, "https://example7.com https://example6.com") end # TODO: delete after removing support bare domains for media proxy whitelist - test "with media_proxy bare domains whitelist (deprecated)", %{conn: conn} do - clear_config([:media_proxy, :whitelist], ["example4.com", "example5.com"]) + test "with media_proxy bare domains whitelist (deprecated)", %{ + conn: conn, + base_config: base_config, + additional_config: additional_config + } do + additional_config = + additional_config + |> Map.put([:media_proxy, :whitelist], ["example4.com", "example5.com"]) + + mock_config(base_config, additional_config) assert_media_img_src(conn, "example5.com example4.com") end end @@ -138,8 +264,10 @@ defp assert_media_img_src(conn, url) do assert csp =~ "img-src 'self' data: blob: #{url};" end - test "it does not send CSP headers when disabled", %{conn: conn} do - clear_config([:http_security, :enabled], false) + test "it does not send CSP headers when disabled", %{conn: conn, base_config: base_config} do + base_config + |> Keyword.put(:enabled, false) + |> mock_config conn = get(conn, "/api/v1/instance") From fc7ce339edc40cb791d321a20f01f2568337b845 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 21:28:20 +0400 Subject: [PATCH 63/93] Cheatsheet: Add allow_unsafe_eval --- 
docs/configuration/cheatsheet.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index ca2ce6369..78997c4db 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -472,6 +472,7 @@ This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls start * ``ct_max_age``: The maximum age for the `Expect-CT` header if sent. * ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`. * ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header. +* `allow_unsafe_eval`: Adds `wasm-unsafe-eval` to the CSP header. Needed for some non-essential frontend features like Flash emulation. ### Pleroma.Web.Plugs.RemoteIp From c67b41415b369d67c25356205bf69de2d99a291c Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Sun, 11 Jun 2023 20:24:18 +0400 Subject: [PATCH 64/93] Changelog: Add changelog entry. --- changelog.d/3904.security | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3904.security diff --git a/changelog.d/3904.security b/changelog.d/3904.security new file mode 100644 index 000000000..04836d4e8 --- /dev/null +++ b/changelog.d/3904.security @@ -0,0 +1 @@ +HTTP Security: By default, don't allow unsafe-eval. The setting needs to be changed to allow Flash emulation. From 0847d9ebafa38007aeef0a6677588211994ab546 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 14:35:30 +0000 Subject: [PATCH 65/93] Oban queue simplification --- changelog.d/oban-queues.change | 1 + config/config.exs | 12 +------ lib/pleroma/scheduled_activity.ex | 2 +- lib/pleroma/web/federator.ex | 2 +- .../workers/attachments_cleanup_worker.ex | 2 +- lib/pleroma/workers/backup_worker.ex | 2 +- .../workers/cron/new_users_digest_worker.ex | 2 +- lib/pleroma/workers/mailer_worker.ex | 2 +- lib/pleroma/workers/mute_expire_worker.ex | 2 +- lib/pleroma/workers/poll_worker.ex | 2 +- lib/pleroma/workers/purge_expired_activity.ex | 4 +-- lib/pleroma/workers/purge_expired_filter.ex | 4 +-- lib/pleroma/workers/purge_expired_token.ex | 2 +- lib/pleroma/workers/remote_fetcher_worker.ex | 2 +- .../workers/rich_media_expiration_worker.ex | 2 +- .../workers/scheduled_activity_worker.ex | 2 +- .../20240527144418_oban_queues_refactor.exs | 32 +++++++++++++++++++ 17 files changed, 50 insertions(+), 27 deletions(-) create mode 100644 changelog.d/oban-queues.change create mode 100644 priv/repo/migrations/20240527144418_oban_queues_refactor.exs diff --git a/changelog.d/oban-queues.change b/changelog.d/oban-queues.change new file mode 100644 index 000000000..16df6409a --- /dev/null +++ b/changelog.d/oban-queues.change @@ -0,0 +1 @@ +Oban queues have refactored to simplify the queue design diff --git a/config/config.exs b/config/config.exs index b93de52e1..b52021373 100644 --- a/config/config.exs +++ b/config/config.exs @@ -574,24 +574,14 @@ log: false, queues: [ activity_expiration: 10, - token_expiration: 5, - filter_expiration: 1, - backup: 1, federator_incoming: 5, federator_outgoing: 5, ingestion_queue: 50, web_push: 50, - mailer: 10, transmogrifier: 20, - scheduled_activities: 10, - poll_notifications: 10, background: 5, - remote_fetcher: 2, - attachments_cleanup: 1, - new_users_digest: 1, - mute_expire: 5, search_indexing: [limit: 10, paused: true], - rich_media_expiration: 2 + slow: 1 ], plugins: [Oban.Plugins.Pruner], crontab: [ diff --git a/lib/pleroma/scheduled_activity.ex b/lib/pleroma/scheduled_activity.ex index 63c6cb45b..c361d7d89 
100644 --- a/lib/pleroma/scheduled_activity.ex +++ b/lib/pleroma/scheduled_activity.ex @@ -204,7 +204,7 @@ def due_activities(offset \\ 0) do def job_query(scheduled_activity_id) do from(j in Oban.Job, - where: j.queue == "scheduled_activities", + where: j.queue == "federator_outgoing", where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id)) ) end diff --git a/lib/pleroma/web/federator.ex b/lib/pleroma/web/federator.ex index 1f2c3835a..4b30fd21d 100644 --- a/lib/pleroma/web/federator.ex +++ b/lib/pleroma/web/federator.ex @@ -44,7 +44,7 @@ def incoming_ap_doc(%{params: params, req_headers: req_headers}) do end def incoming_ap_doc(%{"type" => "Delete"} = params) do - ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3) + ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3, queue: :slow) end def incoming_ap_doc(params) do diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex index 4c1764053..0b570b70b 100644 --- a/lib/pleroma/workers/attachments_cleanup_worker.ex +++ b/lib/pleroma/workers/attachments_cleanup_worker.ex @@ -8,7 +8,7 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do alias Pleroma.Object alias Pleroma.Repo - use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup" + use Pleroma.Workers.WorkerHelper, queue: "slow" @impl Oban.Worker def perform(%Job{ diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex index a485ddb4b..54ac31a3c 100644 --- a/lib/pleroma/workers/backup_worker.ex +++ b/lib/pleroma/workers/backup_worker.ex @@ -3,7 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Workers.BackupWorker do - use Oban.Worker, queue: :backup, max_attempts: 1 + use Oban.Worker, queue: :slow, max_attempts: 1 alias Oban.Job alias Pleroma.User.Backup diff --git a/lib/pleroma/workers/cron/new_users_digest_worker.ex b/lib/pleroma/workers/cron/new_users_digest_worker.ex index 1c3e445aa..d2abb2d3b 100644 --- a/lib/pleroma/workers/cron/new_users_digest_worker.ex +++ b/lib/pleroma/workers/cron/new_users_digest_worker.ex @@ -9,7 +9,7 @@ defmodule Pleroma.Workers.Cron.NewUsersDigestWorker do import Ecto.Query - use Pleroma.Workers.WorkerHelper, queue: "mailer" + use Pleroma.Workers.WorkerHelper, queue: "background" @impl Oban.Worker def perform(_job) do diff --git a/lib/pleroma/workers/mailer_worker.ex b/lib/pleroma/workers/mailer_worker.ex index 940716558..652bf77e0 100644 --- a/lib/pleroma/workers/mailer_worker.ex +++ b/lib/pleroma/workers/mailer_worker.ex @@ -3,7 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Workers.MailerWorker do - use Pleroma.Workers.WorkerHelper, queue: "mailer" + use Pleroma.Workers.WorkerHelper, queue: "background" @impl Oban.Worker def perform(%Job{args: %{"op" => "email", "encoded_email" => encoded_email, "config" => config}}) do diff --git a/lib/pleroma/workers/mute_expire_worker.ex b/lib/pleroma/workers/mute_expire_worker.ex index 8ce458d48..8ad287a7f 100644 --- a/lib/pleroma/workers/mute_expire_worker.ex +++ b/lib/pleroma/workers/mute_expire_worker.ex @@ -3,7 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Workers.MuteExpireWorker do - use Pleroma.Workers.WorkerHelper, queue: "mute_expire" + use Pleroma.Workers.WorkerHelper, queue: "background" @impl Oban.Worker def perform(%Job{args: %{"op" => "unmute_user", "muter_id" => muter_id, "mutee_id" => mutee_id}}) do diff --git a/lib/pleroma/workers/poll_worker.ex 
b/lib/pleroma/workers/poll_worker.ex index 022d026f8..70df54193 100644 --- a/lib/pleroma/workers/poll_worker.ex +++ b/lib/pleroma/workers/poll_worker.ex @@ -6,7 +6,7 @@ defmodule Pleroma.Workers.PollWorker do @moduledoc """ Generates notifications when a poll ends. """ - use Pleroma.Workers.WorkerHelper, queue: "poll_notifications" + use Pleroma.Workers.WorkerHelper, queue: "background" alias Pleroma.Activity alias Pleroma.Notification diff --git a/lib/pleroma/workers/purge_expired_activity.ex b/lib/pleroma/workers/purge_expired_activity.ex index e554684fe..a65593b6e 100644 --- a/lib/pleroma/workers/purge_expired_activity.ex +++ b/lib/pleroma/workers/purge_expired_activity.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredActivity do Worker which purges expired activity. """ - use Oban.Worker, queue: :activity_expiration, max_attempts: 1, unique: [period: :infinity] + use Oban.Worker, queue: :slow, max_attempts: 1, unique: [period: :infinity] import Ecto.Query @@ -59,7 +59,7 @@ defp find_user(ap_id) do def get_expiration(id) do from(j in Oban.Job, where: j.state == "scheduled", - where: j.queue == "activity_expiration", + where: j.queue == "slow", where: fragment("?->>'activity_id' = ?", j.args, ^id) ) |> Pleroma.Repo.one() diff --git a/lib/pleroma/workers/purge_expired_filter.ex b/lib/pleroma/workers/purge_expired_filter.ex index 9114aeb7f..1f6931e4c 100644 --- a/lib/pleroma/workers/purge_expired_filter.ex +++ b/lib/pleroma/workers/purge_expired_filter.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredFilter do Worker which purges expired filters """ - use Oban.Worker, queue: :filter_expiration, max_attempts: 1, unique: [period: :infinity] + use Oban.Worker, queue: :background, max_attempts: 1, unique: [period: :infinity] import Ecto.Query @@ -38,7 +38,7 @@ def timeout(_job), do: :timer.seconds(5) def get_expiration(id) do from(j in Job, where: j.state == "scheduled", - where: j.queue == "filter_expiration", + where: j.queue == "background", where: fragment("?->'filter_id' = ?", j.args, ^id) ) |> Repo.one() diff --git a/lib/pleroma/workers/purge_expired_token.ex b/lib/pleroma/workers/purge_expired_token.ex index 2ccd9e80b..1854bf561 100644 --- a/lib/pleroma/workers/purge_expired_token.ex +++ b/lib/pleroma/workers/purge_expired_token.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredToken do Worker which purges expired OAuth tokens """ - use Oban.Worker, queue: :token_expiration, max_attempts: 1 + use Oban.Worker, queue: :background, max_attempts: 1 @spec enqueue(%{token_id: integer(), valid_until: DateTime.t(), mod: module()}) :: {:ok, Oban.Job.t()} | {:error, Ecto.Changeset.t()} diff --git a/lib/pleroma/workers/remote_fetcher_worker.ex b/lib/pleroma/workers/remote_fetcher_worker.ex index c26418483..ed04c54b2 100644 --- a/lib/pleroma/workers/remote_fetcher_worker.ex +++ b/lib/pleroma/workers/remote_fetcher_worker.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Workers.RemoteFetcherWorker do alias Pleroma.Object.Fetcher - use Pleroma.Workers.WorkerHelper, queue: "remote_fetcher" + use Pleroma.Workers.WorkerHelper, queue: "background" @impl Oban.Worker def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do diff --git a/lib/pleroma/workers/rich_media_expiration_worker.ex b/lib/pleroma/workers/rich_media_expiration_worker.ex index d7ae497a7..0b74687cf 100644 --- a/lib/pleroma/workers/rich_media_expiration_worker.ex +++ b/lib/pleroma/workers/rich_media_expiration_worker.ex @@ -6,7 +6,7 @@ defmodule Pleroma.Workers.RichMediaExpirationWorker do alias 
Pleroma.Web.RichMedia.Card use Oban.Worker, - queue: :rich_media_expiration + queue: :background @impl Oban.Worker def perform(%Job{args: %{"url" => url} = _args}) do diff --git a/lib/pleroma/workers/scheduled_activity_worker.ex b/lib/pleroma/workers/scheduled_activity_worker.ex index 4df84d00f..ab62686f4 100644 --- a/lib/pleroma/workers/scheduled_activity_worker.ex +++ b/lib/pleroma/workers/scheduled_activity_worker.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Workers.ScheduledActivityWorker do The worker to post scheduled activity. """ - use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities" + use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing" alias Pleroma.Repo alias Pleroma.ScheduledActivity diff --git a/priv/repo/migrations/20240527144418_oban_queues_refactor.exs b/priv/repo/migrations/20240527144418_oban_queues_refactor.exs new file mode 100644 index 000000000..64ee28dfd --- /dev/null +++ b/priv/repo/migrations/20240527144418_oban_queues_refactor.exs @@ -0,0 +1,32 @@ +defmodule Pleroma.Repo.Migrations.ObanQueuesRefactor do + use Ecto.Migration + + @changed_queues [ + {"attachments_cleanup", "slow"}, + {"mailer", "background"}, + {"mute_expire", "background"}, + {"poll_notifications", "background"}, + {"activity_expiration", "slow"}, + {"filter_expiration", "background"}, + {"token_expiration", "background"}, + {"remote_fetcher", "background"}, + {"rich_media_expiration", "background"} + ] + + def up do + Enum.each(@changed_queues, fn {old, new} -> + execute("UPDATE oban_jobs SET queue = '#{new}' WHERE queue = '#{old}';") + end) + + # Handled special as reverting this would not be ideal and leaving it is harmless + execute( + "UPDATE oban_jobs SET queue = 'federator_outgoing' WHERE queue = 'scheduled_activities';" + ) + end + + def down do + # Just move all slow queue jobs to background queue if we are reverting + # as the slow queue will not be processing jobs + execute("UPDATE oban_jobs SET queue = 'background' WHERE queue = 'slow';") + end +end From f63e44b8bc8e4e2f21fe21f1407a85d072dcab6d Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 13:46:15 -0400 Subject: [PATCH 66/93] Fix Oban related tests --- test/pleroma/scheduled_activity_test.exs | 3 +-- .../scheduled_activity_controller_test.exs | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/test/pleroma/scheduled_activity_test.exs b/test/pleroma/scheduled_activity_test.exs index 4818e8bcf..aaf643cfc 100644 --- a/test/pleroma/scheduled_activity_test.exs +++ b/test/pleroma/scheduled_activity_test.exs @@ -31,8 +31,7 @@ test "scheduled activities with jobs when ScheduledActivity enabled" do {:ok, sa1} = ScheduledActivity.create(user, attrs) {:ok, sa2} = ScheduledActivity.create(user, attrs) - jobs = - Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args)) + jobs = Repo.all(from(j in Oban.Job, where: j.queue == "federator_outgoing", select: j.args)) assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}] end diff --git a/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs index 632242221..2d6b2aee2 100644 --- a/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do + use 
Oban.Testing, repo: Pleroma.Repo use Pleroma.Web.ConnCase, async: true alias Pleroma.Repo @@ -78,7 +79,7 @@ test "updates a scheduled activity" do } ) - job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities")) + job = Repo.one(from(j in Oban.Job, where: j.queue == "federator_outgoing")) assert job.args == %{"activity_id" => scheduled_activity.id} assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at) @@ -124,9 +125,11 @@ test "deletes a scheduled activity" do } ) - job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities")) - - assert job.args == %{"activity_id" => scheduled_activity.id} + assert_enqueued( + worker: Pleroma.Workers.ScheduledActivityWorker, + args: %{"activity_id" => scheduled_activity.id}, + queue: :federator_outgoing + ) res_conn = conn @@ -135,7 +138,11 @@ test "deletes a scheduled activity" do assert %{} = json_response_and_validate_schema(res_conn, 200) refute Repo.get(ScheduledActivity, scheduled_activity.id) - refute Repo.get(Oban.Job, job.id) + + refute_enqueued( + worker: Pleroma.Workers.ScheduledActivityWorker, + args: %{"activity_id" => scheduled_activity.id} + ) res_conn = conn From 29eac86dc0bb246e983afe4209332194bf11bed0 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 13:53:22 -0400 Subject: [PATCH 67/93] Logger metadata changelog --- changelog.d/logger-metadata.add | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/logger-metadata.add diff --git a/changelog.d/logger-metadata.add b/changelog.d/logger-metadata.add new file mode 100644 index 000000000..6c627a972 --- /dev/null +++ b/changelog.d/logger-metadata.add @@ -0,0 +1 @@ +Logger metadata is now attached to some logs to help with troubleshooting and analysis From 6b8c15a4a1fdbc2a2a4ef194d9519e717470c632 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 14:11:42 -0400 Subject: [PATCH 68/93] Remove MediaProxyWarmingPolicy config for ConcurrentLimiter as we are not using it --- config/config.exs | 1 - .../web/activity_pub/mrf/media_proxy_warming_policy.ex | 6 +----- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/config/config.exs b/config/config.exs index a025defeb..3c35f439a 100644 --- a/config/config.exs +++ b/config/config.exs @@ -902,7 +902,6 @@ config :pleroma, ConcurrentLimiter, [ {Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]}, - {Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]}, {Pleroma.Search, [max_running: 30, max_waiting: 50]} ] diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex index e5eb6896a..0c5b53def 100644 --- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex @@ -22,11 +22,7 @@ defp prefetch(url) do Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}") - if Pleroma.Config.get(:env) == :test do - fetch(prefetch_url) - else - Task.start(fn -> fetch(prefetch_url) end) - end + fetch(prefetch_url) end end From ba511a30b923cc9248686702f75a4041d69c7bee Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 14:12:38 -0400 Subject: [PATCH 69/93] RichMedia use of ConcurrentLimiter was removed in the refactor --- config/config.exs | 1 - 1 file changed, 1 deletion(-) diff --git a/config/config.exs b/config/config.exs index 3c35f439a..e7e751d8a 100644 --- a/config/config.exs +++ b/config/config.exs @@ -901,7 +901,6 @@ 
process_chunk_size: 100 config :pleroma, ConcurrentLimiter, [ - {Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]}, {Pleroma.Search, [max_running: 30, max_waiting: 50]} ] From 81e44ced0c7251b5a6b585f297e1e00fad08c6d1 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Mon, 27 May 2024 22:13:20 +0400 Subject: [PATCH 70/93] HTTPSecurityPlug: Fix tests --- config/test.exs | 2 +- lib/pleroma/web/plugs/http_security_plug.ex | 4 ++-- test/pleroma/web/plugs/http_security_plug_test.exs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/config/test.exs b/config/test.exs index b5c9c6e4a..6c88ad3c6 100644 --- a/config/test.exs +++ b/config/test.exs @@ -154,7 +154,7 @@ config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock -config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.UnstubbedConfigMock +config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock peer_module = if String.to_integer(System.otp_release()) >= 25 do diff --git a/lib/pleroma/web/plugs/http_security_plug.ex b/lib/pleroma/web/plugs/http_security_plug.ex index a1dc6c02a..38f6c511e 100644 --- a/lib/pleroma/web/plugs/http_security_plug.ex +++ b/lib/pleroma/web/plugs/http_security_plug.ex @@ -116,7 +116,7 @@ defp csp_string do end connect_src = - if @config_impl.get(:env) == :dev do + if @config_impl.get([:env]) == :dev do [connect_src, " http://localhost:3035/"] else connect_src @@ -124,7 +124,7 @@ defp csp_string do script_src = if @config_impl.get([:http_security, :allow_unsafe_eval]) do - if @config_impl.get(:env) == :dev do + if @config_impl.get([:env]) == :dev do "script-src 'self' 'unsafe-eval'" else "script-src 'self' 'wasm-unsafe-eval'" diff --git a/test/pleroma/web/plugs/http_security_plug_test.exs b/test/pleroma/web/plugs/http_security_plug_test.exs index 80ad1fa7d..11a351a41 100644 --- a/test/pleroma/web/plugs/http_security_plug_test.exs +++ b/test/pleroma/web/plugs/http_security_plug_test.exs @@ -15,7 +15,7 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do end defp mock_config(config, additional \\ %{}) do - Pleroma.UnstubbedConfigMock + Pleroma.StaticStubbedConfigMock |> stub(:get, fn [:http_security, key] -> config[key] key -> additional[key] From bb86a01b9b5889155f60f08143755dd101d925f0 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 27 May 2024 15:20:47 -0400 Subject: [PATCH 71/93] Credo --- lib/pleroma/web/activity_pub/activity_pub_controller.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index d2b2cae0b..e6161455d 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -522,7 +522,7 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do conn end - defp log_inbox_metadata(conn = %{params: %{"actor" => actor, "type" => type}}, _) do + defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do Logger.metadata(actor: actor, type: type) conn end From 73d58c22d4a9a87539cf1c3a33083464fc4b8540 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Tue, 28 May 2024 08:09:19 +0400 Subject: [PATCH 72/93] Linting --- lib/pleroma/web/activity_pub/activity_pub_controller.ex | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index d2b2cae0b..e6161455d 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -522,7 +522,7 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do conn end - defp log_inbox_metadata(conn = %{params: %{"actor" => actor, "type" => type}}, _) do + defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do Logger.metadata(actor: actor, type: type) conn end From f5978da67633110acbc6494ff6f07b3f07424779 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Tue, 28 May 2024 14:00:25 +0400 Subject: [PATCH 73/93] HTTPSignaturePlugTest: Rewrite to use mox. --- config/test.exs | 4 + lib/pleroma/http_signatures_api.ex | 4 + lib/pleroma/web/plugs/http_signature_plug.ex | 19 +- .../web/plugs/http_signature_plug_test.exs | 219 +++++++++--------- test/support/data_case.ex | 1 + test/support/http_signatures_proxy.ex | 9 + test/support/mocks.ex | 1 + 7 files changed, 144 insertions(+), 113 deletions(-) create mode 100644 lib/pleroma/http_signatures_api.ex create mode 100644 test/support/http_signatures_proxy.ex diff --git a/config/test.exs b/config/test.exs index 6c88ad3c6..0d4c82e0e 100644 --- a/config/test.exs +++ b/config/test.exs @@ -155,6 +155,10 @@ config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock +config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, config_impl: Pleroma.StaticStubbedConfigMock + +config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, + http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock peer_module = if String.to_integer(System.otp_release()) >= 25 do diff --git a/lib/pleroma/http_signatures_api.ex b/lib/pleroma/http_signatures_api.ex new file mode 100644 index 000000000..8e73dc98e --- /dev/null +++ b/lib/pleroma/http_signatures_api.ex @@ -0,0 +1,4 @@ +defmodule Pleroma.HTTPSignaturesAPI do + @callback validate_conn(conn :: Plug.Conn.t()) :: boolean + @callback signature_for_conn(conn :: Plug.Conn.t()) :: map +end diff --git a/lib/pleroma/web/plugs/http_signature_plug.ex b/lib/pleroma/web/plugs/http_signature_plug.ex index 71b2a5f51..6bf2dd432 100644 --- a/lib/pleroma/web/plugs/http_signature_plug.ex +++ b/lib/pleroma/web/plugs/http_signature_plug.ex @@ -8,11 +8,17 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do import Plug.Conn import Phoenix.Controller, only: [get_format: 1, text: 2] - alias Pleroma.Config alias Pleroma.Web.ActivityPub.MRF require Logger + @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) + @http_signatures_impl Application.compile_env( + :pleroma, + [__MODULE__, :http_signatures_impl], + HTTPSignatures + ) + def init(options) do options end @@ -41,7 +47,7 @@ defp validate_signature(conn, request_target) do |> put_req_header("(request-target)", request_target) |> put_req_header("@request-target", request_target) - HTTPSignatures.validate_conn(conn) + @http_signatures_impl.validate_conn(conn) end defp validate_signature(conn) do @@ -108,9 +114,9 @@ defp has_signature_header?(conn) do defp maybe_require_signature(%{assigns: %{valid_signature: true}} = conn), do: conn defp maybe_require_signature(%{remote_ip: remote_ip} = conn) do - if 
Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do + if @config_impl.get([:activitypub, :authorized_fetch_mode], false) do exceptions = - Pleroma.Config.get([:activitypub, :authorized_fetch_mode_exceptions], []) + @config_impl.get([:activitypub, :authorized_fetch_mode_exceptions], []) |> Enum.map(&InetHelper.parse_cidr/1) if Enum.any?(exceptions, fn x -> InetCidr.contains?(x, remote_ip) end) do @@ -129,7 +135,8 @@ defp maybe_require_signature(%{remote_ip: remote_ip} = conn) do defp maybe_filter_requests(%{halted: true} = conn), do: conn defp maybe_filter_requests(conn) do - if Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do + if @config_impl.get([:activitypub, :authorized_fetch_mode], false) and + conn.assigns[:actor_id] do %{host: host} = URI.parse(conn.assigns.actor_id) if MRF.subdomain_match?(rejected_domains(), host) do @@ -145,7 +152,7 @@ defp maybe_filter_requests(conn) do end defp rejected_domains do - Config.get([:instance, :rejected_instances]) + @config_impl.get([:instance, :rejected_instances]) |> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples() |> Pleroma.Web.ActivityPub.MRF.subdomains_regex() end diff --git a/test/pleroma/web/plugs/http_signature_plug_test.exs b/test/pleroma/web/plugs/http_signature_plug_test.exs index b871d956e..5f049dc45 100644 --- a/test/pleroma/web/plugs/http_signature_plug_test.exs +++ b/test/pleroma/web/plugs/http_signature_plug_test.exs @@ -3,89 +3,88 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do - use Pleroma.Web.ConnCase + use Pleroma.Web.ConnCase, async: true alias Pleroma.Web.Plugs.HTTPSignaturePlug + alias Pleroma.StubbedHTTPSignaturesMock, as: HTTPSignaturesMock + alias Pleroma.StaticStubbedConfigMock, as: ConfigMock import Plug.Conn import Phoenix.Controller, only: [put_format: 2] - import Mock + import Mox - test "it call HTTPSignatures to check validity if the actor signed it" do + test "it calls HTTPSignatures to check validity if the actor signed it" do params = %{"actor" => "http://mastodon.example.org/users/admin"} conn = build_conn(:get, "/doesntmattter", params) - with_mock HTTPSignatures, - validate_conn: fn _ -> true end, - signature_for_conn: fn _ -> - %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} - end do - conn = - conn - |> put_req_header( - "signature", - "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> put_format("activity+json") - |> HTTPSignaturePlug.call(%{}) + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) - assert conn.assigns.valid_signature == true - assert conn.halted == false - assert called(HTTPSignatures.validate_conn(:_)) - end + conn = + conn + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == false end describe "requires a signature when `authorized_fetch_mode` is enabled" do setup do - clear_config([:activitypub, :authorized_fetch_mode], true) - params = %{"actor" => "http://mastodon.example.org/users/admin"} conn = build_conn(:get, "/doesntmattter", params) |> put_format("activity+json") [conn: conn] end - test "when signature header is present", %{conn: conn} do - with_mock HTTPSignatures, - validate_conn: fn _ -> false end, - signature_for_conn: fn _ -> - %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} - end do - conn = - conn - |> put_req_header( - "signature", 
- "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> HTTPSignaturePlug.call(%{}) + test "when signature header is present", %{conn: orig_conn} do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> [] end) - assert conn.assigns.valid_signature == false - assert conn.halted == true - assert conn.status == 401 - assert conn.state == :sent - assert conn.resp_body == "Request not signed" - assert called(HTTPSignatures.validate_conn(:_)) - end + HTTPSignaturesMock + |> expect(:validate_conn, 2, fn _ -> false end) - with_mock HTTPSignatures, - validate_conn: fn _ -> true end, - signature_for_conn: fn _ -> - %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} - end do - conn = - conn - |> put_req_header( - "signature", - "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> HTTPSignaturePlug.call(%{}) + conn = + orig_conn + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> HTTPSignaturePlug.call(%{}) - assert conn.assigns.valid_signature == true - assert conn.halted == false - assert called(HTTPSignatures.validate_conn(:_)) - end + assert conn.assigns.valid_signature == false + assert conn.halted == true + assert conn.status == 401 + assert conn.state == :sent + assert conn.resp_body == "Request not signed" + + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) + + conn = + orig_conn + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == false end test "halts the connection when `signature` header is not present", %{conn: conn} do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> [] end) + conn = HTTPSignaturePlug.call(conn, %{}) assert conn.assigns[:valid_signature] == nil assert conn.halted == true @@ -95,65 +94,71 @@ test "halts the connection when `signature` header is not present", %{conn: conn end test "exempts specific IPs from `authorized_fetch_mode_exceptions`", %{conn: conn} do - clear_config([:activitypub, :authorized_fetch_mode_exceptions], ["192.168.0.0/24"]) + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> + ["192.168.0.0/24"] + end) + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) - with_mock HTTPSignatures, validate_conn: fn _ -> false end do - conn = - conn - |> Map.put(:remote_ip, {192, 168, 0, 1}) - |> put_req_header( - "signature", - "keyId=\"http://mastodon.example.org/users/admin#main-key" - ) - |> HTTPSignaturePlug.call(%{}) + HTTPSignaturesMock + |> expect(:validate_conn, 2, fn _ -> false end) - assert conn.remote_ip == {192, 168, 0, 1} - assert conn.halted == false - assert called(HTTPSignatures.validate_conn(:_)) - end - end - end - - test "rejects requests from `rejected_instances` when `authorized_fetch_mode` is enabled" do - clear_config([:activitypub, :authorized_fetch_mode], true) - clear_config([:instance, :rejected_instances], [{"mastodon.example.org", "no reason"}]) - - with_mock HTTPSignatures, - validate_conn: fn _ -> true 
end, - signature_for_conn: fn _ -> - %{"keyId" => "http://mastodon.example.org/users/admin#main-key"} - end do conn = - build_conn(:get, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"}) + conn + |> Map.put(:remote_ip, {192, 168, 0, 1}) |> put_req_header( "signature", "keyId=\"http://mastodon.example.org/users/admin#main-key" ) - |> put_format("activity+json") |> HTTPSignaturePlug.call(%{}) - assert conn.assigns.valid_signature == true - assert conn.halted == true - assert called(HTTPSignatures.validate_conn(:_)) - end - - with_mock HTTPSignatures, - validate_conn: fn _ -> true end, - signature_for_conn: fn _ -> - %{"keyId" => "http://allowed.example.org/users/admin#main-key"} - end do - conn = - build_conn(:get, "/doesntmattter", %{"actor" => "http://allowed.example.org/users/admin"}) - |> put_req_header( - "signature", - "keyId=\"http://allowed.example.org/users/admin#main-key" - ) - |> put_format("activity+json") - |> HTTPSignaturePlug.call(%{}) - - assert conn.assigns.valid_signature == true + assert conn.remote_ip == {192, 168, 0, 1} assert conn.halted == false - assert called(HTTPSignatures.validate_conn(:_)) end end + + test "rejects requests from `rejected_instances` when `authorized_fetch_mode` is enabled" do + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:instance, :rejected_instances] -> + [{"mastodon.example.org", "no reason"}] + end) + + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) + + conn = + build_conn(:get, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"}) + |> put_req_header( + "signature", + "keyId=\"http://mastodon.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == true + + ConfigMock + |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end) + |> expect(:get, fn [:instance, :rejected_instances] -> + [{"mastodon.example.org", "no reason"}] + end) + + HTTPSignaturesMock + |> expect(:validate_conn, fn _ -> true end) + + conn = + build_conn(:get, "/doesntmattter", %{"actor" => "http://allowed.example.org/users/admin"}) + |> put_req_header( + "signature", + "keyId=\"http://allowed.example.org/users/admin#main-key" + ) + |> put_format("activity+json") + |> HTTPSignaturePlug.call(%{}) + + assert conn.assigns.valid_signature == true + assert conn.halted == false + end end diff --git a/test/support/data_case.ex b/test/support/data_case.ex index 14403f0b8..52d4bef1a 100644 --- a/test/support/data_case.ex +++ b/test/support/data_case.ex @@ -116,6 +116,7 @@ def stub_pipeline do Mox.stub_with(Pleroma.Web.FederatorMock, Pleroma.Web.Federator) Mox.stub_with(Pleroma.ConfigMock, Pleroma.Config) Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Test.StaticConfig) + Mox.stub_with(Pleroma.StubbedHTTPSignaturesMock, Pleroma.Test.HTTPSignaturesProxy) end def ensure_local_uploader(context) do diff --git a/test/support/http_signatures_proxy.ex b/test/support/http_signatures_proxy.ex new file mode 100644 index 000000000..4c6b39d19 --- /dev/null +++ b/test/support/http_signatures_proxy.ex @@ -0,0 +1,9 @@ +defmodule Pleroma.Test.HTTPSignaturesProxy do + @behaviour Pleroma.HTTPSignaturesAPI + + @impl true + defdelegate validate_conn(conn), to: HTTPSignatures + + @impl true + defdelegate signature_for_conn(conn), to: HTTPSignatures +end diff --git a/test/support/mocks.ex b/test/support/mocks.ex index 
d906f0e1d..63cbc49ab 100644 --- a/test/support/mocks.ex +++ b/test/support/mocks.ex @@ -28,6 +28,7 @@ Mox.defmock(Pleroma.ConfigMock, for: Pleroma.Config.Getting) Mox.defmock(Pleroma.UnstubbedConfigMock, for: Pleroma.Config.Getting) Mox.defmock(Pleroma.StaticStubbedConfigMock, for: Pleroma.Config.Getting) +Mox.defmock(Pleroma.StubbedHTTPSignaturesMock, for: Pleroma.HTTPSignaturesAPI) Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging) From 8066645f711f38986b3d0f9c0b34d6956563da6a Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Tue, 28 May 2024 14:20:48 +0400 Subject: [PATCH 74/93] Linting --- test/pleroma/web/plugs/http_signature_plug_test.exs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/test/pleroma/web/plugs/http_signature_plug_test.exs b/test/pleroma/web/plugs/http_signature_plug_test.exs index 5f049dc45..9d07270bb 100644 --- a/test/pleroma/web/plugs/http_signature_plug_test.exs +++ b/test/pleroma/web/plugs/http_signature_plug_test.exs @@ -4,13 +4,14 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do use Pleroma.Web.ConnCase, async: true - alias Pleroma.Web.Plugs.HTTPSignaturePlug - alias Pleroma.StubbedHTTPSignaturesMock, as: HTTPSignaturesMock - alias Pleroma.StaticStubbedConfigMock, as: ConfigMock - import Plug.Conn - import Phoenix.Controller, only: [put_format: 2] + alias Pleroma.StaticStubbedConfigMock, as: ConfigMock + alias Pleroma.StubbedHTTPSignaturesMock, as: HTTPSignaturesMock + alias Pleroma.Web.Plugs.HTTPSignaturePlug + import Mox + import Phoenix.Controller, only: [put_format: 2] + import Plug.Conn test "it calls HTTPSignatures to check validity if the actor signed it" do params = %{"actor" => "http://mastodon.example.org/users/admin"} From 335691bae1a002c1a3bec956884fe665114285ec Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Tue, 28 May 2024 14:38:44 +0400 Subject: [PATCH 75/93] Add changelog --- changelog.d/authorized-fetch-rejections.add | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/authorized-fetch-rejections.add diff --git a/changelog.d/authorized-fetch-rejections.add b/changelog.d/authorized-fetch-rejections.add new file mode 100644 index 000000000..66e15a979 --- /dev/null +++ b/changelog.d/authorized-fetch-rejections.add @@ -0,0 +1 @@ +Add an option to reject certain domains when authorized fetch is enabled. From 0b864c3696e47ba1def6047905dad9065b0bee0e Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 08:49:34 -0400 Subject: [PATCH 76/93] Dialyzer: fix invalid @spec lib/pleroma/notification.ex:492:invalid_contract The @spec for the function does not match the success typing of the function. 
Function: Pleroma.Notification.get_notified_from_activity/2 Success typing: @spec get_notified_from_activity(_, _) :: [any()] --- lib/pleroma/notification.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/notification.ex b/lib/pleroma/notification.ex index 4f714b25f..f521a2998 100644 --- a/lib/pleroma/notification.ex +++ b/lib/pleroma/notification.ex @@ -489,7 +489,7 @@ def create_poll_notifications(%Activity{} = activity) do NOTE: might be called for FAKE Activities, see ActivityPub.Utils.get_notified_from_object/1 """ - @spec get_notified_from_activity(Activity.t(), boolean()) :: {list(User.t()), list(User.t())} + @spec get_notified_from_activity(Activity.t(), boolean()) :: list(User.t()) def get_notified_from_activity(activity, local_only \\ true) def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, local_only) From 42c5f7c74e93b7b489456578f8285d06320c15dc Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 08:55:18 -0400 Subject: [PATCH 77/93] Dialyzer: fix invalid @spec The callback already defines the @spec and these do not match it. lib/pleroma/upload/filter/exiftool/strip_location.ex:12:callback_spec_type_mismatch The @spec return type does not match the expected return type for filter/1 callback in Pleroma.Upload.Filter behaviour. Actual: @spec filter(...) :: {:ok, _} Expected: @spec filter(...) :: {:error, _} | {:ok, :filtered | :noop} | {:ok, :filtered, struct()} --- lib/pleroma/upload/filter/exiftool/strip_location.ex | 2 -- lib/pleroma/upload/filter/mogrifun.ex | 1 - lib/pleroma/upload/filter/mogrify.ex | 1 - 3 files changed, 4 deletions(-) diff --git a/lib/pleroma/upload/filter/exiftool/strip_location.ex b/lib/pleroma/upload/filter/exiftool/strip_location.ex index f2bcc4622..8becee712 100644 --- a/lib/pleroma/upload/filter/exiftool/strip_location.ex +++ b/lib/pleroma/upload/filter/exiftool/strip_location.ex @@ -9,8 +9,6 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do """ @behaviour Pleroma.Upload.Filter - @spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()} - # Formats not compatible with exiftool at this time def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop} def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop} diff --git a/lib/pleroma/upload/filter/mogrifun.ex b/lib/pleroma/upload/filter/mogrifun.ex index a0f247b70..9716580a8 100644 --- a/lib/pleroma/upload/filter/mogrifun.ex +++ b/lib/pleroma/upload/filter/mogrifun.ex @@ -38,7 +38,6 @@ defmodule Pleroma.Upload.Filter.Mogrifun do [{"fill", "yellow"}, {"tint", "40"}] ] - @spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()} def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do try do Filter.Mogrify.do_filter(file, [Enum.random(@filters)]) diff --git a/lib/pleroma/upload/filter/mogrify.ex b/lib/pleroma/upload/filter/mogrify.ex index 06efbf321..d1e166022 100644 --- a/lib/pleroma/upload/filter/mogrify.ex +++ b/lib/pleroma/upload/filter/mogrify.ex @@ -8,7 +8,6 @@ defmodule Pleroma.Upload.Filter.Mogrify do @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()} @type conversions :: conversion() | [conversion()] - @spec filter(Pleroma.Upload.t()) :: {:ok, :atom} | {:error, String.t()} def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do try do do_filter(file, Pleroma.Config.get!([__MODULE__, :args])) From f8ce639e3f76257097793c666d3ebf8f22539a30 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: 
Tue, 28 May 2024 09:30:19 -0400 Subject: [PATCH 78/93] Dialyzer: guard clause can never succeed lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex:106:guard_fail The guard clause: when _ :: [ binary() | [string() | char()] | {string() | integer(), string()} | {{byte(), byte(), byte(), byte()}, integer(), binary()} | {integer(), integer(), integer(), string() | byte()} | {integer(), integer(), string(), string(), string(), string()} | {string(), string(), integer(), integer(), integer(), integer(), integer()} | {char(), char(), char(), char(), char(), char(), char(), char()} ] === nil can never succeed. --- lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex index 9543cc545..7c6bb888f 100644 --- a/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex @@ -103,7 +103,11 @@ defp check_rbl(%{host: actor_host}, object) do {:ok, object} else Task.start(fn -> - reason = rblquery(query, :txt) || "undefined" + reason = + case rblquery(query, :txt) do + [[result]] -> result + _ -> "undefined" + end Logger.warning( "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}" From 18835bf7012e8e234eb27456a437f4d1e8796645 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 09:38:36 -0400 Subject: [PATCH 79/93] Use the configured http client options for mediaproxy --- lib/pleroma/helpers/media_helper.ex | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/pleroma/helpers/media_helper.ex b/lib/pleroma/helpers/media_helper.ex index e44114d9d..0ac07fa41 100644 --- a/lib/pleroma/helpers/media_helper.ex +++ b/lib/pleroma/helpers/media_helper.ex @@ -25,7 +25,7 @@ def missing_dependencies do end def image_resize(url, options) do - with {:ok, env} <- HTTP.get(url, [], pool: :media), + with {:ok, env} <- HTTP.get(url, [], http_client_opts()), {:ok, resized} <- Operation.thumbnail_buffer(env.body, options.max_width, height: options.max_height, @@ -46,7 +46,7 @@ def image_resize(url, options) do def video_framegrab(url) do with executable when is_binary(executable) <- System.find_executable("ffmpeg"), false <- @cachex.exists?(:failed_media_helper_cache, url), - {:ok, env} <- HTTP.get(url, [], pool: :media), + {:ok, env} <- HTTP.get(url, [], http_client_opts()), {:ok, pid} <- StringIO.open(env.body) do body_stream = IO.binstream(pid, 1) @@ -84,4 +84,6 @@ def video_framegrab(url) do {:error, _} = error -> error end end + + defp http_client_opts, do: Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media) end From 17ebb2df8404474ef66c6a38e974143166b5e49b Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 09:43:35 -0400 Subject: [PATCH 80/93] Dialyzer: fix pattern matches preventing video thumbnailing from working lib/pleroma/web/media_proxy/media_proxy_controller.ex:154:pattern_match The pattern can never match the type. 
Pattern:
{:ok, _thumbnail_binary}

Type:
{:error, boolean() | {:ffmpeg, :command_not_found}}
---
 changelog.d/video-thumbs.fix        |  1 +
 lib/pleroma/helpers/media_helper.ex | 10 +++++-----
 2 files changed, 6 insertions(+), 5 deletions(-)
 create mode 100644 changelog.d/video-thumbs.fix

diff --git a/changelog.d/video-thumbs.fix b/changelog.d/video-thumbs.fix
new file mode 100644
index 000000000..03e862f3d
--- /dev/null
+++ b/changelog.d/video-thumbs.fix
@@ -0,0 +1 @@
+Video thumbnails were not being generated due to a negative cache lookup logic error
diff --git a/lib/pleroma/helpers/media_helper.ex b/lib/pleroma/helpers/media_helper.ex
index 0ac07fa41..8566ab3ea 100644
--- a/lib/pleroma/helpers/media_helper.ex
+++ b/lib/pleroma/helpers/media_helper.ex
@@ -45,7 +45,7 @@ def image_resize(url, options) do
   @spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
   def video_framegrab(url) do
     with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
-         false <- @cachex.exists?(:failed_media_helper_cache, url),
+         {:ok, false} <- @cachex.exists?(:failed_media_helper_cache, url),
          {:ok, env} <- HTTP.get(url, [], http_client_opts()),
          {:ok, pid} <- StringIO.open(env.body) do
       body_stream = IO.binstream(pid, 1)
@@ -71,13 +71,13 @@ def video_framegrab(url) do
       end)
 
       case Task.yield(task, 5_000) do
-        nil ->
+        {:ok, result} ->
+          {:ok, result}
+
+        _ ->
           Task.shutdown(task)
           @cachex.put(:failed_media_helper_cache, url, nil)
           {:error, {:ffmpeg, :timeout}}
-
-        result ->
-          {:ok, result}
       end
     else
       nil -> {:error, {:ffmpeg, :command_not_found}}

From 1b3c84e241f8ed1066d113346365ce489971ac14 Mon Sep 17 00:00:00 2001
From: Mark Felder
Date: Tue, 28 May 2024 09:58:44 -0400
Subject: [PATCH 81/93] Dialyzer: no_local_return

WebPushEncryption.send_web_push/4 was written to raise on erroneous input, so we must guard against that.

lib/pleroma/web/push/impl.ex:65:no_return
Function push_message/4 has no local return.
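Put differently: because WebPushEncryption.send_web_push/4 can raise instead of returning an error tuple, Dialyzer concludes that some paths through push_message/4 never return. The usual remedy, which the hunk below applies, is to wrap the raising call in try/rescue and normalise every outcome to :ok/:error. A rough, self-contained sketch of that pattern (the Example.SafePush module and its deliver/1 stub are invented for illustration and are not part of the patch):

    defmodule Example.SafePush do
      require Logger

      # Stand-in for a library call that raises on malformed input.
      defp deliver(:bad_payload), do: raise(ArgumentError, "malformed payload")
      defp deliver(_payload), do: {:ok, %{status: 201}}

      # Wrapping the call in try/rescue gives every path a local return,
      # so callers (and Dialyzer) see a plain :ok | :error result.
      @spec push(term()) :: :ok | :error
      def push(payload) do
        try do
          case deliver(payload) do
            {:ok, %{status: code}} when code in 200..299 ->
              :ok

            other ->
              Logger.error("Push failed: #{inspect(other)}")
              :error
          end
        rescue
          error ->
            Logger.error("Push raised: #{inspect(error)}")
            :error
        end
      end
    end

With this shape, Example.SafePush.push(%{title: "hi"}) returns :ok, while Example.SafePush.push(:bad_payload) logs the exception and returns :error instead of crashing the caller.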
--- lib/pleroma/web/push/impl.ex | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/lib/pleroma/web/push/impl.ex b/lib/pleroma/web/push/impl.ex index 9e68d827b..53334e72c 100644 --- a/lib/pleroma/web/push/impl.ex +++ b/lib/pleroma/web/push/impl.ex @@ -63,19 +63,25 @@ def perform(_) do @doc "Push message to web" def push_message(body, sub, api_key, subscription) do - case WebPushEncryption.send_web_push(body, sub, api_key) do - {:ok, %{status: code}} when code in 400..499 -> - Logger.debug("Removing subscription record") - Repo.delete!(subscription) - :ok + try do + case WebPushEncryption.send_web_push(body, sub, api_key) do + {:ok, %{status: code}} when code in 400..499 -> + Logger.debug("Removing subscription record") + Repo.delete!(subscription) + :ok - {:ok, %{status: code}} when code in 200..299 -> - :ok + {:ok, %{status: code}} when code in 200..299 -> + :ok - {:ok, %{status: code}} -> - Logger.error("Web Push Notification failed with code: #{code}") - :error + {:ok, %{status: code}} -> + Logger.error("Web Push Notification failed with code: #{code}") + :error + error -> + Logger.error("Web Push Notification failed with #{inspect(error)}") + :error + end + rescue error -> Logger.error("Web Push Notification failed with #{inspect(error)}") :error From a041879eaaa7ca8ca421a89e303f58b5b68bc6da Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Tue, 28 May 2024 17:04:43 +0400 Subject: [PATCH 82/93] Linting --- test/mix/tasks/pleroma/database_test.exs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/test/mix/tasks/pleroma/database_test.exs b/test/mix/tasks/pleroma/database_test.exs index 01fd97e2d..5f5ab5195 100644 --- a/test/mix/tasks/pleroma/database_test.exs +++ b/test/mix/tasks/pleroma/database_test.exs @@ -352,12 +352,17 @@ test "with the --keep-threads option it keeps old threads with bookmarked posts" end test "We don't have unexpected tables which may contain objects that are referenced by activities" do - # We can delete orphaned activities. For that we look for the objects they reference in the 'objects', 'activities', and 'users' table. - # If someone adds another table with objects (idk, maybe with separate relations, or collections or w/e), then we need to make sure we - # add logic for that in the 'prune_objects' task so that we don't wrongly delete their corresponding activities. + # We can delete orphaned activities. For that we look for the objects + # they reference in the 'objects', 'activities', and 'users' table. + # If someone adds another table with objects (idk, maybe with separate + # relations, or collections or w/e), then we need to make sure we + # add logic for that in the 'prune_objects' task so that we don't + # wrongly delete their corresponding activities. # So when someone adds (or removes) a table, this test will fail. - # Either the table contains objects which can be referenced from the activities table - # => in that case the prune_objects job should be adapted so we don't delete activities who still have the referenced object. + # Either the table contains objects which can be referenced from the + # activities table + # => in that case the prune_objects job should be adapted so we don't + # delete activities who still have the referenced object. # Or it doesn't contain objects which can be referenced from the activities table # => in that case you can add/remove the table to/from this (sorted) list. 
@@ -463,7 +468,8 @@ test "it prunes orphaned activities with the --prune-orphaned-activities" do }) |> Repo.insert() - # The remote activities without existing reference, and only the remote activities without existing reference, are deleted + # The remote activities without existing reference, + # and only the remote activities without existing reference, are deleted # if, and only if, we provide the --prune-orphaned-activities option assert length(Repo.all(Activity)) == 5 Mix.Tasks.Pleroma.Database.run(["prune_objects"]) From 8743c6c640d395ff6d7d268df1e382ba0fb0ca96 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 10:36:00 -0400 Subject: [PATCH 83/93] Dialyzer: The pattern can never match the type We will never pass :plain to query_with/4, so remove that match and change it to query_with/3 lib/pleroma/search/database_search.ex:127:pattern_match The pattern can never match the type. Pattern: _q, :rum, _search_query, :plain Type: %Ecto.Query{ :aliases => _, :assocs => _, :combinations => _, :distinct => _, :from => _, :group_bys => _, :havings => _, :joins => _, :limit => _, :lock => _, :offset => _, :order_bys => _, :prefix => _, :preloads => _, :select => _, :sources => _, :updates => _, :wheres => _, :windows => _, :with_ctes => _ }, :rum, _, :websearch --- lib/pleroma/search/database_search.ex | 36 +++------------------------ 1 file changed, 3 insertions(+), 33 deletions(-) diff --git a/lib/pleroma/search/database_search.ex b/lib/pleroma/search/database_search.ex index c6fe8a9bd..aef5d1e74 100644 --- a/lib/pleroma/search/database_search.ex +++ b/lib/pleroma/search/database_search.ex @@ -28,7 +28,7 @@ def search(user, search_query, options \\ []) do |> Activity.with_preloaded_object() |> Activity.restrict_deactivated_users() |> restrict_public(user) - |> query_with(index_type, search_query, :websearch) + |> query_with(index_type, search_query) |> maybe_restrict_local(user) |> maybe_restrict_author(author) |> maybe_restrict_blocked(user) @@ -88,25 +88,7 @@ defp restrict_public(q, _user) do ) end - defp query_with(q, :gin, search_query, :plain) do - %{rows: [[tsc]]} = - Ecto.Adapters.SQL.query!( - Pleroma.Repo, - "select current_setting('default_text_search_config')::regconfig::oid;" - ) - - from([a, o] in q, - where: - fragment( - "to_tsvector(?::oid::regconfig, ?->>'content') @@ plainto_tsquery(?)", - ^tsc, - o.data, - ^search_query - ) - ) - end - - defp query_with(q, :gin, search_query, :websearch) do + defp query_with(q, :gin, search_query) do %{rows: [[tsc]]} = Ecto.Adapters.SQL.query!( Pleroma.Repo, @@ -124,19 +106,7 @@ defp query_with(q, :gin, search_query, :websearch) do ) end - defp query_with(q, :rum, search_query, :plain) do - from([a, o] in q, - where: - fragment( - "? @@ plainto_tsquery(?)", - o.fts_content, - ^search_query - ), - order_by: [fragment("? <=> now()::date", o.inserted_at)] - ) - end - - defp query_with(q, :rum, search_query, :websearch) do + defp query_with(q, :rum, search_query) do from([a, o] in q, where: fragment( From 6551ca2db7a0907252bbc649c7d082b3edf92a93 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 10:40:54 -0400 Subject: [PATCH 84/93] Dialyzer: overlapping_contract Wrong @spec name for remove_from_block/2 lib/pleroma/user.ex:2721:overlapping_contract Overloaded contract for Pleroma.User.add_to_block/2 has overlapping domains; such contracts are currently unsupported and are simply ignored. 
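For context on this warning: Elixir attaches a @spec to a function by name and arity, not by its position in the source, so a spec that carries the wrong name silently becomes a second (overlapping) contract for that other function, while the function it sits above gets no spec at all. A minimal sketch of the mistake (module and function names here are invented for illustration):

    defmodule Example do
      @spec add(integer(), integer()) :: integer()
      def add(a, b), do: a + b

      # Copy-pasted spec still names add/2: Dialyzer now sees two
      # overlapping contracts for add/2, and subtract/2 has none.
      @spec add(integer(), integer()) :: integer()
      def subtract(a, b), do: a - b
    end

Renaming the misattributed spec (as the hunk below does for remove_from_block/2) clears the warning.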
--- lib/pleroma/user.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 6d6aa98b5..d94b68ce0 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -2727,7 +2727,7 @@ defp add_to_block(%User{} = user, %User{} = blocked) do end end - @spec add_to_block(User.t(), User.t()) :: + @spec remove_from_block(User.t(), User.t()) :: {:ok, UserRelationship.t()} | {:ok, nil} | {:error, Ecto.Changeset.t()} defp remove_from_block(%User{} = user, %User{} = blocked) do with {:ok, relationship} <- UserRelationship.delete_block(user, blocked) do From 6b6a2adb07c3b9a52cd0a5adf435a916088bb4d7 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 10:49:43 -0400 Subject: [PATCH 85/93] Dialyzer: The function call will not succeed. :idna.encode/1 expects a charlist even though it will accept a binary string. That functionality is undocumented / not part of its typespec, so we should turn it into a charlist first. Also switch to using match?/2 lib/pleroma/user.ex:2056:call The function call will not succeed. :idna.encode(_host :: binary()) will never return since the success typing is: (string()) :: string() and the contract is (string()) :: string() --- lib/pleroma/user.ex | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index d94b68ce0..884c1f302 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -2053,7 +2053,8 @@ defp verify_field_link(field, profile_urls) do %{scheme: scheme, userinfo: nil, host: host} when not_empty_string(host) and scheme in ["http", "https"] <- URI.parse(value), - {:not_idn, true} <- {:not_idn, to_string(:idna.encode(host)) == host}, + {:not_idn, true} <- + {:not_idn, match?(^host, to_string(:idna.encode(to_charlist(host))))}, "me" <- Pleroma.Web.RelMe.maybe_put_rel_me(value, profile_urls) do CommonUtils.to_masto_date(NaiveDateTime.utc_now()) else From f663135724fac2ef12107369f94d8e030bc1b4a5 Mon Sep 17 00:00:00 2001 From: Lain Soykaf Date: Tue, 28 May 2024 18:54:36 +0400 Subject: [PATCH 86/93] DatabaseTest: Fix test. 
--- test/mix/tasks/pleroma/database_test.exs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/mix/tasks/pleroma/database_test.exs b/test/mix/tasks/pleroma/database_test.exs index 5f5ab5195..d773038cb 100644 --- a/test/mix/tasks/pleroma/database_test.exs +++ b/test/mix/tasks/pleroma/database_test.exs @@ -375,6 +375,7 @@ test "We don't have unexpected tables which may contain objects that are referen ["announcements"], ["apps"], ["backups"], + ["bookmark_folders"], ["bookmarks"], ["chat_message_references"], ["chats"], @@ -405,6 +406,8 @@ test "We don't have unexpected tables which may contain objects that are referen ["push_subscriptions"], ["registrations"], ["report_notes"], + ["rich_media_card"], + ["rules"], ["scheduled_activities"], ["schema_migrations"], ["thread_mutes"], From 79c418bcb7534ae3645ee0b7728f8e65a031f7a4 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Tue, 28 May 2024 11:07:28 -0400 Subject: [PATCH 87/93] Dialyzer: fix invalid @spec --- lib/pleroma/web/o_auth/token.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/web/o_auth/token.ex b/lib/pleroma/web/o_auth/token.ex index a5ad2e909..9b1198b42 100644 --- a/lib/pleroma/web/o_auth/token.ex +++ b/lib/pleroma/web/o_auth/token.ex @@ -96,7 +96,7 @@ defp put_valid_until(changeset, attrs) do |> validate_required([:valid_until]) end - @spec create(App.t(), User.t(), map()) :: {:ok, Token} | {:error, Ecto.Changeset.t()} + @spec create(App.t(), User.t(), map()) :: {:ok, Token.t()} | {:error, Ecto.Changeset.t()} def create(%App{} = app, %User{} = user, attrs \\ %{}) do with {:ok, token} <- do_create(app, user, attrs) do if Pleroma.Config.get([:oauth2, :clean_expired_tokens]) do From 14b4bd69a83846b3c117624851b96b9fc30528bf Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 29 May 2024 10:44:34 -0400 Subject: [PATCH 88/93] Add additional flags to the Pleroma.Search.Indexer Mix task --- changelog.d/mix-indexer.add | 1 + lib/mix/tasks/pleroma/search/indexer.ex | 9 ++++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelog.d/mix-indexer.add diff --git a/changelog.d/mix-indexer.add b/changelog.d/mix-indexer.add new file mode 100644 index 000000000..6effb959b --- /dev/null +++ b/changelog.d/mix-indexer.add @@ -0,0 +1 @@ +Permit passing --chunk and --step values to the Pleroma.Search.Indexer Mix task diff --git a/lib/mix/tasks/pleroma/search/indexer.ex b/lib/mix/tasks/pleroma/search/indexer.ex index 81a9fced6..2a52472f9 100644 --- a/lib/mix/tasks/pleroma/search/indexer.ex +++ b/lib/mix/tasks/pleroma/search/indexer.ex @@ -33,15 +33,18 @@ def run(["index" | options]) do OptionParser.parse( options, strict: [ - limit: :integer + chunk: :integer, + limit: :integer, + step: :integer ] ) start_pleroma() + chunk_size = Keyword.get(options, :chunk, 100) limit = Keyword.get(options, :limit, 100_000) + per_step = Keyword.get(options, :step, 1000) - per_step = 1000 chunks = max(div(limit, per_step), 1) 1..chunks @@ -65,7 +68,7 @@ def run(["index" | options]) do IO.puts("Got #{length(ids)} activities, adding to indexer") ids - |> Enum.chunk_every(100) + |> Enum.chunk_every(chunk_size) |> Enum.each(fn chunk -> IO.puts("Adding #{length(chunk)} activities to indexing queue") From 36b440d9bebb1ddba817cd9bd8f158ceadbe8aa2 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 29 May 2024 21:58:39 -0400 Subject: [PATCH 89/93] Update Bandit to 1.5.2 Lots of fixes, also requires Websock Adapter update due to internal module changes in Bandit 1.4.0. 
--- changelog.d/bandit_update_1.5.2.change | 1 + mix.exs | 3 ++- mix.lock | 16 ++++++++-------- 3 files changed, 11 insertions(+), 9 deletions(-) create mode 100644 changelog.d/bandit_update_1.5.2.change diff --git a/changelog.d/bandit_update_1.5.2.change b/changelog.d/bandit_update_1.5.2.change new file mode 100644 index 000000000..c4aae1636 --- /dev/null +++ b/changelog.d/bandit_update_1.5.2.change @@ -0,0 +1 @@ +Update Bandit to 1.5.2 diff --git a/mix.exs b/mix.exs index fe50139ef..a44daab8b 100644 --- a/mix.exs +++ b/mix.exs @@ -188,7 +188,8 @@ defp deps do {:exile, git: "https://github.com/akash-akya/exile.git", ref: "be87c33b02a7c3c5d22d2ece01fbd462355b28ef"}, - {:bandit, "~> 1.2"}, + {:bandit, "~> 1.5.2"}, + {:websock_adapter, "~> 0.5.6"}, ## dev & test {:ex_doc, "~> 0.22", only: :dev, runtime: false}, diff --git a/mix.lock b/mix.lock index 86545adcf..a55ad0126 100644 --- a/mix.lock +++ b/mix.lock @@ -1,6 +1,6 @@ %{ "accept": {:hex, :accept, "0.3.5", "b33b127abca7cc948bbe6caa4c263369abf1347cfa9d8e699c6d214660f10cd1", [:rebar3], [], "hexpm", "11b18c220bcc2eab63b5470c038ef10eb6783bcb1fcdb11aa4137defa5ac1bb8"}, - "bandit": {:hex, :bandit, "1.2.1", "aa485b4ac175065b8e0fb5864ddd5dd7b50d52336b36f61c82f484c3718b3d15", [:mix], [{:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:thousand_island, "~> 1.0", [hex: :thousand_island, repo: "hexpm", optional: false]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "27393e590a407f1b7d51c5fee4737f139fe224a30449ce25061eac70f763896b"}, + "bandit": {:hex, :bandit, "1.5.2", "ed0a41c43a9e529c670d0fd48371db4027e7b80d43b1942893e17deb8bed0540", [:mix], [{:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:thousand_island, "~> 1.0", [hex: :thousand_island, repo: "hexpm", optional: false]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "35ddbdce7e8a2a3c6b5093f7299d70832a43ed2f4a1852885a61d334cab1b4ad"}, "base62": {:hex, :base62, "1.2.2", "85c6627eb609317b70f555294045895ffaaeb1758666ab9ef9ca38865b11e629", [:mix], [{:custom_base, "~> 0.2.1", [hex: :custom_base, repo: "hexpm", optional: false]}], "hexpm", "d41336bda8eaa5be197f1e4592400513ee60518e5b9f4dcf38f4b4dae6f377bb"}, "bbcode_pleroma": {:hex, :bbcode_pleroma, "0.2.0", "d36f5bca6e2f62261c45be30fa9b92725c0655ad45c99025cb1c3e28e25803ef", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "19851074419a5fedb4ef49e1f01b30df504bb5dbb6d6adfc135238063bebd1c3"}, "bcrypt_elixir": {:hex, :bcrypt_elixir, "2.3.1", "5114d780459a04f2b4aeef52307de23de961b69e13a5cd98a911e39fda13f420", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "42182d5f46764def15bf9af83739e3bf4ad22661b1c34fc3e88558efced07279"}, @@ -19,9 +19,9 @@ "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, "cors_plug": {:hex, :cors_plug, "2.0.3", "316f806d10316e6d10f09473f19052d20ba0a0ce2a1d910ddf57d663dac402ae", [:mix], [{:plug, "~> 1.8", 
[hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ee4ae1418e6ce117fc42c2ba3e6cbdca4e95ecd2fe59a05ec6884ca16d469aea"}, "covertool": {:hex, :covertool, "2.0.6", "4a291b4e3449025b0595d8f44c8d7635d4f48f033be2ce88d22a329f36f94a91", [:rebar3], [], "hexpm", "5db3fcd82180d8ea4ad857d4d1ab21a8d31b5aee0d60d2f6c0f9e25a411d1e21"}, - "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"}, + "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, - "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"}, + "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, "credo": {:hex, :credo, "1.7.3", "05bb11eaf2f2b8db370ecaa6a6bda2ec49b2acd5e0418bc106b73b07128c0436", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "35ea675a094c934c22fb1dca3696f3c31f2728ae6ef5a53b5d648c11180a4535"}, "crontab": {:hex, :crontab, "1.1.8", "2ce0e74777dfcadb28a1debbea707e58b879e6aa0ffbf9c9bb540887bce43617", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"}, "custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"}, @@ -102,9 +102,9 @@ "phoenix_swoosh": {:hex, :phoenix_swoosh, "1.2.1", "b74ccaa8046fbc388a62134360ee7d9742d5a8ae74063f34eb050279de7a99e1", [:mix], [{:finch, "~> 0.8", [hex: :finch, repo: "hexpm", optional: true]}, {:hackney, "~> 1.10", [hex: :hackney, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:swoosh, "~> 1.5", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm", "4000eeba3f9d7d1a6bf56d2bd56733d5cadf41a7f0d8ffe5bb67e7d667e204a2"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", 
"2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, "phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"}, - "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"}, - "plug_cowboy": {:hex, :plug_cowboy, "2.6.2", "753611b23b29231fb916b0cdd96028084b12aff57bfd7b71781bd04b1dbeb5c9", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "951ed2433df22f4c97b85fdb145d4cee561f36b74854d64c06d896d7cd2921a7"}, - "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"}, + "plug": {:hex, :plug, "1.16.0", "1d07d50cb9bb05097fdf187b31cf087c7297aafc3fed8299aac79c128a707e47", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbf53aa1f5c4d758a7559c0bd6d59e286c2be0c6a1fac8cc3eee2f638243b93e"}, + "plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"}, + "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"}, "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "79fd4fcf34d110605c26560cbae8f23c603ec4158c08298bd4360fdea90bb5cf"}, "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm", "fec8660eb7733ee4117b85f55799fd3833eb769a6df71ccf8903e8dc5447cfce"}, "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"}, @@ -134,7 +134,7 @@ "telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.2.0", "b583c3f18508f5c5561b674d16cf5d9afd2ea3c04505b7d92baaeac93c1b8260", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, 
repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "9cba950e1c4733468efbe3f821841f34ac05d28e7af7798622f88ecdbbe63ea3"}, "telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"}, "tesla": {:hex, :tesla, "1.8.0", "d511a4f5c5e42538d97eef7c40ec4f3e44effdc5068206f42ed859e09e51d1fd", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "10501f360cd926a309501287470372af1a6e1cbed0f43949203a4c13300bc79f"}, - "thousand_island": {:hex, :thousand_island, "1.3.2", "bc27f9afba6e1a676dd36507d42e429935a142cf5ee69b8e3f90bff1383943cd", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "0e085b93012cd1057b378fce40cbfbf381ff6d957a382bfdd5eca1a98eec2535"}, + "thousand_island": {:hex, :thousand_island, "1.3.5", "6022b6338f1635b3d32406ff98d68b843ba73b3aa95cfc27154223244f3a6ca5", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2be6954916fdfe4756af3239fb6b6d75d0b8063b5df03ba76fd8a4c87849e180"}, "timex": {:hex, :timex, "3.7.7", "3ed093cae596a410759104d878ad7b38e78b7c2151c6190340835515d4a46b8a", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "0ec4b09f25fe311321f9fc04144a7e3affe48eb29481d7a5583849b6c4dfa0a7"}, "toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"}, "trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"}, @@ -145,6 +145,6 @@ "vix": {:hex, :vix, "0.26.0", "027f10b6969b759318be84bd0bd8c88af877445e4e41cf96a0460392cea5399c", [:make, :mix], [{:castore, "~> 1.0 or ~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:cc_precompiler, "~> 0.2 or ~> 0.1.4", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8 or ~> 0.7.3", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:kino, "~> 0.7", [hex: :kino, repo: "hexpm", optional: true]}], "hexpm", 
"71b0a79ae7f199cacfc8e679b0e4ba25ee47dc02e182c5b9097efb29fbe14efd"}, "web_push_encryption": {:hex, :web_push_encryption, "0.3.1", "76d0e7375142dfee67391e7690e89f92578889cbcf2879377900b5620ee4708d", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.11.1", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "4f82b2e57622fb9337559058e8797cb0df7e7c9790793bdc4e40bc895f70e2a2"}, "websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"}, - "websock_adapter": {:hex, :websock_adapter, "0.5.5", "9dfeee8269b27e958a65b3e235b7e447769f66b5b5925385f5a569269164a210", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "4b977ba4a01918acbf77045ff88de7f6972c2a009213c515a445c48f224ffce9"}, + "websock_adapter": {:hex, :websock_adapter, "0.5.6", "0437fe56e093fd4ac422de33bf8fc89f7bc1416a3f2d732d8b2c8fd54792fe60", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "e04378d26b0af627817ae84c92083b7e97aca3121196679b73c73b99d0d133ea"}, "websockex": {:hex, :websockex, "0.4.3", "92b7905769c79c6480c02daacaca2ddd49de936d912976a4d3c923723b647bf0", [:mix], [], "hexpm", "95f2e7072b85a3a4cc385602d42115b73ce0b74a9121d0d6dbbf557645ac53e4"}, } From b5fcb82bffd3f31cf1318c1504fcb97e56b892cd Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Thu, 30 May 2024 10:47:51 -0400 Subject: [PATCH 90/93] Test for missing FK indexes --- test/fixtures/unindexed_fk.sql | 27 +++++++++++++++++++++++++++ test/pleroma/schema_test.exs | 17 +++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 test/fixtures/unindexed_fk.sql create mode 100644 test/pleroma/schema_test.exs diff --git a/test/fixtures/unindexed_fk.sql b/test/fixtures/unindexed_fk.sql new file mode 100644 index 000000000..3b71679cf --- /dev/null +++ b/test/fixtures/unindexed_fk.sql @@ -0,0 +1,27 @@ +-- Unindexed FK -- Missing indexes - For CI + +WITH y AS ( +SELECT +pg_catalog.format('%I', c1.relname) AS referencing_tbl, +pg_catalog.quote_ident(a1.attname) AS referencing_column, +(SELECT pg_get_expr(indpred, indrelid) FROM pg_catalog.pg_index WHERE indrelid = t.conrelid AND indkey[0] = t.conkey[1] AND indpred IS NOT NULL LIMIT 1) partial_statement +FROM pg_catalog.pg_constraint t +JOIN pg_catalog.pg_attribute a1 ON a1.attrelid = t.conrelid AND a1.attnum = t.conkey[1] +JOIN pg_catalog.pg_class c1 ON c1.oid = t.conrelid +JOIN pg_catalog.pg_namespace n1 ON n1.oid = c1.relnamespace +JOIN pg_catalog.pg_class c2 ON c2.oid = t.confrelid +JOIN pg_catalog.pg_namespace n2 ON n2.oid = c2.relnamespace +JOIN pg_catalog.pg_attribute a2 ON a2.attrelid = t.confrelid AND a2.attnum = t.confkey[1] +WHERE t.contype = 'f' +AND NOT EXISTS ( +SELECT 1 +FROM pg_catalog.pg_index i +WHERE i.indrelid = t.conrelid +AND i.indkey[0] = t.conkey[1] +AND indpred IS NULL +) +) +SELECT referencing_tbl || '.' 
|| referencing_column as "column" +FROM y +WHERE (partial_statement IS NULL OR partial_statement <> ('(' || referencing_column || ' IS NOT NULL)')) +ORDER BY 1; \ No newline at end of file diff --git a/test/pleroma/schema_test.exs b/test/pleroma/schema_test.exs new file mode 100644 index 000000000..9bddd2031 --- /dev/null +++ b/test/pleroma/schema_test.exs @@ -0,0 +1,17 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.SchemaTest do + use Pleroma.DataCase, async: true + + alias Pleroma.Repo + + test "No unindexed foreign keys" do + query = File.read!("test/fixtures/unindexed_fk.sql") + + {:ok, result} = Repo.query(query) + + assert Enum.empty?(result.rows) + end +end From c20ac6d1adc224232422640d8bc11a80f5eff350 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 29 May 2024 21:27:35 -0400 Subject: [PATCH 91/93] Add missing foreign key indexes --- ...0240530011739_add_missing_foreign_keys.exs | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 priv/repo/migrations/20240530011739_add_missing_foreign_keys.exs diff --git a/priv/repo/migrations/20240530011739_add_missing_foreign_keys.exs b/priv/repo/migrations/20240530011739_add_missing_foreign_keys.exs new file mode 100644 index 000000000..158f9701b --- /dev/null +++ b/priv/repo/migrations/20240530011739_add_missing_foreign_keys.exs @@ -0,0 +1,20 @@ +defmodule Pleroma.Repo.Migrations.AddMissingForeignKeys do + use Ecto.Migration + + def change do + create_if_not_exists(index(:announcement_read_relationships, :announcement_id)) + create_if_not_exists(index(:bookmarks, :activity_id)) + create_if_not_exists(index(:bookmarks, :folder_id)) + create_if_not_exists(index(:chats, :recipient)) + create_if_not_exists(index(:mfa_tokens, :authorization_id)) + create_if_not_exists(index(:mfa_tokens, :user_id)) + create_if_not_exists(index(:notifications, :activity_id)) + create_if_not_exists(index(:oauth_authorizations, :app_id)) + create_if_not_exists(index(:oauth_authorizations, :user_id)) + create_if_not_exists(index(:password_reset_tokens, :user_id)) + create_if_not_exists(index(:push_subscriptions, :token_id)) + create_if_not_exists(index(:report_notes, :activity_id)) + create_if_not_exists(index(:report_notes, :user_id)) + create_if_not_exists(index(:user_notes, :target_id)) + end +end From 5f6e477ecaa941a79b22599aca169164b7241bcf Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Thu, 30 May 2024 10:51:50 -0400 Subject: [PATCH 92/93] Missing FKs changelog --- changelog.d/missing-fks.add | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/missing-fks.add diff --git a/changelog.d/missing-fks.add b/changelog.d/missing-fks.add new file mode 100644 index 000000000..cf74de03b --- /dev/null +++ b/changelog.d/missing-fks.add @@ -0,0 +1 @@ +Add missing indexes on foreign key relationships From f5065eaf99a76695df26966055f90368df7043f3 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Thu, 30 May 2024 11:09:42 -0400 Subject: [PATCH 93/93] Fix Logger.warn deprecation error on OTP25 --- changelog.d/mrf-nsfw-otp25.skip | 1 + lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/mrf-nsfw-otp25.skip diff --git a/changelog.d/mrf-nsfw-otp25.skip b/changelog.d/mrf-nsfw-otp25.skip new file mode 100644 index 000000000..e804f19a0 --- /dev/null +++ b/changelog.d/mrf-nsfw-otp25.skip @@ -0,0 +1 @@ +noop diff --git 
a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex index f7863039b..3d1c273b9 100644 --- a/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex @@ -64,7 +64,7 @@ def parse_url(url) do Jason.decode(body) else error -> - Logger.warn(""" + Logger.warning(""" [NsfwApiPolicy]: The API server failed. Skipping. #{inspect(error)} """)