Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into auth-fetch-exception

Commit 687ac4a850 by Lain Soykaf, 2024-05-27 23:09:17 +04:00
65 changed files with 1490 additions and 189 deletions

View File

@ -0,0 +1 @@
HTTP Security: By default, don't allow unsafe-eval. The setting needs to be changed to allow Flash emulation.

changelog.d/3907.skip (new, empty file)
View File

View File

@ -0,0 +1 @@
Add DNSRBL MRF

View File

@ -0,0 +1 @@
Add Anti-mention Spam MRF backported from Rebased

View File

@ -0,0 +1 @@
Logger metadata is now attached to some logs to help with troubleshooting and analysis

View File

@ -0,0 +1 @@
Oban queues have been refactored to simplify the queue design

View File

@ -0,0 +1 @@
Update the documentation for configuring Prometheus metrics.

View File

@ -0,0 +1 @@
PromEx documentation

View File

@ -0,0 +1 @@
Add Qdrant/OpenAI embedding search

View File

@ -0,0 +1 @@
Display reposted replies with exclude_replies: true

View File

@ -0,0 +1 @@
Support honk-style attachment summaries as alt-text.

View File

@@ -135,13 +135,13 @@
config :logger, :console,
  level: :debug,
  format: "\n$time $metadata[$level] $message\n",
-  metadata: [:request_id]
+  metadata: [:actor, :path, :type, :user]

config :logger, :ex_syslogger,
  level: :debug,
  ident: "pleroma",
  format: "$metadata[$level] $message",
-  metadata: [:request_id]
+  metadata: [:actor, :path, :type, :user]

config :mime, :types, %{
  "application/xml" => ["xml"],
@@ -410,6 +410,11 @@
  accept: [],
  reject: []

+config :pleroma, :mrf_dnsrbl,
+  nameserver: "127.0.0.1",
+  port: 53,
+  zone: "bl.pleroma.com"
+
# threshold of 7 days
config :pleroma, :mrf_object_age,
  threshold: 604_800,
@@ -430,6 +435,8 @@
  mention_parent: true,
  mention_quoted: true

+config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000
+
config :pleroma, :rich_media,
  enabled: true,
  ignore_hosts: [],
@@ -512,7 +519,8 @@
  sts: false,
  sts_max_age: 31_536_000,
  ct_max_age: 2_592_000,
-  referrer_policy: "same-origin"
+  referrer_policy: "same-origin",
+  allow_unsafe_eval: false

config :cors_plug,
  max_age: 86_400,
@@ -574,24 +582,14 @@
  log: false,
  queues: [
    activity_expiration: 10,
-    token_expiration: 5,
-    filter_expiration: 1,
-    backup: 1,
    federator_incoming: 5,
    federator_outgoing: 5,
    ingestion_queue: 50,
    web_push: 50,
-    mailer: 10,
    transmogrifier: 20,
-    scheduled_activities: 10,
-    poll_notifications: 10,
    background: 5,
-    remote_fetcher: 2,
-    attachments_cleanup: 1,
-    new_users_digest: 1,
-    mute_expire: 5,
    search_indexing: [limit: 10, paused: true],
-    rich_media_expiration: 2
+    slow: 1
  ],
  plugins: [Oban.Plugins.Pruner],
  crontab: [
@@ -926,6 +924,19 @@
config :pleroma, Pleroma.Uploaders.Uploader, timeout: 30_000

+config :pleroma, Pleroma.Search.QdrantSearch,
+  qdrant_url: "http://127.0.0.1:6333/",
+  qdrant_api_key: "",
+  openai_url: "http://127.0.0.1:11345",
+  # The healthcheck url has to be set to nil when used with the real openai
+  # API, as it doesn't have a healthcheck endpoint.
+  openai_healthcheck_url: "http://127.0.0.1:11345/health",
+  openai_model: "snowflake/snowflake-arctic-embed-xs",
+  openai_api_key: "",
+  qdrant_index_configuration: %{
+    vectors: %{size: 384, distance: "Cosine"}
+  }
+
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

View File

@@ -35,8 +35,8 @@
# configured to run both http and https servers on
# different ports.

-# Do not include metadata nor timestamps in development logs
-config :logger, :console, format: "[$level] $message\n"
+# Do not include timestamps in development logs
+config :logger, :console, format: "$metadata[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.

View File

@@ -154,6 +154,7 @@
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock

peer_module =
  if String.to_integer(System.otp_release()) >= 25 do

View File

@@ -473,6 +473,7 @@ This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls start
* ``ct_max_age``: The maximum age for the `Expect-CT` header if sent.
* ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`.
* ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header.
+* `allow_unsafe_eval`: Adds `wasm-unsafe-eval` to the CSP header. Needed for some non-essential frontend features like Flash emulation.

### Pleroma.Web.Plugs.RemoteIp
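For illustration (not part of the diff above), turning the new option on for an instance that needs Flash emulation would look roughly like this; `allow_unsafe_eval` defaults to `false`, and the other keys shown are existing `:http_security` settings:

```
config :pleroma, :http_security,
  enabled: true,
  referrer_policy: "same-origin",
  # opt-in: emits 'wasm-unsafe-eval' in the script-src CSP directive
  allow_unsafe_eval: true
```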

View File

@@ -10,6 +10,30 @@ To use built-in search that has no external dependencies, set the search module
While it has no external dependencies, it has problems with performance and relevancy.
## QdrantSearch
This uses the vector search engine [Qdrant](https://qdrant.tech) to search the posts in a vector space. This needs a way to generate embeddings and uses the [OpenAI API](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). This is implemented by several projects besides OpenAI itself, including the python-based fastembed-server found in `supplemental/search/fastembed-api`.
The default settings will support a setup where both the fastembed server and Qdrant run on the same system as pleroma. To use it, set the search provider and run the fastembed server (see the README in `supplemental/search/fastembed-api`):
> config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch
Then start the Qdrant server; see [here](https://qdrant.tech/documentation/quick-start/) for instructions.
You will also need to create the Qdrant index once by running `mix pleroma.search.indexer create_index`. Running `mix pleroma.search.indexer index` will retroactively index the last 100_000 activities.
### Indexing and model options
To see the available configuration options, check out the QdrantSearch section in `config/config.exs`.
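For reference, these are the defaults this commit adds to `config/config.exs` (reproduced here, not an additional change):

```
config :pleroma, Pleroma.Search.QdrantSearch,
  qdrant_url: "http://127.0.0.1:6333/",
  qdrant_api_key: "",
  openai_url: "http://127.0.0.1:11345",
  # Set the healthcheck url to nil when using the real OpenAI API,
  # as it has no healthcheck endpoint.
  openai_healthcheck_url: "http://127.0.0.1:11345/health",
  openai_model: "snowflake/snowflake-arctic-embed-xs",
  openai_api_key: "",
  qdrant_index_configuration: %{
    vectors: %{size: 384, distance: "Cosine"}
  }
```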
The default indexing options work for the default model (`snowflake-arctic-embed-xs`). To optimize for a low memory footprint, adjust the index configuration as described in the [Qdrant docs](https://qdrant.tech/documentation/guides/optimize/). See also [this blog post](https://qdrant.tech/articles/memory-consumption/) that goes into detail.
Different embedding models will need different vector size settings. You can see a list of the models supported by the fastembed server [here](https://qdrant.github.io/fastembed/examples/Supported_Models), including their vector dimensions. These vector dimensions need to be set in the `qdrant_index_configuration`.
E.g., if you want to use `sentence-transformers/all-MiniLM-L6-v2` as a model, you will not need to adjust things, because it and `snowflake-arctic-embed-xs` are both 384-dimensional models. If you want to use `snowflake/snowflake-arctic-embed-l`, you will need to adjust the `size` parameter in the `qdrant_index_configuration` to 1024, as it has a dimension of 1024.
When using a different model, you will need to drop the index and recreate it (`mix pleroma.search.indexer drop_index` and `mix pleroma.search.indexer create_index`), as the different embeddings are not compatible with each other.
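As a sketch of the adjustment described above (assuming `snowflake/snowflake-arctic-embed-l`, a 1024-dimensional model), only the model name and vector size need to change; remember to drop and recreate the index afterwards:

```
config :pleroma, Pleroma.Search.QdrantSearch,
  openai_model: "snowflake/snowflake-arctic-embed-l",
  qdrant_index_configuration: %{
    # size must match the embedding dimension of the chosen model
    vectors: %{size: 1024, distance: "Cosine"}
  }
```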
## Meilisearch

Note that it's quite a bit more memory hungry than PostgreSQL (around 4-5G for ~1.2 million

View File

@@ -1,44 +1,47 @@
-# Prometheus Metrics
+# Prometheus / OpenTelemetry Metrics

-Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library.
+Pleroma includes support for exporting metrics via the [prom_ex](https://github.com/akoutmos/prom_ex) library.
+The metrics are exposed by a dedicated webserver/port to improve privacy and security.

Config example:

```
-config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
-  enabled: true,
-  auth: {:basic, "myusername", "mypassword"},
-  ip_whitelist: ["127.0.0.1"],
-  path: "/api/pleroma/app_metrics",
-  format: :text
-```
-
-* `enabled` (Pleroma extension) enables the endpoint
-* `ip_whitelist` (Pleroma extension) could be used to restrict access only to specified IPs
-* `auth` sets the authentication (`false` for no auth; configurable to HTTP Basic Auth, see [prometheus-plugs](https://github.com/deadtrickster/prometheus-plugs#exporting) documentation)
-* `format` sets the output format (`:text` or `:protobuf`)
-* `path` sets the path to app metrics page
-
-## `/api/pleroma/app_metrics`
-
-### Exports Prometheus application metrics
-
-* Method: `GET`
-* Authentication: not required by default (see configuration options above)
-* Params: none
-* Response: text
-
-## Grafana
-
-### Config example
-
-The following is a config example to use with [Grafana](https://grafana.com)
+config :pleroma, Pleroma.PromEx,
+  disabled: false,
+  manual_metrics_start_delay: :no_delay,
+  drop_metrics_groups: [],
+  grafana: [
+    host: System.get_env("GRAFANA_HOST", "http://localhost:3000"),
+    auth_token: System.get_env("GRAFANA_TOKEN"),
+    upload_dashboards_on_start: false,
+    folder_name: "BEAM",
+    annotate_app_lifecycle: true
+  ],
+  metrics_server: [
+    port: 4021,
+    path: "/metrics",
+    protocol: :http,
+    pool_size: 5,
+    cowboy_opts: [],
+    auth_strategy: :none
+  ],
+  datasource: "Prometheus"
```

-- job_name: 'beam'
-  metrics_path: /api/pleroma/app_metrics
-  scheme: https
+PromEx supports the ability to automatically publish dashboards to your Grafana server as well as register Annotations. If you do not wish to configure this capability you must generate the dashboard JSON files and import them directly. You can find the mix commands in the upstream [documentation](https://hexdocs.pm/prom_ex/Mix.Tasks.PromEx.Dashboard.Export.html). You can find the list of modules enabled in Pleroma for which you should generate dashboards by examining the contents of the `lib/pleroma/prom_ex.ex` module.
+
+## prometheus.yml
+
+The following is a bare minimum config example to use with [Prometheus](https://prometheus.io) or Prometheus-compatible software like [VictoriaMetrics](https://victoriametrics.com).
+
+```
+global:
+  scrape_interval: 15s
+
+scrape_configs:
+- job_name: 'pleroma'
+  scheme: http
  static_configs:
-  - targets: ['pleroma.soykaf.com']
+  - targets: ['pleroma.soykaf.com:4021']
```

View File

@ -0,0 +1,80 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Search.Indexer do
import Mix.Pleroma
import Ecto.Query
alias Pleroma.Workers.SearchIndexingWorker
def run(["create_index"]) do
start_pleroma()
with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).create_index() do
IO.puts("Index created")
else
e -> IO.puts("Could not create index: #{inspect(e)}")
end
end
def run(["drop_index"]) do
start_pleroma()
with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).drop_index() do
IO.puts("Index dropped")
else
e -> IO.puts("Could not drop index: #{inspect(e)}")
end
end
def run(["index" | options]) do
{options, [], []} =
OptionParser.parse(
options,
strict: [
limit: :integer
]
)
start_pleroma()
limit = Keyword.get(options, :limit, 100_000)
per_step = 1000
chunks = max(div(limit, per_step), 1)
1..chunks
|> Enum.each(fn step ->
q =
from(a in Pleroma.Activity,
limit: ^per_step,
offset: ^per_step * (^step - 1),
select: [:id],
order_by: [desc: :id]
)
{:ok, ids} =
Pleroma.Repo.transaction(fn ->
Pleroma.Repo.stream(q, timeout: :infinity)
|> Enum.map(fn a ->
a.id
end)
end)
IO.puts("Got #{length(ids)} activities, adding to indexer")
ids
|> Enum.chunk_every(100)
|> Enum.each(fn chunk ->
IO.puts("Adding #{length(chunk)} activities to indexing queue")
chunk
|> Enum.map(fn id ->
SearchIndexingWorker.new(%{"op" => "add_to_index", "activity" => id})
end)
|> Oban.insert_all()
end)
end)
end
end

View File

@@ -14,6 +14,7 @@ defmodule Pleroma.Application do
  @name Mix.Project.config()[:name]
  @version Mix.Project.config()[:version]
  @repository Mix.Project.config()[:source_url]
+  @compile_env Mix.env()

  def name, do: @name
  def version, do: @version
@@ -51,7 +52,11 @@ def start(_type, _args) do
    Pleroma.HTML.compile_scrubbers()
    Pleroma.Config.Oban.warn()
    Config.DeprecationWarnings.warn()
-    Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
+
+    if @compile_env != :test do
+      Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
+    end
+
    Pleroma.ApplicationRequirements.verify!()
    load_custom_modules()
    Pleroma.Docs.JSON.compile()

View File

@@ -204,7 +204,7 @@ def due_activities(offset \\ 0) do
  def job_query(scheduled_activity_id) do
    from(j in Oban.Job,
-      where: j.queue == "scheduled_activities",
+      where: j.queue == "federator_outgoing",
      where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
    )
  end

View File

@@ -48,6 +48,12 @@ def add_to_index(_activity), do: :ok
  @impl true
  def remove_from_index(_object), do: :ok

+  @impl true
+  def create_index, do: :ok
+
+  @impl true
+  def drop_index, do: :ok
+
  @impl true
  def healthcheck_endpoints, do: nil

View File

@@ -10,6 +10,12 @@ defmodule Pleroma.Search.Meilisearch do
  @behaviour Pleroma.Search.SearchBackend

+  @impl true
+  def create_index, do: :ok
+
+  @impl true
+  def drop_index, do: :ok
+
  defp meili_headers do
    private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])

View File

@ -0,0 +1,182 @@
defmodule Pleroma.Search.QdrantSearch do
@behaviour Pleroma.Search.SearchBackend
import Ecto.Query
alias Pleroma.Activity
alias Pleroma.Config.Getting, as: Config
alias __MODULE__.OpenAIClient
alias __MODULE__.QdrantClient
import Pleroma.Search.Meilisearch, only: [object_to_search_data: 1]
import Pleroma.Search.DatabaseSearch, only: [maybe_fetch: 3]
@impl true
def create_index do
payload = Config.get([Pleroma.Search.QdrantSearch, :qdrant_index_configuration])
with {:ok, %{status: 200}} <- QdrantClient.put("/collections/posts", payload) do
:ok
else
e -> {:error, e}
end
end
@impl true
def drop_index do
with {:ok, %{status: 200}} <- QdrantClient.delete("/collections/posts") do
:ok
else
e -> {:error, e}
end
end
def get_embedding(text) do
with {:ok, %{body: %{"data" => [%{"embedding" => embedding}]}}} <-
OpenAIClient.post("/v1/embeddings", %{
input: text,
model: Config.get([Pleroma.Search.QdrantSearch, :openai_model])
}) do
{:ok, embedding}
else
_ ->
{:error, "Failed to get embedding"}
end
end
defp actor_from_activity(%{data: %{"actor" => actor}}) do
actor
end
defp actor_from_activity(_), do: nil
defp build_index_payload(activity, embedding) do
actor = actor_from_activity(activity)
published_at = activity.data["published"]
%{
points: [
%{
id: activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!(),
vector: embedding,
payload: %{actor: actor, published_at: published_at}
}
]
}
end
defp build_search_payload(embedding, options) do
base = %{
vector: embedding,
limit: options[:limit] || 20,
offset: options[:offset] || 0
}
if author = options[:author] do
Map.put(base, :filter, %{
must: [%{key: "actor", match: %{value: author.ap_id}}]
})
else
base
end
end
@impl true
def add_to_index(activity) do
# This will only index public or unlisted notes
maybe_search_data = object_to_search_data(activity.object)
if activity.data["type"] == "Create" and maybe_search_data do
with {:ok, embedding} <- get_embedding(maybe_search_data.content),
{:ok, %{status: 200}} <-
QdrantClient.put(
"/collections/posts/points",
build_index_payload(activity, embedding)
) do
:ok
else
e -> {:error, e}
end
else
:ok
end
end
@impl true
def remove_from_index(object) do
activity = Activity.get_by_object_ap_id_with_object(object.data["id"])
id = activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()
with {:ok, %{status: 200}} <-
QdrantClient.post("/collections/posts/points/delete", %{"points" => [id]}) do
:ok
else
e -> {:error, e}
end
end
@impl true
def search(user, original_query, options) do
query = "Represent this sentence for searching relevant passages: #{original_query}"
with {:ok, embedding} <- get_embedding(query),
{:ok, %{body: %{"result" => result}}} <-
QdrantClient.post(
"/collections/posts/points/search",
build_search_payload(embedding, options)
) do
ids =
Enum.map(result, fn %{"id" => id} ->
Ecto.UUID.dump!(id)
end)
from(a in Activity, where: a.id in ^ids)
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> Ecto.Query.order_by([a], fragment("array_position(?, ?)", ^ids, a.id))
|> Pleroma.Repo.all()
|> maybe_fetch(user, original_query)
else
_ ->
[]
end
end
@impl true
def healthcheck_endpoints do
qdrant_health =
Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])
|> URI.parse()
|> Map.put(:path, "/healthz")
|> URI.to_string()
openai_health = Config.get([Pleroma.Search.QdrantSearch, :openai_healthcheck_url])
[qdrant_health, openai_health] |> Enum.filter(& &1)
end
end
defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
use Tesla
alias Pleroma.Config.Getting, as: Config
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Headers, [
{"Authorization",
"Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
])
end
defmodule Pleroma.Search.QdrantSearch.QdrantClient do
use Tesla
alias Pleroma.Config.Getting, as: Config
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Headers, [
{"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
])
end

View File

@@ -22,6 +22,16 @@ defmodule Pleroma.Search.SearchBackend do
  """
  @callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}

+  @doc """
+  Create the index
+  """
+  @callback create_index() :: :ok | {:error, any()}
+
+  @doc """
+  Drop the index
+  """
+  @callback drop_index() :: :ok | {:error, any()}
+
  @doc """
  Healthcheck endpoints of search backend infrastructure to monitor for controlling
  processing of jobs in the Oban queue.

View File

@@ -979,8 +979,9 @@ defp restrict_media(query, _), do: query
  defp restrict_replies(query, %{exclude_replies: true}) do
    from(
-      [_activity, object] in query,
-      where: fragment("?->>'inReplyTo' is null", object.data)
+      [activity, object] in query,
+      where:
+        fragment("?->>'inReplyTo' is null or ?->>'type' = 'Announce'", object.data, activity.data)
    )
  end

View File

@@ -52,6 +52,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
    when action in [:activity, :object]
  )

+  plug(:log_inbox_metadata when action in [:inbox])
  plug(:set_requester_reachable when action in [:inbox])
  plug(:relay_active? when action in [:relay])
@@ -521,6 +522,13 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do
    conn
  end

+  defp log_inbox_metadata(conn = %{params: %{"actor" => actor, "type" => type}}, _) do
+    Logger.metadata(actor: actor, type: type)
+    conn
+  end
+
+  defp log_inbox_metadata(conn, _), do: conn
+
  def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
    with {:ok, object} <-
           ActivityPub.upload(

View File

@ -0,0 +1,87 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
alias Pleroma.Config
alias Pleroma.User
require Pleroma.Constants
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp user_has_posted?(%User{} = u), do: u.note_count > 0
defp user_has_age?(%User{} = u) do
user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000)
diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond)
diff >= user_age_limit
end
defp good_reputation?(%User{} = u) do
user_has_age?(u) and user_has_posted?(u)
end
# copied from HellthreadPolicy
defp get_recipient_count(message) do
recipients = (message["to"] || []) ++ (message["cc"] || [])
follower_collection =
User.get_cached_by_ap_id(message["actor"] || message["attributedTo"]).follower_address
if Enum.member?(recipients, Pleroma.Constants.as_public()) do
recipients =
recipients
|> List.delete(Pleroma.Constants.as_public())
|> List.delete(follower_collection)
{:public, length(recipients)}
else
recipients =
recipients
|> List.delete(follower_collection)
{:not_public, length(recipients)}
end
end
defp object_has_recipients?(%{"object" => object} = activity) do
{_, object_count} = get_recipient_count(object)
{_, activity_count} = get_recipient_count(activity)
object_count + activity_count > 0
end
defp object_has_recipients?(object) do
{_, count} = get_recipient_count(object)
count > 0
end
@impl true
def filter(%{"type" => "Create", "actor" => actor} = activity) do
with {:ok, %User{local: false} = u} <- User.get_or_fetch_by_ap_id(actor),
{:has_mentions, true} <- {:has_mentions, object_has_recipients?(activity)},
{:good_reputation, true} <- {:good_reputation, good_reputation?(u)} do
{:ok, activity}
else
{:ok, %User{local: true}} ->
{:ok, activity}
{:has_mentions, false} ->
{:ok, activity}
{:good_reputation, false} ->
{:reject, "[AntiMentionSpamPolicy] User rejected"}
{:error, _} ->
{:reject, "[AntiMentionSpamPolicy] Failed to get or fetch user by ap_id"}
e ->
{:reject, "[AntiMentionSpamPolicy] Unhandled error #{inspect(e)}"}
end
end
# in all other cases, pass through
def filter(message), do: {:ok, message}
@impl true
def describe, do: {:ok, %{}}
end
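For context (not part of this file): the only runtime knob added for this policy in `config/config.exs` is `user_age_limit`, in milliseconds. A hedged sketch of enabling it, assuming the usual `:mrf, :policies` setting:

```
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy]

# Remote accounts younger than this (in ms) that have never posted get
# their mention-bearing Create activities rejected.
config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000
```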

View File

@ -0,0 +1,142 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
@moduledoc """
Dynamic activity filtering based on an RBL database
This MRF makes queries to a custom DNS server which will
respond with values indicating the classification of the domain
the activity originated from. This method has been widely used
in the email anti-spam industry for very fast reputation checks.
e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK
Other values such as 127.0.0.2 may be used for specific classifications.
Information for why the host is blocked can be stored in a corresponding TXT record.
This method is fail-open so if the queries fail the activities are accepted.
An example of software meant for this purpose is rbldnsd which can be found
at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
https://git.pleroma.social/feld/rbldnsd
It is highly recommended that you run your own copy of rbldnsd and use an
external mechanism to sync/share the contents of the zone file. This is
important to keep the latency on the queries as low as possible and prevent
your DNS server from being attacked so it fails and content is permitted.
"""
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
alias Pleroma.Config
require Logger
@query_retries 1
@query_timeout 500
@impl true
def filter(%{"actor" => actor} = object) do
actor_info = URI.parse(actor)
with {:ok, object} <- check_rbl(actor_info, object) do
{:ok, object}
else
_ -> {:reject, "[DNSRBLPolicy]"}
end
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe do
mrf_dnsrbl =
Config.get(:mrf_dnsrbl)
|> Enum.into(%{})
{:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
end
@impl true
def config_description do
%{
key: :mrf_dnsrbl,
related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
label: "MRF DNSRBL",
description: "DNS RealTime Blackhole Policy",
children: [
%{
key: :nameserver,
type: {:string},
description: "DNSRBL Nameserver to Query (IP or hostame)",
suggestions: ["127.0.0.1"]
},
%{
key: :port,
type: {:string},
description: "Nameserver port",
suggestions: ["53"]
},
%{
key: :zone,
type: {:string},
description: "Root zone for querying",
suggestions: ["bl.pleroma.com"]
}
]
}
end
defp check_rbl(%{host: actor_host}, object) do
with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
query =
Enum.join([actor_host, zone], ".")
|> String.to_charlist()
rbl_response = rblquery(query)
if Enum.empty?(rbl_response) do
{:ok, object}
else
Task.start(fn ->
reason = rblquery(query, :txt) || "undefined"
Logger.warning(
"DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
)
end)
:error
end
else
_ -> {:ok, object}
end
end
defp get_rblhost_ip(rblhost) do
case rblhost |> String.to_charlist() |> :inet_parse.address() do
{:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
_ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
end
end
defp rblquery(query, type \\ :a) do
config = Config.get([:mrf_dnsrbl])
case get_rblhost_ip(config[:nameserver]) do
{:ok, rblnsip} ->
:inet_res.lookup(query, :in, type,
nameservers: [{rblnsip, config[:port]}],
timeout: @query_timeout,
retry: @query_retries
)
_ ->
[]
end
end
end
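For context (not part of this file): the defaults this commit adds to `config/config.exs`, plus a hedged sketch of enabling the policy through the usual `:mrf, :policies` list; point the nameserver and zone at your own rbldnsd instance:

```
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy]

config :pleroma, :mrf_dnsrbl,
  nameserver: "127.0.0.1",
  port: 53,
  zone: "bl.pleroma.com"
```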

View File

@@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
    field(:type, :string, default: "Link")
    field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
    field(:name, :string)
+    field(:summary, :string)
    field(:blurhash, :string)

    embeds_many :url, UrlObjectValidator, primary_key: false do
@@ -44,7 +45,7 @@ def changeset(struct, data) do
      |> fix_url()

    struct
-    |> cast(data, [:id, :type, :mediaType, :name, :blurhash])
+    |> cast(data, [:id, :type, :mediaType, :name, :summary, :blurhash])
    |> cast_embed(:url, with: &url_changeset/2, required: true)
    |> validate_inclusion(:type, ~w[Link Document Audio Image Video])
    |> validate_required([:type, :mediaType])

View File

@@ -50,7 +50,11 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
      pleroma: %Schema{
        type: :object,
        properties: %{
-          mime_type: %Schema{type: :string, description: "mime type of the attachment"}
+          mime_type: %Schema{type: :string, description: "mime type of the attachment"},
+          name: %Schema{
+            type: :string,
+            description: "Name of the attachment, typically the filename"
+          }
        }
      }
    },

View File

@@ -38,6 +38,8 @@ defmodule Pleroma.Web.Endpoint do
  plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])

+  plug(Pleroma.Web.Plugs.LoggerMetadataPath)
+
  plug(Pleroma.Web.Plugs.SetLocalePlug)
  plug(CORSPlug)
  plug(Pleroma.Web.Plugs.HTTPSecurityPlug)

View File

@@ -44,7 +44,7 @@ def incoming_ap_doc(%{params: params, req_headers: req_headers}) do
  end

  def incoming_ap_doc(%{"type" => "Delete"} = params) do
-    ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3)
+    ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3, queue: :slow)
  end

  def incoming_ap_doc(params) do

View File

@@ -624,6 +624,19 @@ def render("attachment.json", %{attachment: attachment}) do
        to_string(attachment["id"] || hash_id)
      end

+    description =
+      if attachment["summary"] do
+        HTML.strip_tags(attachment["summary"])
+      else
+        attachment["name"]
+      end
+
+    name = if attachment["summary"], do: attachment["name"]
+
+    pleroma =
+      %{mime_type: media_type}
+      |> Maps.put_if_present(:name, name)
+
    %{
      id: attachment_id,
      url: href,
@@ -631,8 +644,8 @@ def render("attachment.json", %{attachment: attachment}) do
      preview_url: href_preview,
      text_url: href,
      type: type,
-      description: attachment["name"],
-      pleroma: %{mime_type: media_type},
+      description: description,
+      pleroma: pleroma,
      blurhash: attachment["blurhash"]
    }
    |> Maps.put_if_present(:meta, meta)

View File

@@ -3,26 +3,27 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
-  alias Pleroma.Config
  import Plug.Conn

  require Logger

+  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
  def init(opts), do: opts

  def call(conn, _options) do
-    if Config.get([:http_security, :enabled]) do
+    if @config_impl.get([:http_security, :enabled]) do
      conn
      |> merge_resp_headers(headers())
-      |> maybe_send_sts_header(Config.get([:http_security, :sts]))
+      |> maybe_send_sts_header(@config_impl.get([:http_security, :sts]))
    else
      conn
    end
  end

  def primary_frontend do
-    with %{"name" => frontend} <- Config.get([:frontends, :primary]),
-         available <- Config.get([:frontends, :available]),
+    with %{"name" => frontend} <- @config_impl.get([:frontends, :primary]),
+         available <- @config_impl.get([:frontends, :available]),
         %{} = primary_frontend <- Map.get(available, frontend) do
      {:ok, primary_frontend}
    end
@@ -37,8 +38,8 @@ def custom_http_frontend_headers do
  end

  def headers do
-    referrer_policy = Config.get([:http_security, :referrer_policy])
-    report_uri = Config.get([:http_security, :report_uri])
+    referrer_policy = @config_impl.get([:http_security, :referrer_policy])
+    report_uri = @config_impl.get([:http_security, :report_uri])
    custom_http_frontend_headers = custom_http_frontend_headers()

    headers = [
@@ -86,10 +87,10 @@ def headers do
  @csp_start [Enum.join(static_csp_rules, ";") <> ";"]

  defp csp_string do
-    scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme]
+    scheme = @config_impl.get([Pleroma.Web.Endpoint, :url])[:scheme]
    static_url = Pleroma.Web.Endpoint.static_url()
    websocket_url = Pleroma.Web.Endpoint.websocket_url()
-    report_uri = Config.get([:http_security, :report_uri])
+    report_uri = @config_impl.get([:http_security, :report_uri])

    img_src = "img-src 'self' data: blob:"
    media_src = "media-src 'self'"
@@ -97,8 +98,8 @@ defp csp_string do
    # Strict multimedia CSP enforcement only when MediaProxy is enabled
    {img_src, media_src, connect_src} =
-      if Config.get([:media_proxy, :enabled]) &&
-           !Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
+      if @config_impl.get([:media_proxy, :enabled]) &&
+           !@config_impl.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
        sources = build_csp_multimedia_source_list()

        {
@@ -115,17 +116,21 @@ defp csp_string do
      end

    connect_src =
-      if Config.get(:env) == :dev do
+      if @config_impl.get([:env]) == :dev do
        [connect_src, " http://localhost:3035/"]
      else
        connect_src
      end

    script_src =
-      if Config.get(:env) == :dev do
-        "script-src 'self' 'unsafe-eval'"
+      if @config_impl.get([:http_security, :allow_unsafe_eval]) do
+        if @config_impl.get([:env]) == :dev do
+          "script-src 'self' 'unsafe-eval'"
+        else
+          "script-src 'self' 'wasm-unsafe-eval'"
+        end
      else
-        "script-src 'self' 'wasm-unsafe-eval'"
+        "script-src 'self'"
      end

    report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"]
@@ -161,11 +166,11 @@ defp build_csp_param_from_whitelist(url), do: url
  defp build_csp_multimedia_source_list do
    media_proxy_whitelist =
      [:media_proxy, :whitelist]
-      |> Config.get()
+      |> @config_impl.get()
      |> build_csp_from_whitelist([])

-    captcha_method = Config.get([Pleroma.Captcha, :method])
-    captcha_endpoint = Config.get([captcha_method, :endpoint])
+    captcha_method = @config_impl.get([Pleroma.Captcha, :method])
+    captcha_endpoint = @config_impl.get([captcha_method, :endpoint])

    base_endpoints =
      [
@@ -173,7 +178,7 @@ defp build_csp_multimedia_source_list do
        [Pleroma.Upload, :base_url],
        [Pleroma.Uploaders.S3, :public_endpoint]
      ]
-      |> Enum.map(&Config.get/1)
+      |> Enum.map(&@config_impl.get/1)

    [captcha_endpoint | base_endpoints]
    |> Enum.map(&build_csp_param/1)
@@ -200,7 +205,7 @@ defp build_csp_param(url) when is_binary(url) do
  end

  def warn_if_disabled do
-    unless Config.get([:http_security, :enabled]) do
+    unless Pleroma.Config.get([:http_security, :enabled]) do
      Logger.warning("
      .i;;;;i.
        iYcviii;vXY:
@@ -245,8 +250,8 @@ def warn_if_disabled do
  end

  defp maybe_send_sts_header(conn, true) do
-    max_age_sts = Config.get([:http_security, :sts_max_age])
-    max_age_ct = Config.get([:http_security, :ct_max_age])
+    max_age_sts = @config_impl.get([:http_security, :sts_max_age])
+    max_age_ct = @config_impl.get([:http_security, :ct_max_age])

    merge_resp_headers(conn, [
      {"strict-transport-security", "max-age=#{max_age_sts}; includeSubDomains"},

View File

@ -0,0 +1,12 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.LoggerMetadataPath do
def init(opts), do: opts
def call(conn, _) do
Logger.metadata(path: conn.request_path)
conn
end
end

View File

@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.LoggerMetadataUser do
alias Pleroma.User
def init(opts), do: opts
def call(%{assigns: %{user: user = %User{}}} = conn, _) do
Logger.metadata(user: user.nickname)
conn
end
def call(conn, _) do
conn
end
end

View File

@@ -29,6 +29,7 @@ defmodule Pleroma.Web.Router do
  pipeline :browser do
    plug(:accepts, ["html"])
    plug(:fetch_session)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :oauth do
@@ -67,12 +68,14 @@
    plug(:fetch_session)
    plug(:authenticate)
    plug(OpenApiSpex.Plug.PutApiSpec, module: Pleroma.Web.ApiSpec)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :no_auth_or_privacy_expectations_api do
    plug(:base_api)
    plug(:after_auth)
    plug(Pleroma.Web.Plugs.IdempotencyPlug)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  # Pipeline for app-related endpoints (no user auth checks — app-bound tokens must be supported)
@@ -83,12 +86,14 @@
  pipeline :api do
    plug(:expect_public_instance_or_user_authentication)
    plug(:no_auth_or_privacy_expectations_api)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :authenticated_api do
    plug(:expect_user_authentication)
    plug(:no_auth_or_privacy_expectations_api)
    plug(Pleroma.Web.Plugs.EnsureAuthenticatedPlug)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :admin_api do
@@ -99,6 +104,7 @@
    plug(Pleroma.Web.Plugs.EnsureAuthenticatedPlug)
    plug(Pleroma.Web.Plugs.UserIsStaffPlug)
    plug(Pleroma.Web.Plugs.IdempotencyPlug)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :require_admin do
@@ -179,6 +185,7 @@
    plug(:browser)
    plug(:authenticate)
    plug(Pleroma.Web.Plugs.EnsureUserTokenAssignsPlug)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :well_known do
@@ -193,6 +200,7 @@
  pipeline :pleroma_api do
    plug(:accepts, ["html", "json"])
    plug(OpenApiSpex.Plug.PutApiSpec, module: Pleroma.Web.ApiSpec)
+    plug(Pleroma.Web.Plugs.LoggerMetadataUser)
  end

  pipeline :mailbox_preview do

View File

@@ -8,7 +8,7 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
  alias Pleroma.Object
  alias Pleroma.Repo

-  use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup"
+  use Pleroma.Workers.WorkerHelper, queue: "slow"

  @impl Oban.Worker
  def perform(%Job{

View File

@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.BackupWorker do
-  use Oban.Worker, queue: :backup, max_attempts: 1
+  use Oban.Worker, queue: :slow, max_attempts: 1

  alias Oban.Job
  alias Pleroma.User.Backup

View File

@@ -9,7 +9,7 @@ defmodule Pleroma.Workers.Cron.NewUsersDigestWorker do
  import Ecto.Query

-  use Pleroma.Workers.WorkerHelper, queue: "mailer"
+  use Pleroma.Workers.WorkerHelper, queue: "background"

  @impl Oban.Worker
  def perform(_job) do

View File

@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.MailerWorker do
-  use Pleroma.Workers.WorkerHelper, queue: "mailer"
+  use Pleroma.Workers.WorkerHelper, queue: "background"

  @impl Oban.Worker
  def perform(%Job{args: %{"op" => "email", "encoded_email" => encoded_email, "config" => config}}) do

View File

@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.MuteExpireWorker do
-  use Pleroma.Workers.WorkerHelper, queue: "mute_expire"
+  use Pleroma.Workers.WorkerHelper, queue: "background"

  @impl Oban.Worker
  def perform(%Job{args: %{"op" => "unmute_user", "muter_id" => muter_id, "mutee_id" => mutee_id}}) do

View File

@@ -6,7 +6,7 @@ defmodule Pleroma.Workers.PollWorker do
  @moduledoc """
  Generates notifications when a poll ends.
  """
-  use Pleroma.Workers.WorkerHelper, queue: "poll_notifications"
+  use Pleroma.Workers.WorkerHelper, queue: "background"

  alias Pleroma.Activity
  alias Pleroma.Notification

View File

@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredActivity do
  Worker which purges expired activity.
  """

-  use Oban.Worker, queue: :activity_expiration, max_attempts: 1, unique: [period: :infinity]
+  use Oban.Worker, queue: :slow, max_attempts: 1, unique: [period: :infinity]

  import Ecto.Query
@@ -59,7 +59,7 @@ defp find_user(ap_id) do
  def get_expiration(id) do
    from(j in Oban.Job,
      where: j.state == "scheduled",
-      where: j.queue == "activity_expiration",
+      where: j.queue == "slow",
      where: fragment("?->>'activity_id' = ?", j.args, ^id)
    )
    |> Pleroma.Repo.one()

View File

@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredFilter do
  Worker which purges expired filters
  """

-  use Oban.Worker, queue: :filter_expiration, max_attempts: 1, unique: [period: :infinity]
+  use Oban.Worker, queue: :background, max_attempts: 1, unique: [period: :infinity]

  import Ecto.Query
@@ -38,7 +38,7 @@ def timeout(_job), do: :timer.seconds(5)
  def get_expiration(id) do
    from(j in Job,
      where: j.state == "scheduled",
-      where: j.queue == "filter_expiration",
+      where: j.queue == "background",
      where: fragment("?->'filter_id' = ?", j.args, ^id)
    )
    |> Repo.one()

View File

@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredToken do
  Worker which purges expired OAuth tokens
  """

-  use Oban.Worker, queue: :token_expiration, max_attempts: 1
+  use Oban.Worker, queue: :background, max_attempts: 1

  @spec enqueue(%{token_id: integer(), valid_until: DateTime.t(), mod: module()}) ::
          {:ok, Oban.Job.t()} | {:error, Ecto.Changeset.t()}

View File

@@ -5,7 +5,7 @@
defmodule Pleroma.Workers.RemoteFetcherWorker do
  alias Pleroma.Object.Fetcher

-  use Pleroma.Workers.WorkerHelper, queue: "remote_fetcher"
+  use Pleroma.Workers.WorkerHelper, queue: "background"

  @impl Oban.Worker
  def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do

View File

@@ -6,7 +6,7 @@ defmodule Pleroma.Workers.RichMediaExpirationWorker do
  alias Pleroma.Web.RichMedia.Card

  use Oban.Worker,
-    queue: :rich_media_expiration
+    queue: :background

  @impl Oban.Worker
  def perform(%Job{args: %{"url" => url} = _args}) do

View File

@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.ScheduledActivityWorker do
  The worker to post scheduled activity.
  """

-  use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
+  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"

  alias Pleroma.Repo
  alias Pleroma.ScheduledActivity

View File

@@ -5973,3 +5973,87 @@ msgstr ""
msgctxt "config label at :pleroma-:instance > :languages"
msgid "Languages"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-:mrf_emoji"
msgid "Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html)."
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-:mrf_emoji > :federated_timeline_removal_shortcode"
msgid " A list of patterns which result in message with emojis whose shortcodes match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-:mrf_emoji > :federated_timeline_removal_url"
msgid " A list of patterns which result in message with emojis whose URLs match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-:mrf_emoji > :remove_shortcode"
msgid " A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-:mrf_emoji > :remove_url"
msgid " A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-Pleroma.User.Backup > :process_chunk_size"
msgid "The number of activities to fetch in the backup job for each chunk."
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config description at :pleroma-Pleroma.User.Backup > :process_wait_time"
msgid "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed."
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-:mrf_emoji"
msgid "MRF Emoji"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-:mrf_emoji > :federated_timeline_removal_shortcode"
msgid "Federated timeline removal shortcode"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-:mrf_emoji > :federated_timeline_removal_url"
msgid "Federated timeline removal url"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-:mrf_emoji > :remove_shortcode"
msgid "Remove shortcode"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-:mrf_emoji > :remove_url"
msgid "Remove url"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-Pleroma.User.Backup > :process_chunk_size"
msgid "Process Chunk Size"
msgstr ""
#: lib/pleroma/docs/translator.ex:5
#, elixir-autogen, elixir-format
msgctxt "config label at :pleroma-Pleroma.User.Backup > :process_wait_time"
msgid "Process Wait Time"
msgstr ""

View File

@@ -110,7 +110,7 @@ msgstr ""
msgid "Can't display this activity"
msgstr ""

-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:334
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:346
#, elixir-autogen, elixir-format
msgid "Can't find user"
msgstr ""
@@ -198,7 +198,7 @@ msgstr ""
msgid "Invalid password."
msgstr ""

-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:267
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:279
#, elixir-autogen, elixir-format
msgid "Invalid request"
msgstr ""
@@ -225,7 +225,7 @@ msgstr ""
#: lib/pleroma/web/feed/tag_controller.ex:16
#: lib/pleroma/web/feed/user_controller.ex:69
#: lib/pleroma/web/o_status/o_status_controller.ex:132
-#: lib/pleroma/web/plugs/uploaded_media.ex:104
+#: lib/pleroma/web/plugs/uploaded_media.ex:84
#, elixir-autogen, elixir-format
msgid "Not found"
msgstr ""
@@ -235,7 +235,7 @@ msgstr ""
msgid "Poll's author can't vote"
msgstr ""

-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:499
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:511
#: lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex:20
#: lib/pleroma/web/mastodon_api/controllers/poll_controller.ex:39
#: lib/pleroma/web/mastodon_api/controllers/poll_controller.ex:51
@@ -341,7 +341,7 @@ msgstr ""
msgid "CAPTCHA expired"
msgstr ""

-#: lib/pleroma/web/plugs/uploaded_media.ex:77
+#: lib/pleroma/web/plugs/uploaded_media.ex:57
#, elixir-autogen, elixir-format
msgid "Failed"
msgstr ""
@@ -361,7 +361,7 @@ msgstr ""
msgid "Insufficient permissions: %{permissions}."
msgstr ""

-#: lib/pleroma/web/plugs/uploaded_media.ex:131
+#: lib/pleroma/web/plugs/uploaded_media.ex:111
#, elixir-autogen, elixir-format
msgid "Internal Error"
msgstr ""
@@ -557,7 +557,7 @@ msgstr ""
msgid "Access denied"
msgstr ""

-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:331
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:343
#, elixir-autogen, elixir-format
msgid "This API requires an authenticated user"
msgstr ""
@@ -567,7 +567,7 @@ msgstr ""
msgid "User is not an admin."
msgstr ""

-#: lib/pleroma/user/backup.ex:73
+#: lib/pleroma/user/backup.ex:78
#, elixir-format
msgid "Last export was less than a day ago"
msgid_plural "Last export was less than %{days} days ago"
@@ -607,3 +607,23 @@ msgstr ""
#, elixir-autogen, elixir-format
msgid "User isn't privileged."
msgstr ""
#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:267
#, elixir-autogen, elixir-format
msgid "Bio is too long"
msgstr ""
#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:270
#, elixir-autogen, elixir-format
msgid "Name is too long"
msgstr ""
#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:273
#, elixir-autogen, elixir-format
msgid "One or more field entries are too long"
msgstr ""
#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:276
#, elixir-autogen, elixir-format
msgid "Too many field entries"
msgstr ""

View File

@@ -219,3 +219,43 @@ msgstr ""
#, elixir-autogen, elixir-format
msgid "read:mutes"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "push"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "read:backups"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "read:chats"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "read:media"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "read:reports"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "write:chats"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "write:follow"
msgstr ""
#: lib/pleroma/web/api_spec/scopes/translator.ex:5
#, elixir-autogen, elixir-format
msgid "write:reports"
msgstr ""

View File

@ -0,0 +1,32 @@
defmodule Pleroma.Repo.Migrations.ObanQueuesRefactor do
use Ecto.Migration
@changed_queues [
{"attachments_cleanup", "slow"},
{"mailer", "background"},
{"mute_expire", "background"},
{"poll_notifications", "background"},
{"activity_expiration", "slow"},
{"filter_expiration", "background"},
{"token_expiration", "background"},
{"remote_fetcher", "background"},
{"rich_media_expiration", "background"}
]
def up do
Enum.each(@changed_queues, fn {old, new} ->
execute("UPDATE oban_jobs SET queue = '#{new}' WHERE queue = '#{old}';")
end)
# Handled specially, as reverting this would not be ideal and leaving it is harmless
execute(
"UPDATE oban_jobs SET queue = 'federator_outgoing' WHERE queue = 'scheduled_activities';"
)
end
def down do
# Just move all slow queue jobs to the background queue if we are reverting
# as the slow queue will not be processing jobs
execute("UPDATE oban_jobs SET queue = 'background' WHERE queue = 'slow';")
end
end

View File

@ -0,0 +1,9 @@
FROM python:3.9
WORKDIR /code
COPY fastembed-server.py /workdir/fastembed-server.py
COPY requirements.txt /workdir/requirements.txt
RUN pip install -r /workdir/requirements.txt
CMD ["python", "/workdir/fastembed-server.py"]

View File

@ -0,0 +1,6 @@
# About
This is a minimal implementation of the [OpenAI Embeddings API](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) meant to be used with the QdrantSearch backend.
# Usage
The easiest way to run it is to use docker compose with `docker compose up`. This starts the server on the default configured port. Different models can be used; for a full list of supported models, check the [fastembed documentation](https://qdrant.github.io/fastembed/examples/Supported_Models/). The first time a model is requested, it will be downloaded, which can take a few seconds.
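The request and response formats match the `/v1/embeddings` endpoint implemented in `fastembed-server.py`. As a rough client sketch (assuming the default port `11345` from the bundled `docker-compose.yml` and a model name that fastembed supports):

```python
# Minimal client sketch using only the Python standard library.
import json
import urllib.request

payload = json.dumps({
    # Any model from the fastembed supported-models list should work here.
    "model": "BAAI/bge-small-en-v1.5",
    "input": "an example post to embed",
}).encode()

req = urllib.request.Request(
    "http://localhost:11345/v1/embeddings",
    data=payload,
    headers={"Content-Type": "application/json"},
)

with urllib.request.urlopen(req) as resp:
    body = json.load(resp)

# The response mirrors the OpenAI format: {"data": [{"embedding": [...]}]}
print(len(body["data"][0]["embedding"]))
```

A `GET /health` request should return `{"status": "ok"}` once the server is up.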

View File

@ -0,0 +1,5 @@
services:
web:
build: .
ports:
- "11345:11345"

View File

@ -0,0 +1,27 @@
from fastembed import TextEmbedding
from fastapi import FastAPI
from pydantic import BaseModel
models = {}
app = FastAPI()
class EmbeddingRequest(BaseModel):
model: str
input: str
@app.post("/v1/embeddings")
def embeddings(request: EmbeddingRequest):
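# Load the requested model on first use and cache it for subsequent requests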
model = models.get(request.model) or TextEmbedding(request.model)
models[request.model] = model
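# model.embed returns a generator of numpy vectors; take the single result and convert it to a plain list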
embeddings = next(model.embed(request.input)).tolist()
return {"data": [{"embedding": embeddings}]}
@app.get("/health")
def health():
return {"status": "ok"}
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=11345)

View File

@ -0,0 +1,4 @@
fastapi==0.111.0
fastembed==0.2.7
pydantic==1.10.15
uvicorn==0.29.0

View File

@ -31,8 +31,7 @@ test "scheduled activities with jobs when ScheduledActivity enabled" do
{:ok, sa1} = ScheduledActivity.create(user, attrs) {:ok, sa1} = ScheduledActivity.create(user, attrs)
{:ok, sa2} = ScheduledActivity.create(user, attrs) {:ok, sa2} = ScheduledActivity.create(user, attrs)
jobs = jobs = Repo.all(from(j in Oban.Job, where: j.queue == "federator_outgoing", select: j.args))
Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args))
assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}] assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}]
end end

View File

@ -0,0 +1,199 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.QdrantSearchTest do
use Pleroma.DataCase, async: true
use Oban.Testing, repo: Pleroma.Repo
import Pleroma.Factory
import Mox
alias Pleroma.Search.QdrantSearch
alias Pleroma.UnstubbedConfigMock, as: Config
alias Pleroma.Web.CommonAPI
alias Pleroma.Workers.SearchIndexingWorker
describe "Qdrant search" do
test "returns the correct healthcheck endpoints" do
# No openai healthcheck URL
Config
|> expect(:get, 2, fn
[Pleroma.Search.QdrantSearch, key], nil ->
%{qdrant_url: "https://qdrant.url"}[key]
end)
[health_endpoint] = QdrantSearch.healthcheck_endpoints()
assert "https://qdrant.url/healthz" == health_endpoint
# Set openai healthcheck URL
Config
|> expect(:get, 2, fn
[Pleroma.Search.QdrantSearch, key], nil ->
%{qdrant_url: "https://qdrant.url", openai_healthcheck_url: "https://openai.url/health"}[
key
]
end)
[_, health_endpoint] = QdrantSearch.healthcheck_endpoints()
assert "https://openai.url/health" == health_endpoint
end
test "searches for a term by encoding it and sending it to qdrant" do
user = insert(:user)
{:ok, activity} =
CommonAPI.post(user, %{
status: "guys i just don't wanna leave the swamp",
visibility: "public"
})
Config
|> expect(:get, 3, fn
[Pleroma.Search, :module], nil ->
QdrantSearch
[Pleroma.Search.QdrantSearch, key], nil ->
%{
openai_model: "a_model",
openai_url: "https://openai.url",
qdrant_url: "https://qdrant.url"
}[key]
end)
Tesla.Mock.mock(fn
%{url: "https://openai.url/v1/embeddings", method: :post} ->
Tesla.Mock.json(%{
data: [%{embedding: [1, 2, 3]}]
})
%{url: "https://qdrant.url/collections/posts/points/search", method: :post, body: body} ->
data = Jason.decode!(body)
refute data["filter"]
Tesla.Mock.json(%{
result: [%{"id" => activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()}]
})
end)
results = QdrantSearch.search(nil, "guys i just don't wanna leave the swamp", %{})
assert results == [activity]
end
test "for a given actor, ask for only relevant matches" do
user = insert(:user)
{:ok, activity} =
CommonAPI.post(user, %{
status: "guys i just don't wanna leave the swamp",
visibility: "public"
})
Config
|> expect(:get, 3, fn
[Pleroma.Search, :module], nil ->
QdrantSearch
[Pleroma.Search.QdrantSearch, key], nil ->
%{
openai_model: "a_model",
openai_url: "https://openai.url",
qdrant_url: "https://qdrant.url"
}[key]
end)
Tesla.Mock.mock(fn
%{url: "https://openai.url/v1/embeddings", method: :post} ->
Tesla.Mock.json(%{
data: [%{embedding: [1, 2, 3]}]
})
%{url: "https://qdrant.url/collections/posts/points/search", method: :post, body: body} ->
data = Jason.decode!(body)
assert data["filter"] == %{
"must" => [%{"key" => "actor", "match" => %{"value" => user.ap_id}}]
}
Tesla.Mock.json(%{
result: [%{"id" => activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()}]
})
end)
results =
QdrantSearch.search(nil, "guys i just don't wanna leave the swamp", %{author: user})
assert results == [activity]
end
test "indexes a public post on creation, deletes from the index on deletion" do
user = insert(:user)
Tesla.Mock.mock(fn
%{method: :post, url: "https://openai.url/v1/embeddings"} ->
send(self(), "posted_to_openai")
Tesla.Mock.json(%{
data: [%{embedding: [1, 2, 3]}]
})
%{method: :put, url: "https://qdrant.url/collections/posts/points", body: body} ->
send(self(), "posted_to_qdrant")
data = Jason.decode!(body)
%{"points" => [%{"vector" => vector, "payload" => payload}]} = data
assert vector == [1, 2, 3]
assert payload["actor"]
assert payload["published_at"]
Tesla.Mock.json("ok")
%{method: :post, url: "https://qdrant.url/collections/posts/points/delete"} ->
send(self(), "deleted_from_qdrant")
Tesla.Mock.json("ok")
end)
Config
|> expect(:get, 6, fn
[Pleroma.Search, :module], nil ->
QdrantSearch
[Pleroma.Search.QdrantSearch, key], nil ->
%{
openai_model: "a_model",
openai_url: "https://openai.url",
qdrant_url: "https://qdrant.url"
}[key]
end)
{:ok, activity} =
CommonAPI.post(user, %{
status: "guys i just don't wanna leave the swamp",
visibility: "public"
})
args = %{"op" => "add_to_index", "activity" => activity.id}
assert_enqueued(
worker: SearchIndexingWorker,
args: args
)
assert :ok = perform_job(SearchIndexingWorker, args)
assert_received("posted_to_openai")
assert_received("posted_to_qdrant")
{:ok, _} = CommonAPI.delete(activity.id, user)
delete_args = %{"op" => "remove_from_index", "object" => activity.object.id}
assert_enqueued(worker: SearchIndexingWorker, args: delete_args)
assert :ok = perform_job(SearchIndexingWorker, delete_args)
assert_received("deleted_from_qdrant")
end
end
end

View File

@ -0,0 +1,65 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicyTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy
test "it allows posts without mentions" do
user = insert(:user, local: false)
assert user.note_count == 0
message = %{
"type" => "Create",
"actor" => user.ap_id
}
{:ok, _message} = AntiMentionSpamPolicy.filter(message)
end
test "it allows posts from users with followers, posts, and age" do
user =
insert(:user,
local: false,
follower_count: 1,
note_count: 1,
inserted_at: ~N[1970-01-01 00:00:00]
)
message = %{
"type" => "Create",
"actor" => user.ap_id
}
{:ok, _message} = AntiMentionSpamPolicy.filter(message)
end
test "it allows posts from local users" do
user = insert(:user, local: true)
message = %{
"type" => "Create",
"actor" => user.ap_id
}
{:ok, _message} = AntiMentionSpamPolicy.filter(message)
end
test "it rejects posts with mentions from users without followers" do
user = insert(:user, local: false, follower_count: 0)
message = %{
"type" => "Create",
"actor" => user.ap_id,
"object" => %{
"to" => ["https://pleroma.soykaf.com/users/1"],
"cc" => ["https://pleroma.soykaf.com/users/1"],
"actor" => user.ap_id
}
}
{:reject, _message} = AntiMentionSpamPolicy.filter(message)
end
end

View File

@ -27,19 +27,22 @@ test "fails without url" do
end end
test "works with honkerific attachments" do test "works with honkerific attachments" do
attachment = %{ honk = %{
"mediaType" => "", "mediaType" => "",
"name" => "", "summary" => "Select your spirit chonk",
"summary" => "298p3RG7j27tfsZ9RQ.jpg", "name" => "298p3RG7j27tfsZ9RQ.jpg",
"type" => "Document", "type" => "Document",
"url" => "https://honk.tedunangst.com/d/298p3RG7j27tfsZ9RQ.jpg" "url" => "https://honk.tedunangst.com/d/298p3RG7j27tfsZ9RQ.jpg"
} }
assert {:ok, attachment} = assert {:ok, attachment} =
AttachmentValidator.cast_and_validate(attachment) honk
|> AttachmentValidator.cast_and_validate()
|> Ecto.Changeset.apply_action(:insert) |> Ecto.Changeset.apply_action(:insert)
assert attachment.mediaType == "application/octet-stream" assert attachment.mediaType == "application/octet-stream"
assert attachment.summary == "Select your spirit chonk"
assert attachment.name == "298p3RG7j27tfsZ9RQ.jpg"
end end
test "works with an unknown but valid mime type" do test "works with an unknown but valid mime type" do

View File

@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do
use Oban.Testing, repo: Pleroma.Repo
use Pleroma.Web.ConnCase, async: true use Pleroma.Web.ConnCase, async: true
alias Pleroma.Repo alias Pleroma.Repo
@ -78,7 +79,7 @@ test "updates a scheduled activity" do
} }
) )
job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities")) job = Repo.one(from(j in Oban.Job, where: j.queue == "federator_outgoing"))
assert job.args == %{"activity_id" => scheduled_activity.id} assert job.args == %{"activity_id" => scheduled_activity.id}
assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at) assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at)
@ -124,9 +125,11 @@ test "deletes a scheduled activity" do
} }
) )
job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities")) assert_enqueued(
worker: Pleroma.Workers.ScheduledActivityWorker,
assert job.args == %{"activity_id" => scheduled_activity.id} args: %{"activity_id" => scheduled_activity.id},
queue: :federator_outgoing
)
res_conn = res_conn =
conn conn
@ -135,7 +138,11 @@ test "deletes a scheduled activity" do
assert %{} = json_response_and_validate_schema(res_conn, 200) assert %{} = json_response_and_validate_schema(res_conn, 200)
refute Repo.get(ScheduledActivity, scheduled_activity.id) refute Repo.get(ScheduledActivity, scheduled_activity.id)
refute Repo.get(Oban.Job, job.id)
refute_enqueued(
worker: Pleroma.Workers.ScheduledActivityWorker,
args: %{"activity_id" => scheduled_activity.id}
)
res_conn = res_conn =
conn conn

View File

@ -591,45 +591,78 @@ test "create mentions from the 'tag' field" do
assert mention.url == recipient.ap_id assert mention.url == recipient.ap_id
end end
test "attachments" do describe "attachments" do
object = %{ test "Complete Mastodon style" do
"type" => "Image", object = %{
"url" => [ "type" => "Image",
%{ "url" => [
"mediaType" => "image/png", %{
"href" => "someurl", "mediaType" => "image/png",
"width" => 200, "href" => "someurl",
"height" => 100 "width" => 200,
} "height" => 100
], }
"blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn", ],
"uuid" => 6 "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn",
} "uuid" => 6
}
expected = %{ expected = %{
id: "1638338801", id: "1638338801",
type: "image", type: "image",
url: "someurl", url: "someurl",
remote_url: "someurl", remote_url: "someurl",
preview_url: "someurl", preview_url: "someurl",
text_url: "someurl", text_url: "someurl",
description: nil, description: nil,
pleroma: %{mime_type: "image/png"}, pleroma: %{mime_type: "image/png"},
meta: %{original: %{width: 200, height: 100, aspect: 2}}, meta: %{original: %{width: 200, height: 100, aspect: 2}},
blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn" blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn"
} }
api_spec = Pleroma.Web.ApiSpec.spec() api_spec = Pleroma.Web.ApiSpec.spec()
assert expected == StatusView.render("attachment.json", %{attachment: object}) assert expected == StatusView.render("attachment.json", %{attachment: object})
assert_schema(expected, "Attachment", api_spec) assert_schema(expected, "Attachment", api_spec)
# If there's an "id", use that instead of the generated one # If there's an "id", use that instead of the generated one
object = Map.put(object, "id", 2) object = Map.put(object, "id", 2)
result = StatusView.render("attachment.json", %{attachment: object}) result = StatusView.render("attachment.json", %{attachment: object})
assert %{id: "2"} = result assert %{id: "2"} = result
assert_schema(result, "Attachment", api_spec) assert_schema(result, "Attachment", api_spec)
end
test "Honkerific" do
object = %{
"type" => "Image",
"url" => [
%{
"mediaType" => "image/png",
"href" => "someurl"
}
],
"name" => "fool.jpeg",
"summary" => "they have played us for absolute fools."
}
expected = %{
blurhash: nil,
description: "they have played us for absolute fools.",
id: "1638338801",
pleroma: %{mime_type: "image/png", name: "fool.jpeg"},
preview_url: "someurl",
remote_url: "someurl",
text_url: "someurl",
type: "image",
url: "someurl"
}
api_spec = Pleroma.Web.ApiSpec.spec()
assert expected == StatusView.render("attachment.json", %{attachment: object})
assert_schema(expected, "Attachment", api_spec)
end
end end
test "put the url advertised in the Activity in to the url attribute" do test "put the url advertised in the Activity in to the url attribute" do

View File

@ -3,14 +3,52 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do
use Pleroma.Web.ConnCase use Pleroma.Web.ConnCase, async: true
alias Plug.Conn alias Plug.Conn
describe "http security enabled" do import Mox
setup do: clear_config([:http_security, :enabled], true)
test "it sends CSP headers when enabled", %{conn: conn} do setup do
base_config = Pleroma.Config.get([:http_security])
%{base_config: base_config}
end
defp mock_config(config, additional \\ %{}) do
Pleroma.StaticStubbedConfigMock
|> stub(:get, fn
[:http_security, key] -> config[key]
key -> additional[key]
end)
end
describe "http security enabled" do
setup %{base_config: base_config} do
%{base_config: Keyword.put(base_config, :enabled, true)}
end
test "it does not contain unsafe-eval", %{conn: conn, base_config: base_config} do
mock_config(base_config)
conn = get(conn, "/api/v1/instance")
[header] = Conn.get_resp_header(conn, "content-security-policy")
refute header =~ ~r/unsafe-eval/
end
test "with allow_unsafe_eval set, it does contain it", %{conn: conn, base_config: base_config} do
base_config =
base_config
|> Keyword.put(:allow_unsafe_eval, true)
mock_config(base_config)
conn = get(conn, "/api/v1/instance")
[header] = Conn.get_resp_header(conn, "content-security-policy")
assert header =~ ~r/unsafe-eval/
end
test "it sends CSP headers when enabled", %{conn: conn, base_config: base_config} do
mock_config(base_config)
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")
refute Conn.get_resp_header(conn, "x-xss-protection") == [] refute Conn.get_resp_header(conn, "x-xss-protection") == []
@ -22,8 +60,10 @@ test "it sends CSP headers when enabled", %{conn: conn} do
refute Conn.get_resp_header(conn, "content-security-policy") == [] refute Conn.get_resp_header(conn, "content-security-policy") == []
end end
test "it sends STS headers when enabled", %{conn: conn} do test "it sends STS headers when enabled", %{conn: conn, base_config: base_config} do
clear_config([:http_security, :sts], true) base_config
|> Keyword.put(:sts, true)
|> mock_config()
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")
@ -31,8 +71,10 @@ test "it sends STS headers when enabled", %{conn: conn} do
refute Conn.get_resp_header(conn, "expect-ct") == [] refute Conn.get_resp_header(conn, "expect-ct") == []
end end
test "it does not send STS headers when disabled", %{conn: conn} do test "it does not send STS headers when disabled", %{conn: conn, base_config: base_config} do
clear_config([:http_security, :sts], false) base_config
|> Keyword.put(:sts, false)
|> mock_config()
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")
@ -40,19 +82,30 @@ test "it does not send STS headers when disabled", %{conn: conn} do
assert Conn.get_resp_header(conn, "expect-ct") == [] assert Conn.get_resp_header(conn, "expect-ct") == []
end end
test "referrer-policy header reflects configured value", %{conn: conn} do test "referrer-policy header reflects configured value", %{
resp = get(conn, "/api/v1/instance") conn: conn,
base_config: base_config
} do
mock_config(base_config)
resp = get(conn, "/api/v1/instance")
assert Conn.get_resp_header(resp, "referrer-policy") == ["same-origin"] assert Conn.get_resp_header(resp, "referrer-policy") == ["same-origin"]
clear_config([:http_security, :referrer_policy], "no-referrer") base_config
|> Keyword.put(:referrer_policy, "no-referrer")
|> mock_config
resp = get(conn, "/api/v1/instance") resp = get(conn, "/api/v1/instance")
assert Conn.get_resp_header(resp, "referrer-policy") == ["no-referrer"] assert Conn.get_resp_header(resp, "referrer-policy") == ["no-referrer"]
end end
test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} do test "it sends `report-to` & `report-uri` CSP response headers", %{
conn: conn,
base_config: base_config
} do
mock_config(base_config)
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy") [csp] = Conn.get_resp_header(conn, "content-security-policy")
@ -65,7 +118,11 @@ test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} d
"{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}" "{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}"
end end
test "default values for img-src and media-src with disabled media proxy", %{conn: conn} do test "default values for img-src and media-src with disabled media proxy", %{
conn: conn,
base_config: base_config
} do
mock_config(base_config)
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy") [csp] = Conn.get_resp_header(conn, "content-security-policy")
@ -73,60 +130,129 @@ test "default values for img-src and media-src with disabled media proxy", %{con
assert csp =~ "img-src 'self' data: blob: https:;" assert csp =~ "img-src 'self' data: blob: https:;"
end end
test "it sets the Service-Worker-Allowed header", %{conn: conn} do test "it sets the Service-Worker-Allowed header", %{conn: conn, base_config: base_config} do
clear_config([:http_security, :enabled], true) base_config
clear_config([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"}) |> Keyword.put(:enabled, true)
clear_config([:frontends, :available], %{ additional_config =
"fedi-fe" => %{ %{}
"name" => "fedi-fe", |> Map.put([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"})
"custom-http-headers" => [{"service-worker-allowed", "/"}] |> Map.put(
} [:frontends, :available],
}) %{
"fedi-fe" => %{
"name" => "fedi-fe",
"custom-http-headers" => [{"service-worker-allowed", "/"}]
}
}
)
mock_config(base_config, additional_config)
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")
assert Conn.get_resp_header(conn, "service-worker-allowed") == ["/"] assert Conn.get_resp_header(conn, "service-worker-allowed") == ["/"]
end end
end end
describe "img-src and media-src" do describe "img-src and media-src" do
setup do setup %{base_config: base_config} do
clear_config([:http_security, :enabled], true) base_config =
clear_config([:media_proxy, :enabled], true) base_config
clear_config([:media_proxy, :proxy_opts, :redirect_on_failure], false) |> Keyword.put(:enabled, true)
additional_config =
%{}
|> Map.put([:media_proxy, :enabled], true)
|> Map.put([:media_proxy, :proxy_opts, :redirect_on_failure], false)
|> Map.put([:media_proxy, :whitelist], [])
%{base_config: base_config, additional_config: additional_config}
end end
test "media_proxy with base_url", %{conn: conn} do test "media_proxy with base_url", %{
conn: conn,
base_config: base_config,
additional_config: additional_config
} do
url = "https://example.com" url = "https://example.com"
clear_config([:media_proxy, :base_url], url)
additional_config =
additional_config
|> Map.put([:media_proxy, :base_url], url)
mock_config(base_config, additional_config)
assert_media_img_src(conn, url) assert_media_img_src(conn, url)
end end
test "upload with base url", %{conn: conn} do test "upload with base url", %{
conn: conn,
base_config: base_config,
additional_config: additional_config
} do
url = "https://example2.com" url = "https://example2.com"
clear_config([Pleroma.Upload, :base_url], url)
additional_config =
additional_config
|> Map.put([Pleroma.Upload, :base_url], url)
mock_config(base_config, additional_config)
assert_media_img_src(conn, url) assert_media_img_src(conn, url)
end end
test "with S3 public endpoint", %{conn: conn} do test "with S3 public endpoint", %{
conn: conn,
base_config: base_config,
additional_config: additional_config
} do
url = "https://example3.com" url = "https://example3.com"
clear_config([Pleroma.Uploaders.S3, :public_endpoint], url)
additional_config =
additional_config
|> Map.put([Pleroma.Uploaders.S3, :public_endpoint], url)
mock_config(base_config, additional_config)
assert_media_img_src(conn, url) assert_media_img_src(conn, url)
end end
test "with captcha endpoint", %{conn: conn} do test "with captcha endpoint", %{
clear_config([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com") conn: conn,
base_config: base_config,
additional_config: additional_config
} do
additional_config =
additional_config
|> Map.put([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com")
|> Map.put([Pleroma.Captcha, :method], Pleroma.Captcha.Mock)
mock_config(base_config, additional_config)
assert_media_img_src(conn, "https://captcha.com") assert_media_img_src(conn, "https://captcha.com")
end end
test "with media_proxy whitelist", %{conn: conn} do test "with media_proxy whitelist", %{
clear_config([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"]) conn: conn,
base_config: base_config,
additional_config: additional_config
} do
additional_config =
additional_config
|> Map.put([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"])
mock_config(base_config, additional_config)
assert_media_img_src(conn, "https://example7.com https://example6.com") assert_media_img_src(conn, "https://example7.com https://example6.com")
end end
# TODO: delete after removing support for bare domains in the media proxy whitelist # TODO: delete after removing support for bare domains in the media proxy whitelist
test "with media_proxy bare domains whitelist (deprecated)", %{conn: conn} do test "with media_proxy bare domains whitelist (deprecated)", %{
clear_config([:media_proxy, :whitelist], ["example4.com", "example5.com"]) conn: conn,
base_config: base_config,
additional_config: additional_config
} do
additional_config =
additional_config
|> Map.put([:media_proxy, :whitelist], ["example4.com", "example5.com"])
mock_config(base_config, additional_config)
assert_media_img_src(conn, "example5.com example4.com") assert_media_img_src(conn, "example5.com example4.com")
end end
end end
@ -138,8 +264,10 @@ defp assert_media_img_src(conn, url) do
assert csp =~ "img-src 'self' data: blob: #{url};" assert csp =~ "img-src 'self' data: blob: #{url};"
end end
test "it does not send CSP headers when disabled", %{conn: conn} do test "it does not send CSP headers when disabled", %{conn: conn, base_config: base_config} do
clear_config([:http_security, :enabled], false) base_config
|> Keyword.put(:enabled, false)
|> mock_config
conn = get(conn, "/api/v1/instance") conn = get(conn, "/api/v1/instance")