Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into auth-fetch-exception
Commit: d3e85da0fd
@@ -1,8 +1,8 @@
-image: git.pleroma.social:5050/pleroma/pleroma/ci-base
+image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24

variables: &global_variables
  # Only used for the release
-  ELIXIR_VER: 1.12.3
+  ELIXIR_VER: 1.13.4
  POSTGRES_DB: pleroma_test
  POSTGRES_USER: postgres
  POSTGRES_PASSWORD: postgres

@@ -72,7 +72,7 @@ check-changelog:
  tags:
    - amd64

-build-1.12.3:
+build-1.13.4:
  extends:
    - .build_changes_policy
    - .using-ci-base

@@ -85,7 +85,7 @@ build-1.15.7-otp-25:
    - .build_changes_policy
    - .using-ci-base
  stage: build
-  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15
+  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
  allow_failure: true
  script:
    - mix compile --force
@@ -1,7 +1,7 @@
ARG ELIXIR_IMG=hexpm/elixir
-ARG ELIXIR_VER=1.12.3
-ARG ERLANG_VER=24.2.1
-ARG ALPINE_VER=3.17.0
+ARG ELIXIR_VER=1.13.4
+ARG ERLANG_VER=24.3.4.15
+ARG ALPINE_VER=3.17.5

FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build
@@ -0,0 +1 @@
Uploader: Add support for uploading attachments using IPFS

@@ -0,0 +1 @@
Add NSFW-detecting MRF

@@ -0,0 +1 @@
Elixir 1.13 is the minimum required version.

@@ -0,0 +1 @@
Fix webfinger spoofing.

@@ -0,0 +1 @@
pleroma_ctl: Use realpath(1) instead of readlink(1)

@@ -0,0 +1 @@
A 422 error is returned when attempting to reply to a deleted status

@@ -0,0 +1 @@
Parsing of RichMedia TTLs for Amazon URLs when query parameters are nil

@@ -0,0 +1 @@
Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue

@@ -0,0 +1 @@
Add "status" notification type

@@ -0,0 +1 @@
Fix validate_webfinger when running a different domain for Webfinger
@@ -0,0 +1,8 @@
FROM elixir:1.13.4-otp-24

# Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
RUN apt-get update &&\
    apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
    mix local.hex --force &&\
    mix local.rebar --force
@@ -0,0 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push .

@@ -1 +1 @@
-docker buildx build --platform linux/amd64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
@@ -82,6 +82,10 @@
  # region: "us-east-1", # may be required for Amazon AWS
  scheme: "https://"

+config :pleroma, Pleroma.Uploaders.IPFS,
+  post_gateway_url: nil,
+  get_gateway_url: nil
+
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  pack_extensions: [".png", ".gif"],

@@ -411,6 +415,13 @@
  threshold: 604_800,
  actions: [:delist, :strip_followers]

+config :pleroma, :mrf_nsfw_api,
+  url: "http://127.0.0.1:5000/",
+  threshold: 0.7,
+  mark_sensitive: true,
+  unlist: false,
+  reject: false
+
config :pleroma, :mrf_follow_bot, follower_nickname: nil

config :pleroma, :mrf_inline_quote, template: "<bdi>RT:</bdi> {url}"

@@ -579,7 +590,7 @@
    attachments_cleanup: 1,
    new_users_digest: 1,
    mute_expire: 5,
-    search_indexing: 10,
+    search_indexing: [limit: 10, paused: true],
    rich_media_expiration: 2
  ],
  plugins: [Oban.Plugins.Pruner],
@@ -136,6 +136,31 @@
        }
      ]
    },
+    %{
+      group: :pleroma,
+      key: Pleroma.Uploaders.IPFS,
+      type: :group,
+      description: "IPFS uploader-related settings",
+      children: [
+        %{
+          key: :get_gateway_url,
+          type: :string,
+          description: "GET Gateway URL",
+          suggestions: [
+            "https://ipfs.mydomain.com/{CID}",
+            "https://{CID}.ipfs.mydomain.com/"
+          ]
+        },
+        %{
+          key: :post_gateway_url,
+          type: :string,
+          description: "POST Gateway URL",
+          suggestions: [
+            "http://localhost:5001/"
+          ]
+        }
+      ]
+    },
    %{
      group: :pleroma,
      key: Pleroma.Uploaders.S3,
@@ -153,6 +153,7 @@
config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
+config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock

peer_module =
  if String.to_integer(System.otp_release()) >= 25 do
@@ -662,6 +662,19 @@ config :ex_aws, :s3,
  host: "s3.eu-central-1.amazonaws.com"
```

+#### Pleroma.Uploaders.IPFS
+
+* `post_gateway_url`: URL with port of POST Gateway (unauthenticated)
+* `get_gateway_url`: URL of public GET Gateway
+
+Example:
+
+```elixir
+config :pleroma, Pleroma.Uploaders.IPFS,
+  post_gateway_url: "http://localhost:5001",
+  get_gateway_url: "http://{CID}.ipfs.mydomain.com"
+```
+
### Upload filters

#### Pleroma.Upload.Filter.AnonymizeFilename
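Note (added for illustration, not part of the diff): the `{CID}` placeholder in the `get_gateway_url` documented in the hunk above is substituted with the content identifier returned by the POST gateway. A minimal sketch, with a made-up CID:

```elixir
get_gateway_url = "http://{CID}.ipfs.mydomain.com"
cid = "bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi"

# Pleroma builds the public attachment URL by replacing the placeholder:
String.replace(get_gateway_url, "{CID}", cid)
#=> "http://bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi.ipfs.mydomain.com"
```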
@@ -295,9 +295,7 @@ See [Admin-API](admin_api.md)
    "id": "9umDrYheeY451cQnEe",
    "name": "Read later",
    "emoji": "🕓",
-    "source": {
-      "emoji": "🕓"
-    }
+    "emoji_url": null
  }
]
```
@@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have

- PostgreSQL 11.0 or newer (Ubuntu 16.04 only ships 9.5, so get a newer version from https://www.postgresql.org/download/linux/ubuntu/)
- `postgresql-contrib` 11.0 or newer (same as above)
-- Elixir 1.8 or newer (do NOT install it from the Debian repository!!! [Install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
+- Elixir 1.13 or newer (do NOT install it from the Debian repository!!! [Install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
- `erlang-dev`
- `erlang-nox`
- `git`
@@ -1,7 +1,7 @@
## Required dependencies

* PostgreSQL >=11.0
-* Elixir >=1.11.0 <1.15
+* Elixir >=1.13.0 <1.15
* Erlang OTP >=22.2.0 (supported: <27)
* git
* file / libmagic
@@ -0,0 +1,15 @@
[Unit]
Description=NSFW API
After=docker.service
Requires=docker.service

[Service]
TimeoutStartSec=0
Restart=always
ExecStartPre=-/usr/bin/docker stop %n
ExecStartPre=-/usr/bin/docker rm %n
ExecStartPre=/usr/bin/docker pull eugencepoi/nsfw_api:latest
ExecStart=/usr/bin/docker run --rm -p 127.0.0.1:5000:5000/tcp --env PORT=5000 --name %n eugencepoi/nsfw_api:latest

[Install]
WantedBy=multi-user.target
@@ -109,7 +109,8 @@ def start(_type, _args) do
        streamer_registry() ++
        background_migrators() ++
        shout_child(shout_enabled?()) ++
-        [Pleroma.Gopher.Server]
+        [Pleroma.Gopher.Server] ++
+        [Pleroma.Search.Healthcheck]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options

@@ -162,7 +163,8 @@ defp cachex_children do
        expiration: chat_message_id_idempotency_key_expiration(),
        limit: 500_000
      ),
-      build_cachex("rel_me", limit: 2500)
+      build_cachex("rel_me", limit: 2500),
+      build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000)
    ]
  end
@@ -73,6 +73,7 @@ def unread_notifications_count(%User{id: user_id}) do
      pleroma:report
      reblog
      poll
+      status
    }

  def changeset(%Notification{} = notification, attrs) do

@@ -375,10 +376,15 @@ def create_notifications(_), do: {:ok, []}
  defp do_create_notifications(%Activity{} = activity) do
    enabled_receivers = get_notified_from_activity(activity)

+    enabled_subscribers = get_notified_subscribers_from_activity(activity)
+
    notifications =
-      Enum.map(enabled_receivers, fn user ->
-        create_notification(activity, user)
-      end)
+      (Enum.map(enabled_receivers, fn user ->
+         create_notification(activity, user)
+       end) ++
+         Enum.map(enabled_subscribers -- enabled_receivers, fn user ->
+           create_notification(activity, user, type: "status")
+         end))
      |> Enum.reject(&is_nil/1)

    {:ok, notifications}

@@ -511,7 +517,25 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
  end

-  def get_notified_from_activity(_, _local_only), do: {[], []}
+  def get_notified_from_activity(_, _local_only), do: []
+
+  def get_notified_subscribers_from_activity(activity, local_only \\ true)
+
+  def get_notified_subscribers_from_activity(
+        %Activity{data: %{"type" => "Create"}} = activity,
+        local_only
+      ) do
+    notification_enabled_ap_ids =
+      []
+      |> Utils.maybe_notify_subscribers(activity)
+
+    potential_receivers =
+      User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
+
+    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
+  end
+
+  def get_notified_subscribers_from_activity(_, _), do: []

  # For some activities, only notify the author of the object
  def get_potential_receiver_ap_ids(%{data: %{"type" => type, "object" => object_id}})

@@ -554,7 +578,6 @@ def get_potential_receiver_ap_ids(activity) do
    []
    |> Utils.maybe_notify_to_recipients(activity)
    |> Utils.maybe_notify_mentioned_recipients(activity)
-    |> Utils.maybe_notify_subscribers(activity)
    |> Utils.maybe_notify_followers(activity)
    |> Enum.uniq()
  end
@@ -10,8 +10,12 @@ def remove_from_index(%Pleroma.Object{id: object_id}) do
  end

  def search(query, options) do
-    search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
+    search_module = Pleroma.Config.get([Pleroma.Search, :module])
    search_module.search(options[:for_user], query, options)
  end

+  def healthcheck_endpoints do
+    search_module = Pleroma.Config.get([Pleroma.Search, :module])
+    search_module.healthcheck_endpoints
+  end
+
end
@@ -48,6 +48,9 @@ def add_to_index(_activity), do: :ok
  @impl true
  def remove_from_index(_object), do: :ok

+  @impl true
+  def healthcheck_endpoints, do: nil
+
  def maybe_restrict_author(query, %User{} = author) do
    Activity.Queries.by_author(query, author)
  end
@@ -0,0 +1,86 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.Healthcheck do
  @doc """
  Monitors health of search backend to control processing of events based on health and availability.
  """
  use GenServer
  require Logger

  @queue :search_indexing
  @tick :timer.seconds(5)
  @timeout :timer.seconds(2)

  def start_link(_) do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @impl true
  def init(_) do
    state = %{healthy: false}
    {:ok, state, {:continue, :start}}
  end

  @impl true
  def handle_continue(:start, state) do
    tick()
    {:noreply, state}
  end

  @impl true
  def handle_info(:check, state) do
    urls = Pleroma.Search.healthcheck_endpoints()

    new_state =
      if check(urls) do
        Oban.resume_queue(queue: @queue)
        Map.put(state, :healthy, true)
      else
        Oban.pause_queue(queue: @queue)
        Map.put(state, :healthy, false)
      end

    maybe_log_state_change(state, new_state)

    tick()
    {:noreply, new_state}
  end

  @impl true
  def handle_call(:state, _from, state) do
    {:reply, state, state, :hibernate}
  end

  def state, do: GenServer.call(__MODULE__, :state)

  def check([]), do: true

  def check(urls) when is_list(urls) do
    Enum.all?(
      urls,
      fn url ->
        case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
          {:ok, %{status: 200}} -> true
          _ -> false
        end
      end
    )
  end

  def check(_), do: true

  defp tick do
    Process.send_after(self(), :check, @tick)
  end

  defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
    Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
  end

  defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
    Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
  end

  defp maybe_log_state_change(_, _), do: :ok
end
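Note (added for illustration, not part of the diff): the GenServer above ties the new pieces together roughly as sketched below; this mirrors `handle_info(:check, ...)` under the assumption that the application is running and a search backend is configured.

```elixir
# The configured backend reports its health URLs (nil/[] means "assume healthy"),
# and the :search_indexing Oban queue is paused or resumed accordingly.
urls = Pleroma.Search.healthcheck_endpoints() || []

if Pleroma.Search.Healthcheck.check(urls) do
  Oban.resume_queue(queue: :search_indexing)
else
  Oban.pause_queue(queue: :search_indexing)
end
```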
@@ -178,4 +178,15 @@ def add_to_index(activity) do
  def remove_from_index(object) do
    meili_delete("/indexes/objects/documents/#{object.id}")
  end

+  @impl true
+  def healthcheck_endpoints do
+    endpoint =
+      Config.get([Pleroma.Search.Meilisearch, :url])
+      |> URI.parse()
+      |> Map.put(:path, "/health")
+      |> URI.to_string()
+
+    [endpoint]
+  end
end
@@ -21,4 +21,12 @@ defmodule Pleroma.Search.SearchBackend do
  from index.
  """
  @callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}

+  @doc """
+  Healthcheck endpoints of search backend infrastructure to monitor for controlling
+  processing of jobs in the Oban queue.
+
+  It is expected a 200 response is healthy and other responses are unhealthy.
+  """
+  @callback healthcheck_endpoints :: list() | nil
end
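Note (added for illustration, not part of the diff): a minimal sketch of a backend implementing the new callback; the module name and URL are hypothetical, only the callback shape comes from the behaviour above.

```elixir
defmodule MyApp.Search.ExampleBackend do
  @behaviour Pleroma.Search.SearchBackend

  # Only the new callback is sketched here; a real backend also implements the
  # behaviour's indexing and search callbacks.
  @impl true
  def healthcheck_endpoints do
    # Return nil to opt out of monitoring, or a list of URLs expected to answer
    # HTTP 200 while the backend is usable.
    ["http://search.example.internal:7700/health"]
  end
end
```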
@@ -239,8 +239,12 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
        ""
      end

-    [base_url, path]
-    |> Path.join()
+    if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
+      String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
+    else
+      [base_url, path]
+      |> Path.join()
+    end
  end

  defp url_from_spec(_upload, _base_url, {:url, url}), do: url

@@ -277,6 +281,9 @@ def base_url do
        Path.join([upload_base_url, bucket_with_namespace])
      end

+      Pleroma.Uploaders.IPFS ->
+        @config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
+
      _ ->
        public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
    end
@@ -0,0 +1,77 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Uploaders.IPFS do
  @behaviour Pleroma.Uploaders.Uploader
  require Logger

  alias Tesla.Multipart

  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)

  defp get_final_url(method) do
    config = @config_impl.get([__MODULE__])
    post_base_url = Keyword.get(config, :post_gateway_url)

    Path.join([post_base_url, method])
  end

  def put_file_endpoint do
    get_final_url("/api/v0/add")
  end

  def delete_file_endpoint do
    get_final_url("/api/v0/files/rm")
  end

  @placeholder "{CID}"
  def placeholder, do: @placeholder

  @impl true
  def get_file(file) do
    b_url = Pleroma.Upload.base_url()

    if String.contains?(b_url, @placeholder) do
      {:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
    else
      {:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
    end
  end

  @impl true
  def put_file(%Pleroma.Upload{} = upload) do
    mp =
      Multipart.new()
      |> Multipart.add_content_type_param("charset=utf-8")
      |> Multipart.add_file(upload.tempfile)

    case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do
      {:ok, ret} ->
        case Jason.decode(ret.body) do
          {:ok, ret} ->
            if Map.has_key?(ret, "Hash") do
              {:ok, {:file, ret["Hash"]}}
            else
              {:error, "JSON doesn't contain Hash key"}
            end

          error ->
            Logger.error("#{__MODULE__}: #{inspect(error)}")
            {:error, "JSON decode failed"}
        end

      error ->
        Logger.error("#{__MODULE__}: #{inspect(error)}")
        {:error, "IPFS Gateway upload failed"}
    end
  end

  @impl true
  def delete_file(file) do
    case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do
      {:ok, %{status: 204}} -> :ok
      error -> {:error, inspect(error)}
    end
  end
end
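Note (added for illustration, not part of the diff): what `put_file/1` above expects from the POST gateway, based on the response body used in the tests further down; the CID value is taken from that fixture and is otherwise illustrative.

```elixir
# The gateway's /api/v0/add endpoint answers with JSON; only "Hash" (the CID) is kept.
body = ~s({"Name":"image.jpg","Size":"5000","Hash":"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi"})

{:ok, %{"Hash" => cid}} = Jason.decode(body)
# put_file/1 then returns {:ok, {:file, cid}}, and get_file/1 later splices this
# CID into the configured get_gateway_url in place of the "{CID}" placeholder.
```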
@@ -0,0 +1,265 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do
  @moduledoc """
  Hide, delete, or mark sensitive NSFW content with artificial intelligence.

  Requires a NSFW API server, configured like so:

      config :pleroma, Pleroma.Web.ActivityPub.MRF.NsfwMRF,
        url: "http://127.0.0.1:5000/",
        threshold: 0.7,
        mark_sensitive: true,
        unlist: false,
        reject: false

  The NSFW API server must implement an HTTP endpoint like this:

      curl http://localhost:5000/?url=https://fedi.com/images/001.jpg

  Returning a response like this:

      {"score", 0.314}

  Where a score is 0-1, with `1` being definitely NSFW.

  A good API server is here: https://github.com/EugenCepoi/nsfw_api
  You can run it with Docker with a one-liner:

      docker run -it -p 127.0.0.1:5000:5000/tcp --env PORT=5000 eugencepoi/nsfw_api:latest

  Options:

  - `url`: Base URL of the API server. Default: "http://127.0.0.1:5000/"
  - `threshold`: Lowest score to take action on. Default: `0.7`
  - `mark_sensitive`: Mark sensitive all detected NSFW content? Default: `true`
  - `unlist`: Unlist all detected NSFW content? Default: `false`
  - `reject`: Reject all detected NSFW content (takes precedence)? Default: `false`
  """
  alias Pleroma.Config
  alias Pleroma.Constants
  alias Pleroma.HTTP
  alias Pleroma.User

  require Logger
  require Pleroma.Constants

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy
  @policy :mrf_nsfw_api

  def build_request_url(url) do
    Config.get([@policy, :url])
    |> URI.parse()
    |> fix_path()
    |> Map.put(:query, "url=#{url}")
    |> URI.to_string()
  end

  def parse_url(url) do
    request = build_request_url(url)

    with {:ok, %Tesla.Env{body: body}} <- HTTP.get(request) do
      Jason.decode(body)
    else
      error ->
        Logger.warn("""
        [NsfwApiPolicy]: The API server failed. Skipping.
        #{inspect(error)}
        """)

        error
    end
  end

  def check_url_nsfw(url) when is_binary(url) do
    threshold = Config.get([@policy, :threshold])

    case parse_url(url) do
      {:ok, %{"score" => score}} when score >= threshold ->
        {:nsfw, %{url: url, score: score, threshold: threshold}}

      {:ok, %{"score" => score}} ->
        {:sfw, %{url: url, score: score, threshold: threshold}}

      _ ->
        {:sfw, %{url: url, score: nil, threshold: threshold}}
    end
  end

  def check_url_nsfw(%{"href" => url}) when is_binary(url) do
    check_url_nsfw(url)
  end

  def check_url_nsfw(url) do
    threshold = Config.get([@policy, :threshold])
    {:sfw, %{url: url, score: nil, threshold: threshold}}
  end

  def check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do
    if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do
      {:sfw, attachment}
    else
      {:nsfw, attachment}
    end
  end

  def check_attachment_nsfw(%{"url" => url} = attachment) when is_binary(url) do
    case check_url_nsfw(url) do
      {:sfw, _} -> {:sfw, attachment}
      {:nsfw, _} -> {:nsfw, attachment}
    end
  end

  def check_attachment_nsfw(attachment), do: {:sfw, attachment}

  def check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do
    if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do
      {:sfw, object}
    else
      {:nsfw, object}
    end
  end

  def check_object_nsfw(%{"object" => %{} = child_object} = object) do
    case check_object_nsfw(child_object) do
      {:sfw, _} -> {:sfw, object}
      {:nsfw, _} -> {:nsfw, object}
    end
  end

  def check_object_nsfw(object), do: {:sfw, object}

  @impl true
  def filter(object) do
    with {:sfw, object} <- check_object_nsfw(object) do
      {:ok, object}
    else
      {:nsfw, _data} -> handle_nsfw(object)
      _ -> {:reject, "NSFW: Attachment rejected"}
    end
  end

  defp handle_nsfw(object) do
    if Config.get([@policy, :reject]) do
      {:reject, object}
    else
      {:ok,
       object
       |> maybe_unlist()
       |> maybe_mark_sensitive()}
    end
  end

  defp maybe_unlist(object) do
    if Config.get([@policy, :unlist]) do
      unlist(object)
    else
      object
    end
  end

  defp maybe_mark_sensitive(object) do
    if Config.get([@policy, :mark_sensitive]) do
      mark_sensitive(object)
    else
      object
    end
  end

  def unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do
    with %User{} = user <- User.get_cached_by_ap_id(actor) do
      to =
        [user.follower_address | to]
        |> List.delete(Constants.as_public())
        |> Enum.uniq()

      cc =
        [Constants.as_public() | cc]
        |> List.delete(user.follower_address)
        |> Enum.uniq()

      object
      |> Map.put("to", to)
      |> Map.put("cc", cc)
    else
      _ -> raise "[NsfwApiPolicy]: Could not find user #{actor}"
    end
  end

  def mark_sensitive(%{"object" => child_object} = object) when is_map(child_object) do
    Map.put(object, "object", mark_sensitive(child_object))
  end

  def mark_sensitive(object) when is_map(object) do
    tags = (object["tag"] || []) ++ ["nsfw"]

    object
    |> Map.put("tag", tags)
    |> Map.put("sensitive", true)
  end

  # Hackney needs a trailing slash
  defp fix_path(%URI{path: path} = uri) when is_binary(path) do
    path = String.trim_trailing(path, "/") <> "/"
    Map.put(uri, :path, path)
  end

  defp fix_path(%URI{path: nil} = uri), do: Map.put(uri, :path, "/")

  @impl true
  def describe do
    options = %{
      threshold: Config.get([@policy, :threshold]),
      mark_sensitive: Config.get([@policy, :mark_sensitive]),
      unlist: Config.get([@policy, :unlist]),
      reject: Config.get([@policy, :reject])
    }

    {:ok, %{@policy => options}}
  end

  @impl true
  def config_description do
    %{
      key: @policy,
      related_policy: to_string(__MODULE__),
      label: "NSFW API Policy",
      description:
        "Hide, delete, or mark sensitive NSFW content with artificial intelligence. Requires running an external API server.",
      children: [
        %{
          key: :url,
          type: :string,
          description: "Base URL of the API server.",
          suggestions: ["http://127.0.0.1:5000/"]
        },
        %{
          key: :threshold,
          type: :float,
          description: "Lowest score to take action on. Between 0 and 1.",
          suggestions: [0.7]
        },
        %{
          key: :mark_sensitive,
          type: :boolean,
          description: "Mark sensitive all detected NSFW content?",
          suggestions: [true]
        },
        %{
          key: :unlist,
          type: :boolean,
          description: "Unlist sensitive all detected NSFW content?",
          suggestions: [false]
        },
        %{
          key: :reject,
          type: :boolean,
          description: "Reject sensitive all detected NSFW content (takes precedence)?",
          suggestions: [false]
        }
      ]
    }
  end
end
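Note (added for illustration, not part of the diff): a sketch of how an admin might enable the policy above, assuming the standard Pleroma `:mrf` policy list; the settings mirror the defaults shipped in config.exs in this commit.

```elixir
# config/prod.secret.exs (illustrative)
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy]

config :pleroma, :mrf_nsfw_api,
  url: "http://127.0.0.1:5000/",
  threshold: 0.7,
  mark_sensitive: true,
  unlist: false,
  reject: false
```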
@@ -202,7 +202,8 @@ defp notification_type do
        "pleroma:report",
        "move",
        "follow_request",
-        "poll"
+        "poll",
+        "status"
      ],
      description: """
      The type of event that resulted in the notification.

@@ -216,6 +217,7 @@ defp notification_type do
      - `pleroma:emoji_reaction` - Someone reacted with emoji to your status
      - `pleroma:chat_mention` - Someone mentioned you in a chat message
      - `pleroma:report` - Someone was reported
+      - `status` - Someone you are subscribed to created a status
      """
    }
  end
@@ -129,8 +129,22 @@ defp attachments(%{params: params} = draft) do

  defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft

-  defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do
-    %__MODULE__{draft | in_reply_to: Activity.get_by_id(id)}
+  defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do
+    add_error(draft, dgettext("errors", "Cannot reply to a deleted status"))
  end

+  defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do
+    activity = Activity.get_by_id(id)
+
+    params =
+      if is_nil(activity) do
+        # Deleted activities are returned as nil
+        Map.put(params, :in_reply_to_status_id, :deleted)
+      else
+        Map.put(params, :in_reply_to_status_id, activity)
+      end
+
+    in_reply_to(%{draft | params: params})
+  end
+
  defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do
@@ -34,6 +34,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationController do
    pleroma:emoji_reaction
    poll
    update
+    status
  }

  # GET /api/v1/notifications
@@ -108,6 +108,9 @@ def render(
        "mention" ->
          put_status(response, activity, reading_user, status_render_opts)

+        "status" ->
+          put_status(response, activity, reading_user, status_render_opts)
+
        "favourite" ->
          put_status(response, parent_activity_fn.(), reading_user, status_render_opts)
@@ -192,6 +192,7 @@ def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_typ
  def format_title(%{type: type}, mastodon_type) do
    case mastodon_type || type do
      "mention" -> "New Mention"
+      "status" -> "New Status"
      "follow" -> "New Follower"
      "follow_request" -> "New Follow Request"
      "reblog" -> "New Repeat"
@@ -23,7 +23,7 @@ defp aws_signed_url?(image) when is_binary(image) and image != "" do
    %URI{host: host, query: query} = URI.parse(image)

    is_binary(host) and String.contains?(host, "amazonaws.com") and
-      String.contains?(query, "X-Amz-Expires")
+      is_binary(query) and String.contains?(query, "X-Amz-Expires")
  end

  defp aws_signed_url?(_), do: nil
@@ -155,7 +155,16 @@ def get_template_from_xml(body) do
    end
  end

+  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
  def find_lrdd_template(domain) do
+    @cachex.fetch!(:host_meta_cache, domain, fn _ ->
+      {:commit, fetch_lrdd_template(domain)}
+    end)
+  rescue
+    e -> {:error, "Cachex error: #{inspect(e)}"}
+  end
+
+  defp fetch_lrdd_template(domain) do
    # WebFinger is restricted to HTTPS - https://tools.ietf.org/html/rfc7033#section-9.1
    meta_url = "https://#{domain}/.well-known/host-meta"

@@ -168,7 +177,7 @@ def find_lrdd_template(domain) do
    end
  end

-  defp get_address_from_domain(domain, encoded_account) when is_binary(domain) do
+  defp get_address_from_domain(domain, "acct:" <> _ = encoded_account) when is_binary(domain) do
    case find_lrdd_template(domain) do
      {:ok, template} ->
        String.replace(template, "{uri}", encoded_account)

@@ -178,6 +187,11 @@ defp get_address_from_domain(domain, encoded_account) when is_binary(domain) do
    end
  end

+  defp get_address_from_domain(domain, account) when is_binary(domain) do
+    encoded_account = URI.encode("acct:#{account}")
+    get_address_from_domain(domain, encoded_account)
+  end
+
  defp get_address_from_domain(_, _), do: {:error, :webfinger_no_domain}

  @spec finger(String.t()) :: {:ok, map()} | {:error, any()}

@@ -192,9 +206,7 @@ def finger(account) do
        URI.parse(account).host
      end

-    encoded_account = URI.encode("acct:#{account}")
-
-    with address when is_binary(address) <- get_address_from_domain(domain, encoded_account),
+    with address when is_binary(address) <- get_address_from_domain(domain, account),
         {:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <-
           HTTP.get(
             address,

@@ -216,10 +228,28 @@ def finger(account) do
        _ ->
          {:error, {:content_type, nil}}
      end
+      |> case do
+        {:ok, data} -> validate_webfinger(address, data)
+        error -> error
+      end
    else
      error ->
        Logger.debug("Couldn't finger #{account}: #{inspect(error)}")
        error
    end
  end

+  defp validate_webfinger(request_url, %{"subject" => "acct:" <> acct = subject} = data) do
+    with [_name, acct_host] <- String.split(acct, "@"),
+         {_, url} <- {:address, get_address_from_domain(acct_host, subject)},
+         %URI{host: request_host} <- URI.parse(request_url),
+         %URI{host: acct_host} <- URI.parse(url),
+         {_, true} <- {:hosts_match, acct_host == request_host} do
+      {:ok, data}
+    else
+      _ -> {:error, {:webfinger_invalid, request_url, data}}
+    end
+  end
+
+  defp validate_webfinger(url, data), do: {:error, {:webfinger_invalid, url, data}}
end
mix.exs

@@ -5,7 +5,7 @@ def project do
    [
      app: :pleroma,
      version: version("2.6.52"),
-      elixir: "~> 1.11",
+      elixir: "~> 1.13",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: Mix.compilers(),
      elixirc_options: [warnings_as_errors: warnings_as_errors()],
@@ -0,0 +1,51 @@
defmodule Pleroma.Repo.Migrations.AddStatusToNotificationsEnum do
  use Ecto.Migration

  @disable_ddl_transaction true

  def up do
    """
    alter type notification_type add value 'status'
    """
    |> execute()
  end

  def down do
    alter table(:notifications) do
      modify(:type, :string)
    end

    """
    delete from notifications where type = 'status'
    """
    |> execute()

    """
    drop type if exists notification_type
    """
    |> execute()

    """
    create type notification_type as enum (
      'follow',
      'follow_request',
      'mention',
      'move',
      'pleroma:emoji_reaction',
      'pleroma:chat_mention',
      'reblog',
      'favourite',
      'pleroma:report',
      'poll',
      'update'
    )
    """
    |> execute()

    """
    alter table notifications
    alter column type type notification_type using (type::notification_type)
    """
    |> execute()
  end
end
@@ -134,7 +134,7 @@ if [ -z "$1" ] || [ "$1" = "help" ]; then

  "
else
-  SCRIPT=$(readlink -f "$0")
+  SCRIPT=$(realpath "$0")
  SCRIPTPATH=$(dirname "$SCRIPT")

  FULL_ARGS="$*"
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
  <Link rel="lrdd" template="https://gleasonator.com/.well-known/webfinger?resource={uri}" type="application/xrd+xml" />
</XRD>
@@ -0,0 +1,28 @@
{
  "aliases": [
    "https://gleasonator.com/users/alex",
    "https://mitra.social/users/alex"
  ],
  "links": [
    {
      "href": "https://gleasonator.com/users/alex",
      "rel": "http://webfinger.net/rel/profile-page",
      "type": "text/html"
    },
    {
      "href": "https://gleasonator.com/users/alex",
      "rel": "self",
      "type": "application/activity+json"
    },
    {
      "href": "https://gleasonator.com/users/alex",
      "rel": "self",
      "type": "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
    },
    {
      "rel": "http://ostatus.org/schema/1.0/subscribe",
      "template": "https://gleasonator.com/ostatus_subscribe?acct={uri}"
    }
  ],
  "subject": "acct:trump@whitehouse.gov"
}
@@ -0,0 +1,41 @@
{
  "subject": "acct:graf@poa.st",
  "aliases": [
    "https://fba.ryona.agenc/webfingertest"
  ],
  "links": [
    {
      "rel": "http://webfinger.net/rel/profile-page",
      "type": "text/html",
      "href": "https://fba.ryona.agenc/webfingertest"
    },
    {
      "rel": "self",
      "type": "application/activity+json",
      "href": "https://fba.ryona.agenc/webfingertest"
    },
    {
      "rel": "http://ostatus.org/schema/1.0/subscribe",
      "template": "https://fba.ryona.agenc/contact/follow?url={uri}"
    },
    {
      "rel": "http://schemas.google.com/g/2010#updates-from",
      "type": "application/atom+xml",
      "href": ""
    },
    {
      "rel": "salmon",
      "href": "https://fba.ryona.agenc/salmon/friendica"
    },
    {
      "rel": "http://microformats.org/profile/hcard",
      "type": "text/html",
      "href": "https://fba.ryona.agenc/hcard/friendica"
    },
    {
      "rel": "http://joindiaspora.com/seed_location",
      "type": "text/html",
      "href": "https://fba.ryona.agenc"
    }
  ]
}
@ -112,6 +112,7 @@ test "it creates a notification for subscribed users" do
|
|||
{:ok, [notification]} = Notification.create_notifications(status)
|
||||
|
||||
assert notification.user_id == subscriber.id
|
||||
assert notification.type == "status"
|
||||
end
|
||||
|
||||
test "does not create a notification for subscribed users if status is a reply" do
|
||||
|
@ -136,6 +137,21 @@ test "does not create a notification for subscribed users if status is a reply"
|
|||
assert Enum.empty?(subscriber_notifications)
|
||||
end
|
||||
|
||||
test "does not create subscriber notification if mentioned" do
|
||||
user = insert(:user)
|
||||
subscriber = insert(:user)
|
||||
|
||||
User.subscribe(subscriber, user)
|
||||
|
||||
{:ok, status} = CommonAPI.post(user, %{status: "mentioning @#{subscriber.nickname}"})
|
||||
{:ok, [notification] = notifications} = Notification.create_notifications(status)
|
||||
|
||||
assert length(notifications) == 1
|
||||
|
||||
assert notification.user_id == subscriber.id
|
||||
assert notification.type == "mention"
|
||||
end
|
||||
|
||||
test "it sends edited notifications to those who repeated a status" do
|
||||
user = insert(:user)
|
||||
repeated_user = insert(:user)
|
||||
|
@ -843,22 +859,6 @@ test "repeating an activity which is already deleted does not generate a notific
|
|||
assert Enum.empty?(Notification.for_user(user))
|
||||
end
|
||||
|
||||
test "replying to a deleted post without tagging does not generate a notification" do
|
||||
user = insert(:user)
|
||||
other_user = insert(:user)
|
||||
|
||||
{:ok, activity} = CommonAPI.post(user, %{status: "test post"})
|
||||
{:ok, _deletion_activity} = CommonAPI.delete(activity.id, user)
|
||||
|
||||
{:ok, _reply_activity} =
|
||||
CommonAPI.post(other_user, %{
|
||||
status: "test reply",
|
||||
in_reply_to_status_id: activity.id
|
||||
})
|
||||
|
||||
assert Enum.empty?(Notification.for_user(user))
|
||||
end
|
||||
|
||||
test "notifications are deleted if a local user is deleted" do
|
||||
user = insert(:user)
|
||||
other_user = insert(:user)
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Search.HealthcheckTest do
|
||||
use Pleroma.DataCase
|
||||
|
||||
import Tesla.Mock
|
||||
|
||||
alias Pleroma.Search.Healthcheck
|
||||
|
||||
@good1 "http://good1.example.com/healthz"
|
||||
@good2 "http://good2.example.com/health"
|
||||
@bad "http://bad.example.com/healthy"
|
||||
|
||||
setup do
|
||||
mock(fn
|
||||
%{method: :get, url: @good1} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body: ""
|
||||
}
|
||||
|
||||
%{method: :get, url: @good2} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body: ""
|
||||
}
|
||||
|
||||
%{method: :get, url: @bad} ->
|
||||
%Tesla.Env{
|
||||
status: 503,
|
||||
body: ""
|
||||
}
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
test "true for 200 responses" do
|
||||
assert Healthcheck.check([@good1])
|
||||
assert Healthcheck.check([@good1, @good2])
|
||||
end
|
||||
|
||||
test "false if any response is not a 200" do
|
||||
refute Healthcheck.check([@bad])
|
||||
refute Healthcheck.check([@good1, @bad])
|
||||
end
|
||||
end
|
|
@ -0,0 +1,158 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Uploaders.IPFSTest do
|
||||
use Pleroma.DataCase
|
||||
|
||||
alias Pleroma.Uploaders.IPFS
|
||||
alias Tesla.Multipart
|
||||
|
||||
import ExUnit.CaptureLog
|
||||
import Mock
|
||||
import Mox
|
||||
|
||||
alias Pleroma.UnstubbedConfigMock, as: Config
|
||||
|
||||
describe "get_final_url" do
|
||||
setup do
|
||||
Config
|
||||
|> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
|
||||
[post_gateway_url: "http://localhost:5001"]
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
test "it returns the final url for put_file" do
|
||||
assert IPFS.put_file_endpoint() == "http://localhost:5001/api/v0/add"
|
||||
end
|
||||
|
||||
test "it returns the final url for delete_file" do
|
||||
assert IPFS.delete_file_endpoint() == "http://localhost:5001/api/v0/files/rm"
|
||||
end
|
||||
end
|
||||
|
||||
describe "get_file/1" do
|
||||
setup do
|
||||
Config
|
||||
|> expect(:get, fn [Pleroma.Upload, :uploader] -> Pleroma.Uploaders.IPFS end)
|
||||
|> expect(:get, fn [Pleroma.Upload, :base_url] -> nil end)
|
||||
|> expect(:get, fn [Pleroma.Uploaders.IPFS, :public_endpoint] -> nil end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
test "it returns path to ipfs file with cid as subdomain" do
|
||||
Config
|
||||
|> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] ->
|
||||
"https://{CID}.ipfs.mydomain.com"
|
||||
end)
|
||||
|
||||
assert IPFS.get_file("testcid") == {
|
||||
:ok,
|
||||
{:url, "https://testcid.ipfs.mydomain.com"}
|
||||
}
|
||||
end
|
||||
|
||||
test "it returns path to ipfs file with cid as path" do
|
||||
Config
|
||||
|> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] ->
|
||||
"https://ipfs.mydomain.com/ipfs/{CID}"
|
||||
end)
|
||||
|
||||
assert IPFS.get_file("testcid") == {
|
||||
:ok,
|
||||
{:url, "https://ipfs.mydomain.com/ipfs/testcid"}
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
describe "put_file/1" do
|
||||
setup do
|
||||
Config
|
||||
|> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
|
||||
[post_gateway_url: "http://localhost:5001"]
|
||||
end)
|
||||
|
||||
file_upload = %Pleroma.Upload{
|
||||
name: "image-tet.jpg",
|
||||
content_type: "image/jpeg",
|
||||
path: "test_folder/image-tet.jpg",
|
||||
tempfile: Path.absname("test/instance_static/add/shortcode.png")
|
||||
}
|
||||
|
||||
mp =
|
||||
Multipart.new()
|
||||
|> Multipart.add_content_type_param("charset=utf-8")
|
||||
|> Multipart.add_file(file_upload.tempfile)
|
||||
|
||||
[file_upload: file_upload, mp: mp]
|
||||
end
|
||||
|
||||
test "save file", %{file_upload: file_upload} do
|
||||
with_mock Pleroma.HTTP,
|
||||
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
|
||||
{:ok,
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"{\"Name\":\"image-tet.jpg\",\"Size\":\"5000\", \"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}"
|
||||
}}
|
||||
end do
|
||||
assert IPFS.put_file(file_upload) ==
|
||||
{:ok, {:file, "bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi"}}
|
||||
end
|
||||
end
|
||||
|
||||
test "returns error", %{file_upload: file_upload} do
|
||||
with_mock Pleroma.HTTP,
|
||||
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
|
||||
{:error, "IPFS Gateway upload failed"}
|
||||
end do
|
||||
assert capture_log(fn ->
|
||||
assert IPFS.put_file(file_upload) == {:error, "IPFS Gateway upload failed"}
|
||||
end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}"
|
||||
end
|
||||
end
|
||||
|
||||
test "returns error if JSON decode fails", %{file_upload: file_upload} do
|
||||
with_mock Pleroma.HTTP, [],
|
||||
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
|
||||
{:ok, %Tesla.Env{status: 200, body: "invalid"}}
|
||||
end do
|
||||
assert capture_log(fn ->
|
||||
assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"}
|
||||
end) =~
|
||||
"Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError"
|
||||
end
|
||||
end
|
||||
|
||||
test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do
|
||||
with_mock Pleroma.HTTP, [],
|
||||
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
|
||||
{:ok, %Tesla.Env{status: 200, body: "{\"key\": \"value\"}"}}
|
||||
end do
|
||||
assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "delete_file/1" do
|
||||
setup do
|
||||
Config
|
||||
|> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
|
||||
[post_gateway_url: "http://localhost:5001"]
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
test_with_mock "deletes file", Pleroma.HTTP,
|
||||
post: fn "http://localhost:5001/api/v0/files/rm", "", [], params: [arg: "image.jpg"] ->
|
||||
{:ok, %{status: 204}}
|
||||
end do
|
||||
assert :ok = IPFS.delete_file("image.jpg")
|
||||
end
|
||||
end
|
||||
end
|
|
@ -877,109 +877,19 @@ test "gets an existing user by nickname starting with http" do
|
|||
setup do: clear_config([Pleroma.Web.WebFinger, :update_nickname_on_user_fetch], true)
|
||||
|
||||
test "for mastodon" do
|
||||
Tesla.Mock.mock(fn
|
||||
%{url: "https://example.com/.well-known/host-meta"} ->
|
||||
%Tesla.Env{
|
||||
status: 302,
|
||||
headers: [{"location", "https://sub.example.com/.well-known/host-meta"}]
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/.well-known/host-meta"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"test/fixtures/webfinger/masto-host-meta.xml"
|
||||
|> File.read!()
|
||||
|> String.replace("{{domain}}", "sub.example.com")
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/.well-known/webfinger?resource=acct:a@example.com"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"test/fixtures/webfinger/masto-webfinger.json"
|
||||
|> File.read!()
|
||||
|> String.replace("{{nickname}}", "a")
|
||||
|> String.replace("{{domain}}", "example.com")
|
||||
|> String.replace("{{subdomain}}", "sub.example.com"),
|
||||
headers: [{"content-type", "application/jrd+json"}]
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/users/a"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"test/fixtures/webfinger/masto-user.json"
|
||||
|> File.read!()
|
||||
|> String.replace("{{nickname}}", "a")
|
||||
|> String.replace("{{domain}}", "sub.example.com"),
|
||||
headers: [{"content-type", "application/activity+json"}]
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/users/a/collections/featured"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
File.read!("test/fixtures/users_mock/masto_featured.json")
|
||||
|> String.replace("{{domain}}", "sub.example.com")
|
||||
|> String.replace("{{nickname}}", "a"),
|
||||
headers: [{"content-type", "application/activity+json"}]
|
||||
}
|
||||
end)
|
||||
|
||||
ap_id = "a@example.com"
|
||||
ap_id = "a@mastodon.example"
|
||||
{:ok, fetched_user} = User.get_or_fetch(ap_id)
|
||||
|
||||
assert fetched_user.ap_id == "https://sub.example.com/users/a"
|
||||
assert fetched_user.nickname == "a@example.com"
|
||||
assert fetched_user.ap_id == "https://sub.mastodon.example/users/a"
|
||||
assert fetched_user.nickname == "a@mastodon.example"
|
||||
end
|
||||
|
||||
test "for pleroma" do
|
||||
Tesla.Mock.mock(fn
|
||||
%{url: "https://example.com/.well-known/host-meta"} ->
|
||||
%Tesla.Env{
|
||||
status: 302,
|
||||
headers: [{"location", "https://sub.example.com/.well-known/host-meta"}]
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/.well-known/host-meta"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"test/fixtures/webfinger/pleroma-host-meta.xml"
|
||||
|> File.read!()
|
||||
|> String.replace("{{domain}}", "sub.example.com")
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/.well-known/webfinger?resource=acct:a@example.com"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"test/fixtures/webfinger/pleroma-webfinger.json"
|
||||
|> File.read!()
|
||||
|> String.replace("{{nickname}}", "a")
|
||||
|> String.replace("{{domain}}", "example.com")
|
||||
|> String.replace("{{subdomain}}", "sub.example.com"),
|
||||
headers: [{"content-type", "application/jrd+json"}]
|
||||
}
|
||||
|
||||
%{url: "https://sub.example.com/users/a"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body:
|
||||
"test/fixtures/webfinger/pleroma-user.json"
|
||||
|> File.read!()
|
||||
|> String.replace("{{nickname}}", "a")
|
||||
|> String.replace("{{domain}}", "sub.example.com"),
|
||||
headers: [{"content-type", "application/activity+json"}]
|
||||
}
|
||||
end)
|
||||
|
||||
ap_id = "a@example.com"
|
||||
ap_id = "a@pleroma.example"
|
||||
{:ok, fetched_user} = User.get_or_fetch(ap_id)
|
||||
|
||||
assert fetched_user.ap_id == "https://sub.example.com/users/a"
|
||||
assert fetched_user.nickname == "a@example.com"
|
||||
assert fetched_user.ap_id == "https://sub.pleroma.example/users/a"
|
||||
assert fetched_user.nickname == "a@pleroma.example"
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -0,0 +1,267 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicyTest do
|
||||
use Pleroma.DataCase
|
||||
|
||||
import ExUnit.CaptureLog
|
||||
import Pleroma.Factory
|
||||
|
||||
alias Pleroma.Constants
|
||||
alias Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy
|
||||
|
||||
require Pleroma.Constants
|
||||
|
||||
@policy :mrf_nsfw_api
|
||||
|
||||
@sfw_url "https://kittens.co/kitty.gif"
|
||||
@nsfw_url "https://b00bies.com/nsfw.jpg"
|
||||
@timeout_url "http://time.out/i.jpg"
|
||||
|
||||
setup_all do
|
||||
clear_config(@policy,
|
||||
url: "http://127.0.0.1:5000/",
|
||||
threshold: 0.7,
|
||||
mark_sensitive: true,
|
||||
unlist: false,
|
||||
reject: false
|
||||
)
|
||||
end
|
||||
|
||||
setup do
|
||||
Tesla.Mock.mock(fn
|
||||
# NSFW URL
|
||||
%{method: :get, url: "http://127.0.0.1:5000/?url=#{@nsfw_url}"} ->
|
||||
%Tesla.Env{status: 200, body: ~s({"score":0.99772077798843384,"url":"#{@nsfw_url}"})}
|
||||
|
||||
# SFW URL
|
||||
%{method: :get, url: "http://127.0.0.1:5000/?url=#{@sfw_url}"} ->
|
||||
%Tesla.Env{status: 200, body: ~s({"score":0.00011714912398019806,"url":"#{@sfw_url}"})}
|
||||
|
||||
# Timeout URL
|
||||
%{method: :get, url: "http://127.0.0.1:5000/?url=#{@timeout_url}"} ->
|
||||
{:error, :timeout}
|
||||
|
||||
# Fallback URL
|
||||
%{method: :get, url: "http://127.0.0.1:5000/?url=" <> url} ->
|
||||
body =
|
||||
~s({"error_code":500,"error_reason":"[Errno -2] Name or service not known","url":"#{url}"})
|
||||
|
||||
%Tesla.Env{status: 500, body: body}
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
describe "build_request_url/1" do
|
||||
test "it works" do
|
||||
expected = "http://127.0.0.1:5000/?url=https://b00bies.com/nsfw.jpg"
|
||||
assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected
|
||||
end
|
||||
|
||||
test "it adds a trailing slash" do
|
||||
clear_config([@policy, :url], "http://localhost:5000")
|
||||
|
||||
expected = "http://localhost:5000/?url=https://b00bies.com/nsfw.jpg"
|
||||
assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected
|
||||
end
|
||||
|
||||
test "it adds a trailing slash preserving the path" do
|
||||
clear_config([@policy, :url], "http://localhost:5000/nsfw_api")
|
||||
|
||||
expected = "http://localhost:5000/nsfw_api/?url=https://b00bies.com/nsfw.jpg"
|
||||
assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected
|
||||
end
|
||||
end
|
||||
|
||||
describe "parse_url/1" do
|
||||
test "returns decoded JSON from the API server" do
|
||||
expected = %{"score" => 0.99772077798843384, "url" => @nsfw_url}
|
||||
assert NsfwApiPolicy.parse_url(@nsfw_url) == {:ok, expected}
|
||||
end
|
||||
|
||||
test "warns when the API server fails" do
|
||||
expected = "[NsfwApiPolicy]: The API server failed. Skipping."
|
||||
assert capture_log(fn -> NsfwApiPolicy.parse_url(@timeout_url) end) =~ expected
|
||||
end
|
||||
|
||||
test "returns {:error, _} tuple when the API server fails" do
|
||||
capture_log(fn ->
|
||||
assert {:error, _} = NsfwApiPolicy.parse_url(@timeout_url)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
describe "check_url_nsfw/1" do
|
||||
test "returns {:nsfw, _} tuple" do
|
||||
expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}}
|
||||
assert NsfwApiPolicy.check_url_nsfw(@nsfw_url) == expected
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} tuple" do
|
||||
expected = {:sfw, %{url: @sfw_url, score: 0.00011714912398019806, threshold: 0.7}}
|
||||
assert NsfwApiPolicy.check_url_nsfw(@sfw_url) == expected
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} on failure" do
|
||||
expected = {:sfw, %{url: @timeout_url, score: nil, threshold: 0.7}}
|
||||
|
||||
capture_log(fn ->
|
||||
assert NsfwApiPolicy.check_url_nsfw(@timeout_url) == expected
|
||||
end)
|
||||
end
|
||||
|
||||
test "works with map URL" do
|
||||
expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}}
|
||||
assert NsfwApiPolicy.check_url_nsfw(%{"href" => @nsfw_url}) == expected
|
||||
end
|
||||
end
|
||||
|
||||
describe "check_attachment_nsfw/1" do
|
||||
test "returns {:nsfw, _} if any items are NSFW" do
|
||||
attachment = %{"url" => [%{"href" => @nsfw_url}, @nsfw_url, @sfw_url]}
|
||||
assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment}
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} if all items are SFW" do
|
||||
attachment = %{"url" => [%{"href" => @sfw_url}, @sfw_url, @sfw_url]}
|
||||
      assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:sfw, attachment}
    end

    test "works with binary URL" do
      attachment = %{"url" => @nsfw_url}
      assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment}
    end
  end

  describe "check_object_nsfw/1" do
    test "returns {:nsfw, _} if any items are NSFW" do
      object = %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]}
      assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object}
    end

    test "returns {:sfw, _} if all items are SFW" do
      object = %{"attachment" => [%{"url" => [%{"href" => @sfw_url}, @sfw_url]}]}
      assert NsfwApiPolicy.check_object_nsfw(object) == {:sfw, object}
    end

    test "works with embedded object" do
      object = %{"object" => %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]}}
      assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object}
    end
  end

  describe "unlist/1" do
    test "unlist addressing" do
      user = insert(:user)

      object = %{
        "to" => [Constants.as_public()],
        "cc" => [user.follower_address, "https://hello.world/users/alex"],
        "actor" => user.ap_id
      }

      expected = %{
        "to" => [user.follower_address],
        "cc" => [Constants.as_public(), "https://hello.world/users/alex"],
        "actor" => user.ap_id
      }

      assert NsfwApiPolicy.unlist(object) == expected
    end

    test "raise if user isn't found" do
      object = %{
        "to" => [Constants.as_public()],
        "cc" => [],
        "actor" => "https://hello.world/users/alex"
      }

      assert_raise(RuntimeError, fn ->
        NsfwApiPolicy.unlist(object)
      end)
    end
  end

  describe "mark_sensitive/1" do
    test "adds nsfw tag and marks sensitive" do
      object = %{"tag" => ["yolo"]}
      expected = %{"tag" => ["yolo", "nsfw"], "sensitive" => true}
      assert NsfwApiPolicy.mark_sensitive(object) == expected
    end

    test "works with embedded object" do
      object = %{"object" => %{"tag" => ["yolo"]}}
      expected = %{"object" => %{"tag" => ["yolo", "nsfw"], "sensitive" => true}}
      assert NsfwApiPolicy.mark_sensitive(object) == expected
    end
  end

  describe "filter/1" do
    setup do
      user = insert(:user)

      nsfw_object = %{
        "to" => [Constants.as_public()],
        "cc" => [user.follower_address],
        "actor" => user.ap_id,
        "attachment" => [%{"url" => @nsfw_url}]
      }

      sfw_object = %{
        "to" => [Constants.as_public()],
        "cc" => [user.follower_address],
        "actor" => user.ap_id,
        "attachment" => [%{"url" => @sfw_url}]
      }

      %{user: user, nsfw_object: nsfw_object, sfw_object: sfw_object}
    end

    test "passes SFW object through", %{sfw_object: object} do
      {:ok, _} = NsfwApiPolicy.filter(object)
    end

    test "passes NSFW object through when actions are disabled", %{nsfw_object: object} do
      clear_config([@policy, :mark_sensitive], false)
      clear_config([@policy, :unlist], false)
      clear_config([@policy, :reject], false)
      {:ok, _} = NsfwApiPolicy.filter(object)
    end

    test "passes NSFW object through when :threshold is 1", %{nsfw_object: object} do
      clear_config([@policy, :reject], true)
      clear_config([@policy, :threshold], 1)
      {:ok, _} = NsfwApiPolicy.filter(object)
    end

    test "rejects SFW object when :threshold is 0", %{sfw_object: object} do
      clear_config([@policy, :reject], true)
      clear_config([@policy, :threshold], 0)
      {:reject, _} = NsfwApiPolicy.filter(object)
    end

    test "rejects NSFW when :reject is enabled", %{nsfw_object: object} do
      clear_config([@policy, :reject], true)
      {:reject, _} = NsfwApiPolicy.filter(object)
    end

    test "passes NSFW through when :reject is disabled", %{nsfw_object: object} do
      clear_config([@policy, :reject], false)
      {:ok, _} = NsfwApiPolicy.filter(object)
    end

    test "unlists NSFW when :unlist is enabled", %{user: user, nsfw_object: object} do
      clear_config([@policy, :unlist], true)
      {:ok, object} = NsfwApiPolicy.filter(object)
      assert object["to"] == [user.follower_address]
    end

    test "passes NSFW through when :unlist is disabled", %{nsfw_object: object} do
      clear_config([@policy, :unlist], false)
      {:ok, object} = NsfwApiPolicy.filter(object)
      assert object["to"] == [Constants.as_public()]
    end
  end
end
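For orientation, the filter/1 tests above only toggle four policy switches via clear_config/2. A minimal sketch of the corresponding settings follows; the key names come straight from the clear_config calls, while the config section name (:mrf_nsfw_api, which the @policy attribute is assumed to point at) and the example values are illustrative assumptions, not the shipped defaults.

# Sketch only: key names taken from the tests; section name and values are assumed.
config :pleroma, :mrf_nsfw_api,
  threshold: 0.7,        # score above which an attachment is treated as NSFW
  mark_sensitive: true,  # add the "nsfw" tag and set "sensitive" => true
  unlist: false,         # rewrite public addressing to followers-only
  reject: false          # drop the activity entirely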
@ -235,6 +235,16 @@ test "replying to a status", %{user: user, conn: conn} do
      assert Activity.get_in_reply_to_activity(activity).id == replied_to.id
    end

    test "replying to a deleted status", %{user: user, conn: conn} do
      {:ok, status} = CommonAPI.post(user, %{status: "cofe"})
      {:ok, _deleted_status} = CommonAPI.delete(status.id, user)

      conn
      |> put_req_header("content-type", "application/json")
      |> post("/api/v1/statuses", %{"status" => "xD", "in_reply_to_id" => status.id})
      |> json_response_and_validate_schema(422)
    end

    test "replying to a direct message with visibility other than direct", %{
      user: user,
      conn: conn
@ -331,4 +331,31 @@ test "muted notification" do
    test_notifications_rendering([notification], user, [expected])
  end

  test "Subscribed status notification" do
    user = insert(:user)
    subscriber = insert(:user)

    User.subscribe(subscriber, user)

    {:ok, activity} = CommonAPI.post(user, %{status: "hi"})
    {:ok, [notification]} = Notification.create_notifications(activity)

    user = User.get_cached_by_id(user.id)

    expected = %{
      id: to_string(notification.id),
      pleroma: %{is_seen: false, is_muted: false},
      type: "status",
      account:
        AccountView.render("show.json", %{
          user: user,
          for: subscriber
        }),
      status: StatusView.render("show.json", %{activity: activity, for: subscriber}),
      created_at: Utils.to_masto_date(notification.inserted_at)
    }

    test_notifications_rendering([notification], subscriber, [expected])
  end
end
@ -10,6 +10,7 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrlTest do
  alias Pleroma.UnstubbedConfigMock, as: ConfigMock
  alias Pleroma.Web.RichMedia.Card
  alias Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl

  setup do
    ConfigMock
@ -82,6 +83,12 @@ test "s3 signed url is parsed and correct ttl is set for rich media" do
    assert DateTime.diff(scheduled_at, timestamp_dt) == valid_till
  end

  test "AWS URL for an image without expiration works" do
    og_data = %{"image" => "https://amazonaws.com/image.png"}

    assert is_nil(AwsSignedUrl.ttl(og_data, ""))
  end

  defp construct_s3_url(timestamp, valid_till) do
    "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{timestamp}&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host"
  end
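The assertion DateTime.diff(scheduled_at, timestamp_dt) == valid_till above pins down the arithmetic these tests rely on: a signed S3 URL is valid from its X-Amz-Date for X-Amz-Expires seconds, and a URL without those parameters has no TTL. Below is a rough, self-contained sketch of that calculation; the module and function names are illustrative and this is not the AwsSignedUrl implementation the tests exercise.

# Illustrative sketch; not the AwsSignedUrl module used by the tests above.
defmodule SignedUrlTtlSketch do
  @moduledoc false

  # Returns the DateTime at which a signed S3 URL stops being valid,
  # or nil when X-Amz-Date/X-Amz-Expires are absent.
  def expires_at(url) do
    query = URI.decode_query(URI.parse(url).query || "")

    with {:ok, amz_date} <- Map.fetch(query, "X-Amz-Date"),
         {:ok, expires} <- Map.fetch(query, "X-Amz-Expires"),
         {:ok, signed_at, _offset} <- DateTime.from_iso8601(to_iso8601(amz_date)) do
      DateTime.add(signed_at, String.to_integer(expires), :second)
    else
      _ -> nil
    end
  end

  # X-Amz-Date uses the compact 20190716T164843Z form; expand it to ISO 8601.
  defp to_iso8601(
         <<y::binary-size(4), m::binary-size(2), d::binary-size(2), "T", hh::binary-size(2),
           mi::binary-size(2), ss::binary-size(2), "Z">>
       ) do
    "#{y}-#{m}-#{d}T#{hh}:#{mi}:#{ss}Z"
  end

  defp to_iso8601(other), do: other
end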
@ -76,15 +76,6 @@ test "returns the ActivityPub actor URI for an ActivityPub user" do
    {:ok, _data} = WebFinger.finger(user)
  end

  test "returns the ActivityPub actor URI and subscribe address for an ActivityPub user with the ld+json mimetype" do
    user = "kaniini@gerzilla.de"

    {:ok, data} = WebFinger.finger(user)

    assert data["ap_id"] == "https://gerzilla.de/channel/kaniini"
    assert data["subscribe_address"] == "https://gerzilla.de/follow?f=&url={uri}"
  end

  test "it work for AP-only user" do
    user = "kpherox@mstdn.jp"
@ -99,12 +90,6 @@ test "it work for AP-only user" do
    assert data["subscribe_address"] == "https://mstdn.jp/authorize_interaction?acct={uri}"
  end

  test "it works for friendica" do
    user = "lain@squeet.me"

    {:ok, _data} = WebFinger.finger(user)
  end

  test "it gets the xrd endpoint" do
    {:ok, template} = WebFinger.find_lrdd_template("social.heldscal.la")
@ -203,5 +188,44 @@ test "refuses to process XML remote entities" do
      assert :error = WebFinger.finger("pekorino@pawoo.net")
    end

    test "prevents spoofing" do
      Tesla.Mock.mock(fn
        %{
          url: "https://gleasonator.com/.well-known/webfinger?resource=acct:alex@gleasonator.com"
        } ->
          {:ok,
           %Tesla.Env{
             status: 200,
             body: File.read!("test/fixtures/tesla_mock/webfinger_spoof.json"),
             headers: [{"content-type", "application/jrd+json"}]
           }}

        %{url: "https://gleasonator.com/.well-known/host-meta"} ->
          {:ok,
           %Tesla.Env{
             status: 200,
             body: File.read!("test/fixtures/tesla_mock/gleasonator.com_host_meta")
           }}
      end)

      {:error, _data} = WebFinger.finger("alex@gleasonator.com")
    end
  end

  @tag capture_log: true
  test "prevents forgeries" do
    Tesla.Mock.mock(fn
      %{url: "https://fba.ryona.agency/.well-known/webfinger?resource=acct:graf@fba.ryona.agency"} ->
        fake_webfinger =
          File.read!("test/fixtures/webfinger/graf-imposter-webfinger.json") |> Jason.decode!()

        Tesla.Mock.json(fake_webfinger)

      %{url: "https://fba.ryona.agency/.well-known/host-meta"} ->
        {:ok, %Tesla.Env{status: 404}}
    end)

    assert {:error, _} = WebFinger.finger("graf@fba.ryona.agency")
  end
end
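Both tests above feed WebFinger.finger/1 a response whose subject claims to belong to someone else and expect an error back. A simplified sketch of the kind of check that implies follows; it only compares the subject's domain against the domain that was queried, it does not handle the subdomain delegation covered by the request mocks further down, and none of the names here are Pleroma's actual implementation.

# Simplified sketch; not Pleroma.Web.WebFinger's actual validation.
defmodule WebFingerValidationSketch do
  @moduledoc false

  # Accept the response only when the advertised subject belongs to the
  # domain that was asked about; otherwise treat it as a spoof attempt.
  def validate(requested_account, %{"subject" => "acct:" <> subject} = data) do
    if domain_of(subject) == domain_of(requested_account) do
      {:ok, data}
    else
      {:error, {:webfinger_spoof, subject}}
    end
  end

  def validate(_requested_account, _data), do: {:error, :invalid_webfinger_response}

  defp domain_of(account) do
    account |> String.split("@", parts: 2) |> List.last() |> String.downcase()
  end
end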
@ -1521,6 +1521,120 @@ def get("https://friends.grishka.me/users/1", _, _, _) do
    }}
  end

  def get("https://mastodon.example/.well-known/host-meta", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 302,
       headers: [{"location", "https://sub.mastodon.example/.well-known/host-meta"}]
     }}
  end

  def get("https://sub.mastodon.example/.well-known/host-meta", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         "test/fixtures/webfinger/masto-host-meta.xml"
         |> File.read!()
         |> String.replace("{{domain}}", "sub.mastodon.example")
     }}
  end

  def get(
        "https://sub.mastodon.example/.well-known/webfinger?resource=acct:a@mastodon.example",
        _,
        _,
        _
      ) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         "test/fixtures/webfinger/masto-webfinger.json"
         |> File.read!()
         |> String.replace("{{nickname}}", "a")
         |> String.replace("{{domain}}", "mastodon.example")
         |> String.replace("{{subdomain}}", "sub.mastodon.example"),
       headers: [{"content-type", "application/jrd+json"}]
     }}
  end

  def get("https://sub.mastodon.example/users/a", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         "test/fixtures/webfinger/masto-user.json"
         |> File.read!()
         |> String.replace("{{nickname}}", "a")
         |> String.replace("{{domain}}", "sub.mastodon.example"),
       headers: [{"content-type", "application/activity+json"}]
     }}
  end

  def get("https://sub.mastodon.example/users/a/collections/featured", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         File.read!("test/fixtures/users_mock/masto_featured.json")
         |> String.replace("{{domain}}", "sub.mastodon.example")
         |> String.replace("{{nickname}}", "a"),
       headers: [{"content-type", "application/activity+json"}]
     }}
  end

  def get("https://pleroma.example/.well-known/host-meta", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 302,
       headers: [{"location", "https://sub.pleroma.example/.well-known/host-meta"}]
     }}
  end

  def get("https://sub.pleroma.example/.well-known/host-meta", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         "test/fixtures/webfinger/pleroma-host-meta.xml"
         |> File.read!()
         |> String.replace("{{domain}}", "sub.pleroma.example")
     }}
  end

  def get(
        "https://sub.pleroma.example/.well-known/webfinger?resource=acct:a@pleroma.example",
        _,
        _,
        _
      ) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         "test/fixtures/webfinger/pleroma-webfinger.json"
         |> File.read!()
         |> String.replace("{{nickname}}", "a")
         |> String.replace("{{domain}}", "pleroma.example")
         |> String.replace("{{subdomain}}", "sub.pleroma.example"),
       headers: [{"content-type", "application/jrd+json"}]
     }}
  end

  def get("https://sub.pleroma.example/users/a", _, _, _) do
    {:ok,
     %Tesla.Env{
       status: 200,
       body:
         "test/fixtures/webfinger/pleroma-user.json"
         |> File.read!()
         |> String.replace("{{nickname}}", "a")
         |> String.replace("{{domain}}", "sub.pleroma.example"),
       headers: [{"content-type", "application/activity+json"}]
     }}
  end

  def get(url, query, body, headers) do
    {:error,
     "Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{inspect(headers)}"}
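These mock clauses model a host-meta redirect from mastodon.example and pleroma.example to a sub.* host, which is what allows the WebFinger flow to be tested against accounts whose canonical domain differs from the host actually serving the endpoints. A hypothetical test exercising them could follow the pattern of the finger tests above; since the fixture contents are not shown here, only the success of the lookup is asserted.

# Hypothetical usage of the mocks above (assumed test, not part of this diff).
test "it follows host-meta delegation to a subdomain" do
  assert {:ok, _data} = WebFinger.finger("a@mastodon.example")
  assert {:ok, _data} = WebFinger.finger("a@pleroma.example")
end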