Merge remote-tracking branch 'origin/develop' into status-notification-type

Signed-off-by: marcin mikołajczak <git@mkljczk.pl>
marcin mikołajczak 2024-05-18 11:30:25 +02:00
commit 2e76ceb5b4
202 changed files with 3806 additions and 1496 deletions

5
.gitignore vendored
View File

@ -57,5 +57,6 @@ pleroma.iml
.tool-versions
# Editor temp files
/*~
/*#
*~
*#
*.swp

View File

@ -26,10 +26,10 @@ cache: &global_cache_policy
- _build
stages:
- check-changelog
- build
- lint
- test
- check-changelog
- benchmark
- deploy
- release
@ -113,7 +113,7 @@ benchmark:
variables:
MIX_ENV: benchmark
services:
- name: postgres:9.6-alpine
- name: postgres:11.22-alpine
alias: postgres
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
script:
@ -169,25 +169,6 @@ unit-testing-1.12-erratic:
- mix ecto.migrate
- mix test --only=erratic
unit-testing-1.12-rum:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
cache: *testing_cache_policy
services:
- name: git.pleroma.social:5050/pleroma/pleroma/postgres-with-rum-13
alias: postgres
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
variables:
<<: *global_variables
RUM_ENABLED: "true"
script:
- mix ecto.create
- mix ecto.migrate
- "mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
- mix test --preload-modules
formatting-1.13:
extends: .build_changes_policy
image: &formatting_elixir elixir:1.13-alpine

View File

@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## 2.6.2
### Security
- MRF StealEmojiPolicy: Sanitize shortcodes (thanks to Hazel K for the report)
## 2.6.1
### Changed
- Document maximum supported version of Erlang & Elixir

View File

View File

@ -0,0 +1 @@
Include following/followers in backups

View File

@ -0,0 +1 @@
Support Bandit as an alternative to Cowboy for the HTTP server.

View File

@ -0,0 +1 @@
Allow grouping bookmarks in folders

View File

View File

@ -0,0 +1 @@
Fix federation with Convergence AP Bridge

View File

@ -0,0 +1 @@
Mastodon API: Remove deprecated GET /api/v1/statuses/:id/card endpoint https://github.com/mastodon/mastodon/pull/11213

View File

@ -0,0 +1 @@
Include image description in status media cards

View File

@ -0,0 +1 @@
- Config: Check the permissions of the linked file instead of the symlink

View File

@ -0,0 +1 @@
MediaProxy was setting the content-length header, which RFC 9112 §6.2 does not permit when the reply is chunked, as it conflicts with the transfer-encoding header.

View File

View File

1
changelog.d/fep-2c59.add Normal file
View File

@ -0,0 +1 @@
Implement FEP-2c59, add "webfinger" to user actor

View File

@ -0,0 +1 @@
Framegrabs with ffmpeg will execute with a 5 second timeout and cache the URLs of failures with a TTL of 15 minutes to prevent excessive retries.

View File

@ -0,0 +1 @@
Add ForceMention MRF

View File

@ -0,0 +1 @@
Video framegrabs were not working correctly after the change to use Exile to execute ffmpeg

View File

1
changelog.d/gun_pool.fix Normal file
View File

@ -0,0 +1 @@
Fix logic error in Gun connection pooling which prevented retries even when the worker was launched with retry = true

View File

@ -0,0 +1 @@
Connection pool errors when publishing an activity are now a soft error that will be retried shortly.

View File

View File

@ -0,0 +1 @@
Add contact account to InstanceView

View File

@ -0,0 +1 @@
Add instance rules

View File

View File

@ -0,0 +1 @@
Handle cases when users.inbox is nil.

View File

@ -0,0 +1 @@
Verify profile link ownership with rel="me"

View File

@ -0,0 +1 @@
Add new parameters to /api/v2/instance: configuration[accounts][max_pinned_statuses] and configuration[statuses][characters_reserved_per_url]

1
changelog.d/memleak.fix Normal file
View File

@ -0,0 +1 @@
Fix a memory leak caused by Websocket connections that would not enter a state where a full garbage collection run could be triggered.

View File

View File

@ -0,0 +1 @@
Startup detection for configured MRF modules that are missing or incorrectly defined

View File

@ -0,0 +1 @@
Fix notifications query which was not using the index properly

View File

@ -0,0 +1 @@
Notifications: improve performance by filtering on users table instead of activities table

View File

@ -0,0 +1 @@
Use User.full_nickname/1 in oauth html template

View File

@ -0,0 +1 @@
Disable jit by default for PostgreSQL

View File

@ -0,0 +1 @@
Expose nonAnonymous field from Smithereen polls

View File

@ -0,0 +1 @@
ReceiverWorker: Make sure non-{:ok, _} is returned as {:error, …}

View File

View File

@ -0,0 +1 @@
Rich Media Preview cache eviction when the activity is updated.

View File

@ -0,0 +1 @@
Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.

View File

1
changelog.d/tesla.deps Normal file
View File

@ -0,0 +1 @@
Update Tesla HTTP client middleware to 1.8.0

View File

View File

@ -0,0 +1 @@
Set default values on validators for transient objects (attachment, poll options)

View File

@ -0,0 +1 @@
Web Push notifications are no longer generated for muted/blocked threads and users.

View File

@ -0,0 +1 @@
Refactor the Mastodon /api/v1/streaming websocket handler to use Phoenix.Socket.Transport

View File

@ -114,14 +114,7 @@
config :pleroma, Pleroma.Web.Endpoint,
url: [host: "localhost"],
http: [
ip: {127, 0, 0, 1},
dispatch: [
{:_,
[
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
{:_, Plug.Cowboy.Handler, {Pleroma.Web.Endpoint, []}}
]}
]
ip: {127, 0, 0, 1}
],
protocol: "https",
secret_key_base: "aK4Abxf29xU9TTDKre9coZPUgevcVCFQJe/5xP/7Lt4BEif6idBIbjupVbOrbKxl",
@ -422,6 +415,10 @@
config :pleroma, :mrf_inline_quote, template: "<bdi>RT:</bdi> {url}"
config :pleroma, :mrf_force_mention,
mention_parent: true,
mention_quoted: true
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
@ -431,7 +428,11 @@
Pleroma.Web.RichMedia.Parsers.OEmbed
],
failure_backoff: 60_000,
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
ttl_setters: [
Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl,
Pleroma.Web.RichMedia.Parser.TTL.Opengraph
],
max_body: 5_000_000
config :pleroma, :media_proxy,
enabled: false,
@ -578,7 +579,8 @@
attachments_cleanup: 1,
new_users_digest: 1,
mute_expire: 5,
search_indexing: 10
search_indexing: 10,
rich_media_expiration: 2
],
plugins: [Oban.Plugins.Pruner],
crontab: [
@ -802,7 +804,7 @@
config :pleroma, configurable_from_database: false
config :pleroma, Pleroma.Repo,
parameters: [gin_fuzzy_search_limit: "500"],
parameters: [gin_fuzzy_search_limit: "500", jit: "off"],
prepare: :unnamed
config :pleroma, :connections_pool,

View File

@ -566,6 +566,20 @@
"Cool instance"
]
},
%{
key: :status_page,
type: :string,
description: "A page where people can see the status of the server during an outage",
suggestions: [
"https://status.pleroma.example.org"
]
},
%{
key: :contact_username,
type: :string,
description: "Instance owner username",
suggestions: ["admin"]
},
%{
key: :limit,
type: :integer,
@ -3508,7 +3522,7 @@
},
%{
key: :initial_indexing_chunk_size,
type: :int,
type: :integer,
description:
"Amount of posts in a batch when running the initial indexing operation. Should probably not be more than 100000" <>
" since there's a limit on maximum insert size",

View File

@ -8,8 +8,7 @@
# with brunch.io to recompile .js and .css sources.
config :pleroma, Pleroma.Web.Endpoint,
http: [
port: 4000,
protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
port: 4000
],
protocol: "http",
debug_errors: true,

View File

@ -49,7 +49,7 @@
hostname: System.get_env("DB_HOST") || "localhost",
port: System.get_env("DB_PORT") || "5432",
pool: Ecto.Adapters.SQL.Sandbox,
pool_size: 50
pool_size: System.schedulers_online() * 2
config :pleroma, :dangerzone, override_repo_pool_size: true
@ -61,7 +61,8 @@
config :pleroma, :rich_media,
enabled: false,
ignore_hosts: [],
ignore_tld: ["local", "localdomain", "lan"]
ignore_tld: ["local", "localdomain", "lan"],
max_body: 2_000_000
config :pleroma, :instance,
multi_factor_authentication: [
@ -174,6 +175,8 @@
config :pleroma, Pleroma.Emoji.Loader, test_emoji: true
config :pleroma, Pleroma.Web.RichMedia.Backfill, provider: Pleroma.Web.RichMedia.Backfill
if File.exists?("./config/test.secret.exs") do
import_config "test.secret.exs"
else

View File

@ -161,7 +161,8 @@ To add configuration to your config file, you can copy it from the base config.
* `Pleroma.Web.ActivityPub.MRF.KeywordPolicy`: Rejects or removes from the federated timeline or replaces keywords. (See [`:mrf_keyword`](#mrf_keyword)).
* `Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent`: Forces every mentioned user to be reflected in the post content.
* `Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy`: Forces quote post URLs to be reflected in the message content inline.
* `Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy`: Force a Link tag for posts quoting another post. (may break outgoing federation of quote posts with older Pleroma versions)
* `Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy`: Force a Link tag for posts quoting another post. (may break outgoing federation of quote posts with older Pleroma versions).
* `Pleroma.Web.ActivityPub.MRF.ForceMention`: Forces posts to include a mention of the author of the parent post or the author of the quoted post.
* `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
@ -272,6 +273,10 @@ Notes:
#### :mrf_inline_quote
* `template`: The template to append to the post. `{url}` will be replaced with the actual link to the quoted post. Default: `<bdi>RT:</bdi> {url}`
#### :mrf_force_mention
* `mention_parent`: Whether to append a mention of the parent post author
* `mention_quoted`: Whether to append a mention of the quoted post author
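A minimal configuration sketch for this policy (the values shown are the defaults added to `config/config.exs` in this change):

```elixir
config :pleroma, :mrf_force_mention,
  mention_parent: true,
  mention_quoted: true
```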
### :activitypub
* `unfollow_blocked`: Whether blocks result in people getting unfollowed
* `outgoing_blocks`: Whether to federate blocks to other instances

View File

@ -1751,3 +1751,53 @@ Note that this differs from the Mastodon API variant: Mastodon API only returns
```json
{}
```
## `GET /api/v1/pleroma/admin/rules`
### List rules
- Response: JSON, list of rules
```json
[
{
"id": "1",
"priority": 1,
"text": "There are no rules",
"hint": null
}
]
```
## `POST /api/v1/pleroma/admin/rules`
### Create a rule
- Params:
- `text`: string, required, rule content
- `hint`: string, optional, rule description
- `priority`: integer, optional, rule ordering priority
- Response: JSON, a single rule
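A hypothetical example response (field values are illustrative; the shape follows the list example above):
```json
{
  "id": "1",
  "priority": 1,
  "text": "There are no rules",
  "hint": null
}
```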
## `PATCH /api/v1/pleroma/admin/rules/:id`
### Update a rule
- Params:
- `text`: string, optional, rule content
- `hint`: string, optional, rule description
- `priority`: integer, optional, rule ordering priority
- Response: JSON, a single rule
## `DELETE /api/v1/pleroma/admin/rules/:id`
### Delete a rule
- Response: JSON, empty object
```json
{}
```

View File

@ -40,6 +40,8 @@ Has these additional fields under the `pleroma` object:
- `parent_visible`: If the parent of this post is visible to the user or not.
- `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise.
- `quotes_count`: the count of status quotes.
- `non_anonymous`: true if the source post specifies the poll results are not anonymous. Currently only implemented by Smithereen.
- `bookmark_folder`: the ID of the folder the bookmark is stored within (if any).
The `GET /api/v1/statuses/:id/source` endpoint additionally has the following attributes:
@ -65,6 +67,12 @@ Some apps operate under the assumption that no more than 4 attachments can be re
Pleroma does not process remote images and therefore cannot include fields such as `meta` and `blurhash`. It does not support focal points or aspect ratios. The frontend is expected to handle it.
## Bookmarks
The `GET /api/v1/bookmarks` endpoint accepts optional parameter `folder_id` for bookmark folder ID.
The `POST /api/v1/statuses/:id/bookmark` endpoint accepts optional parameter `folder_id` for bookmark folder ID.
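For illustration, with a hypothetical folder ID:
```
GET /api/v1/bookmarks?folder_id=9umDrYheeY451cQnEe
```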
## Accounts
The `id` parameter can also be the `nickname` of the user. This only works in these endpoints, not the deeper nested ones for following etc.

View File

@ -283,6 +283,52 @@ See [Admin-API](admin_api.md)
* `id`: the id of the status
* Response: JSON, returns a list of Mastodon Status entities
## `GET /api/v1/pleroma/bookmark_folders`
### Gets user bookmark folders
* Authentication: required
* Response: JSON. Returns a list of bookmark folders.
* Example response:
```json
[
{
"id": "9umDrYheeY451cQnEe",
"name": "Read later",
"emoji": "🕓",
"source": {
"emoji": "🕓"
}
}
]
```
## `POST /api/v1/pleroma/bookmark_folders`
### Creates a bookmark folder
* Authentication: required
* Params:
* `name`: folder name
* `emoji`: folder emoji (optional)
* Response: JSON. Returns a single bookmark folder.
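* A hypothetical example response (the folder ID is illustrative; the shape matches the GET example above):
```json
{
  "id": "9umDrYheeY451cQnEe",
  "name": "Read later",
  "emoji": "🕓",
  "source": {
    "emoji": "🕓"
  }
}
```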
## `PATCH /api/v1/pleroma/bookmark_folders/:id`
### Updates a bookmark folder
* Authentication: required
* Params:
* `id`: folder id
* `name`: folder name (optional)
* `emoji`: folder emoji (optional)
* Response: JSON. Returns a single bookmark folder.
## `DELETE /api/v1/pleroma/bookmark_folders/:id`
### Deletes a bookmark folder
* Authentication: required
* Params:
* `id`: folder id
* Response: JSON. Returns a single bookmark folder.
## `/api/v1/pleroma/mascot`
### Gets user mascot image
* Method `GET`

View File

@ -12,8 +12,8 @@ Note: This article is potentially outdated because at this time we may not have
### Required software
- PostgreSQL 9.6 or newer (Ubuntu 16.04 only ships 9.5, so get a newer version from [here](https://www.postgresql.org/download/linux/ubuntu/))
- `postgresql-contrib` 9.6 or newer (same as above)
- PostgreSQL 11.0 or newer (Ubuntu 16.04 only ships 9.5, so get a newer version from [here](https://www.postgresql.org/download/linux/ubuntu/))
- `postgresql-contrib` 11.0 or newer (same as above)
- Elixir 1.8 or newer ([do not install it from Debian's repository; install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
- `erlang-dev`
- `erlang-nox`

View File

@ -1,6 +1,6 @@
## Required dependencies
* PostgreSQL >=9.6
* PostgreSQL >=11.0
* Elixir >=1.11.0 <1.15
* Erlang OTP >=22.2.0 (supported: <27)
* git

View File

@ -111,7 +111,7 @@ def run(["get-packs" | args]) do
{:ok, _} =
:zip.unzip(binary_archive,
cwd: pack_path,
cwd: String.to_charlist(pack_path),
file_list: files_to_unzip
)

View File

@ -1,93 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Phoenix.Transports.WebSocket.Raw do
import Plug.Conn,
only: [
fetch_query_params: 1,
send_resp: 3
]
alias Phoenix.Socket.Transport
def default_config do
[
timeout: 60_000,
transport_log: false,
cowboy: Phoenix.Endpoint.CowboyWebSocket
]
end
def init(%Plug.Conn{method: "GET"} = conn, {endpoint, handler, transport}) do
{_, opts} = handler.__transport__(transport)
conn =
conn
|> fetch_query_params
|> Transport.transport_log(opts[:transport_log])
|> Transport.check_origin(handler, endpoint, opts)
case conn do
%{halted: false} = conn ->
case handler.connect(%{
endpoint: endpoint,
transport: transport,
options: [serializer: nil],
params: conn.params
}) do
{:ok, socket} ->
{:ok, conn, {__MODULE__, {socket, opts}}}
:error ->
send_resp(conn, :forbidden, "")
{:error, conn}
end
_ ->
{:error, conn}
end
end
def init(conn, _) do
send_resp(conn, :bad_request, "")
{:error, conn}
end
def ws_init({socket, config}) do
Process.flag(:trap_exit, true)
{:ok, %{socket: socket}, config[:timeout]}
end
def ws_handle(op, data, state) do
state.socket.handler
|> apply(:handle, [op, data, state])
|> case do
{op, data} ->
{:reply, {op, data}, state}
{op, data, state} ->
{:reply, {op, data}, state}
%{} = state ->
{:ok, state}
_ ->
{:ok, state}
end
end
def ws_info({_, _} = tuple, state) do
{:reply, tuple, state}
end
def ws_info(_tuple, state), do: {:ok, state}
def ws_close(state) do
ws_handle(:closed, :normal, state)
end
def ws_terminate(reason, state) do
ws_handle(:closed, reason, state)
end
end

View File

@ -28,7 +28,7 @@ defp get_cache_keys_for(activity_id) do
end
end
defp add_cache_key_for(activity_id, additional_key) do
def add_cache_key_for(activity_id, additional_key) do
current = get_cache_keys_for(activity_id)
unless additional_key in current do

View File

@ -119,28 +119,7 @@ def start(_type, _args) do
max_restarts = Application.get_env(:pleroma, __MODULE__)[:max_restarts]
opts = [strategy: :one_for_one, name: Pleroma.Supervisor, max_restarts: max_restarts]
result = Supervisor.start_link(children, opts)
set_postgres_server_version()
result
end
defp set_postgres_server_version do
version =
with %{rows: [[version]]} <- Ecto.Adapters.SQL.query!(Pleroma.Repo, "show server_version"),
{num, _} <- Float.parse(version) do
num
else
e ->
Logger.warning(
"Could not get the postgres version: #{inspect(e)}.\nSetting the default value of 9.6"
)
9.6
end
:persistent_term.put({Pleroma.Repo, :postgres_version}, version)
Supervisor.start_link(children, opts)
end
def load_custom_modules do
@ -177,6 +156,7 @@ defp cachex_children do
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
build_cachex("failed_media_helper_url", default_ttl: :timer.minutes(15), limit: 2_500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("chat_message_id_idempotency_key",
expiration: chat_message_id_idempotency_key_expiration(),

View File

@ -28,6 +28,7 @@ def verify! do
|> check_welcome_message_config!()
|> check_rum!()
|> check_repo_pool_size!()
|> check_mrfs()
|> handle_result()
end
@ -234,4 +235,25 @@ defp check_filter(filter, command_required) do
true
end
end
defp check_mrfs(:ok) do
mrfs = Config.get!([:mrf, :policies])
missing_mrfs =
Enum.reduce(mrfs, [], fn x, acc ->
if Code.ensure_compiled(x) do
acc
else
acc ++ [x]
end
end)
if Enum.empty?(missing_mrfs) do
:ok
else
{:error, "The following MRF modules are configured but missing: #{inspect(missing_mrfs)}"}
end
end
defp check_mrfs(result), do: result
end

View File

@ -10,6 +10,7 @@ defmodule Pleroma.Bookmark do
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.BookmarkFolder
alias Pleroma.Repo
alias Pleroma.User
@ -18,33 +19,46 @@ defmodule Pleroma.Bookmark do
schema "bookmarks" do
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)
belongs_to(:folder, BookmarkFolder, type: FlakeId.Ecto.CompatType)
timestamps()
end
@spec create(Ecto.UUID.t(), Ecto.UUID.t()) ::
{:ok, Bookmark.t()} | {:error, Ecto.Changeset.t()}
def create(user_id, activity_id) do
def create(user_id, activity_id, folder_id \\ nil) do
attrs = %{
user_id: user_id,
activity_id: activity_id
activity_id: activity_id,
folder_id: folder_id
}
%Bookmark{}
|> cast(attrs, [:user_id, :activity_id])
|> cast(attrs, [:user_id, :activity_id, :folder_id])
|> validate_required([:user_id, :activity_id])
|> unique_constraint(:activity_id, name: :bookmarks_user_id_activity_id_index)
|> Repo.insert()
|> Repo.insert(
on_conflict: [set: [folder_id: folder_id]],
conflict_target: [:user_id, :activity_id]
)
end
@spec for_user_query(Ecto.UUID.t()) :: Ecto.Query.t()
def for_user_query(user_id) do
def for_user_query(user_id, folder_id \\ nil) do
Bookmark
|> where(user_id: ^user_id)
|> maybe_filter_by_folder(folder_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> preload([b, a], activity: a)
end
defp maybe_filter_by_folder(query, nil), do: query
defp maybe_filter_by_folder(query, folder_id) do
query
|> where(folder_id: ^folder_id)
end
def get(user_id, activity_id) do
Bookmark
|> where(user_id: ^user_id)
@ -62,4 +76,11 @@ def destroy(user_id, activity_id) do
|> Repo.one()
|> Repo.delete()
end
def set_folder(bookmark, folder_id) do
bookmark
|> cast(%{folder_id: folder_id}, [:folder_id])
|> validate_required([:folder_id])
|> Repo.update()
end
end

View File

@ -0,0 +1,115 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BookmarkFolder do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias Pleroma.BookmarkFolder
alias Pleroma.Emoji
alias Pleroma.Repo
alias Pleroma.User
@type t :: %__MODULE__{}
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
schema "bookmark_folders" do
field(:name, :string)
field(:emoji, :string)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
timestamps()
end
def get_by_id(id), do: Repo.get_by(BookmarkFolder, id: id)
def create(user_id, name, emoji \\ nil) do
%BookmarkFolder{}
|> cast(
%{
user_id: user_id,
name: name,
emoji: emoji
},
[:user_id, :name, :emoji]
)
|> validate_required([:user_id, :name])
|> fix_emoji()
|> validate_emoji()
|> unique_constraint([:user_id, :name])
|> Repo.insert()
end
def update(folder_id, name, emoji \\ nil) do
get_by_id(folder_id)
|> cast(
%{
name: name,
emoji: emoji
},
[:name, :emoji]
)
|> fix_emoji()
|> validate_emoji()
|> unique_constraint([:user_id, :name])
|> Repo.update()
end
defp fix_emoji(changeset) do
with {:emoji_field, emoji} when is_binary(emoji) <-
{:emoji_field, get_field(changeset, :emoji)},
{:fixed_emoji, emoji} <-
{:fixed_emoji,
emoji
|> Pleroma.Emoji.fully_qualify_emoji()
|> Pleroma.Emoji.maybe_quote()} do
put_change(changeset, :emoji, emoji)
else
{:emoji_field, _} -> changeset
end
end
defp validate_emoji(changeset) do
validate_change(changeset, :emoji, fn
:emoji, nil ->
[]
:emoji, emoji ->
if Emoji.unicode?(emoji) or valid_local_custom_emoji?(emoji) do
[]
else
[emoji: "Invalid emoji"]
end
end)
end
defp valid_local_custom_emoji?(emoji) do
with %{file: _path} <- Emoji.get(emoji) do
true
else
_ -> false
end
end
def delete(folder_id) do
BookmarkFolder
|> Repo.get_by(id: folder_id)
|> Repo.delete()
end
def for_user(user_id) do
BookmarkFolder
|> where(user_id: ^user_id)
|> Repo.all()
end
def belongs_to_user?(folder_id, user_id) do
BookmarkFolder
|> where(id: ^folder_id, user_id: ^user_id)
|> Repo.exists?()
end
end

View File

@ -8,10 +8,13 @@ defmodule Pleroma.Caching do
@callback put(Cachex.cache(), any(), any(), Keyword.t()) :: {Cachex.status(), boolean()}
@callback put(Cachex.cache(), any(), any()) :: {Cachex.status(), boolean()}
@callback fetch!(Cachex.cache(), any(), function() | nil) :: any()
@callback fetch(Cachex.cache(), any(), function() | nil) ::
{atom(), any()} | {atom(), any(), any()}
# @callback del(Cachex.cache(), any(), Keyword.t()) :: {Cachex.status(), boolean()}
@callback del(Cachex.cache(), any()) :: {Cachex.status(), boolean()}
@callback stream!(Cachex.cache(), any()) :: Enumerable.t()
@callback expire_at(Cachex.cache(), binary(), number()) :: {Cachex.status(), boolean()}
@callback expire(Cachex.cache(), binary(), number()) :: {Cachex.status(), boolean()}
@callback exists?(Cachex.cache(), any()) :: {Cachex.status(), boolean()}
@callback execute!(Cachex.cache(), function()) :: any()
@callback get_and_update(Cachex.cache(), any(), function()) ::

View File

@ -256,7 +256,7 @@ def check_old_mrf_config do
move_namespace_and_warn(@mrf_config_map, warning_preface)
end
@spec move_namespace_and_warn([config_map()], String.t()) :: :ok | nil
@spec move_namespace_and_warn([config_map()], String.t()) :: :ok | :error
def move_namespace_and_warn(config_map, warning_preface) do
warning =
Enum.reduce(config_map, "", fn
@ -279,7 +279,7 @@ def move_namespace_and_warn(config_map, warning_preface) do
end
end
@spec check_media_proxy_whitelist_config() :: :ok | nil
@spec check_media_proxy_whitelist_config() :: :ok | :error
def check_media_proxy_whitelist_config do
whitelist = Config.get([:media_proxy, :whitelist])
@ -340,7 +340,7 @@ def check_gun_pool_options do
end
end
@spec check_activity_expiration_config() :: :ok | nil
@spec check_activity_expiration_config() :: :ok | :error
def check_activity_expiration_config do
warning_preface = """
!!!DEPRECATION WARNING!!!
@ -356,7 +356,7 @@ def check_activity_expiration_config do
)
end
@spec check_remote_ip_plug_name() :: :ok | nil
@spec check_remote_ip_plug_name() :: :ok | :error
def check_remote_ip_plug_name do
warning_preface = """
!!!DEPRECATION WARNING!!!
@ -372,7 +372,7 @@ def check_remote_ip_plug_name do
)
end
@spec check_uploaders_s3_public_endpoint() :: :ok | nil
@spec check_uploaders_s3_public_endpoint() :: :ok | :error
def check_uploaders_s3_public_endpoint do
s3_config = Pleroma.Config.get([Pleroma.Uploaders.S3])
@ -393,7 +393,7 @@ def check_uploaders_s3_public_endpoint do
end
end
@spec check_old_chat_shoutbox() :: :ok | nil
@spec check_old_chat_shoutbox() :: :ok | :error
def check_old_chat_shoutbox do
instance_config = Pleroma.Config.get([:instance])
chat_config = Pleroma.Config.get([:chat]) || []

View File

@ -21,7 +21,7 @@ def load(config, opts) do
with_runtime_config =
if File.exists?(config_path) do
# <https://git.pleroma.social/pleroma/pleroma/-/issues/3135>
%File.Stat{mode: mode} = File.lstat!(config_path)
%File.Stat{mode: mode} = File.stat!(config_path)
if Bitwise.band(mode, 0o007) > 0 do
raise "Configuration at #{config_path} has world-permissions, execute the following: chmod o= #{config_path}"

View File

@ -19,7 +19,8 @@ defmodule Pleroma.Constants do
"context_id",
"deleted_activity_id",
"pleroma_internal",
"generator"
"generator",
"rules"
]
)

View File

@ -100,7 +100,7 @@ def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"}
{:ok, _emoji_files} =
:zip.unzip(
to_charlist(file.path),
[{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, tmp_dir}]
[{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}]
)
{_, updated_pack} =

View File

@ -216,9 +216,6 @@ def compose_regex([_ | _] = filters, format) do
:re ->
~r/\b#{phrases}\b/i
_ ->
nil
end
end

View File

@ -241,13 +241,13 @@ def find(following_relationships, follower, following) do
end
@doc """
For a query with joined activity,
keeps rows where activity's actor is followed by user -or- is NOT domain-blocked by user.
For a query with joined activity's actor,
keeps rows where actor is followed by user -or- is NOT domain-blocked by user.
"""
def keep_following_or_not_domain_blocked(query, user) do
where(
query,
[_, activity],
[_, user_actor: user_actor],
fragment(
# "(actor's domain NOT in domain_blocks) OR (actor IS in followed AP IDs)"
"""
@ -255,9 +255,9 @@ def keep_following_or_not_domain_blocked(query, user) do
? = ANY(SELECT ap_id FROM users AS u INNER JOIN following_relationships AS fr
ON u.id = fr.following_id WHERE fr.follower_id = ? AND fr.state = ?)
""",
activity.actor,
user_actor.ap_id,
^user.domain_blocks,
activity.actor,
user_actor.ap_id,
^User.binary_id(user.id),
^accept_state_code()
)

View File

@ -18,10 +18,12 @@ def init(_opts) do
)
end
def start_worker(opts, retry \\ false) do
def start_worker(opts, last_attempt \\ false) do
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
if Enum.any?([retry, free_pool()], &match?(&1, :error)) do
funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
if Enum.any?(funs, fn fun -> fun.() end) do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
else

View File

@ -12,6 +12,8 @@ defmodule Pleroma.Helpers.MediaHelper do
require Logger
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
def missing_dependencies do
Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
if Pleroma.Utils.command_available?(executable) do
@ -40,28 +42,43 @@ def image_resize(url, options) do
end
# Note: video thumbnail is intentionally not resized (always has original dimensions)
@spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
def video_framegrab(url) do
with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
false <- @cachex.exists?(:failed_media_helper_cache, url),
{:ok, env} <- HTTP.get(url, [], pool: :media),
{:ok, pid} <- StringIO.open(env.body) do
body_stream = IO.binstream(pid, 1)
Exile.stream!(
[
executable,
"-i",
"pipe:0",
"-vframes",
"1",
"-f",
"mjpeg",
"pipe:1"
],
input: body_stream,
ignore_epipe: true,
stderr: :disable
)
|> Enum.into(<<>>)
task =
Task.async(fn ->
Exile.stream!(
[
executable,
"-i",
"pipe:0",
"-vframes",
"1",
"-f",
"mjpeg",
"pipe:1"
],
input: body_stream,
ignore_epipe: true,
stderr: :disable
)
|> Enum.into(<<>>)
end)
case Task.yield(task, 5_000) do
nil ->
Task.shutdown(task)
@cachex.put(:failed_media_helper_cache, url, nil)
{:error, {:ffmpeg, :timeout}}
result ->
{:ok, result}
end
else
nil -> {:error, {:ffmpeg, :command_not_found}}
{:error, _} = error -> error

View File

@ -6,8 +6,6 @@ defmodule Pleroma.HTML do
# Scrubbers are compiled on boot so they can be configured in OTP releases
# @on_load :compile_scrubbers
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
def compile_scrubbers do
dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")
@ -67,22 +65,9 @@ def ensure_scrubbed_html(
end
end
def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
@spec extract_first_external_url_from_object(Pleroma.Object.t()) :: String.t() | nil
def extract_first_external_url_from_object(%{data: %{"content" => content}})
when is_binary(content) do
unless object.data["fake"] do
key = "URL|#{object.id}"
@cachex.fetch!(:scrubber_cache, key, fn _key ->
{:commit, {:ok, extract_first_external_url(content)}}
end)
else
{:ok, extract_first_external_url(content)}
end
end
def extract_first_external_url_from_object(_), do: {:error, :no_content}
def extract_first_external_url(content) do
content
|> Floki.parse_fragment!()
|> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
@ -90,4 +75,6 @@ def extract_first_external_url(content) do
|> Floki.attribute("href")
|> Enum.at(0)
end
def extract_first_external_url_from_object(_), do: nil
end

View File

@ -54,12 +54,12 @@ def opts(request, options), do: %{request | opts: options}
@doc """
Add optional parameters to the request
"""
@spec add_param(Request.t(), atom(), atom(), any()) :: Request.t()
@spec add_param(Request.t(), atom(), atom() | String.t(), any()) :: Request.t()
def add_param(request, :query, :query, values), do: %{request | query: values}
def add_param(request, :body, :body, value), do: %{request | body: value}
def add_param(request, :body, key, value) do
def add_param(request, :body, key, value) when is_binary(key) do
request
|> Map.put(:body, Multipart.new())
|> Map.update!(

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Maps do
@ -18,4 +18,17 @@ def safe_put_in(data, keys, value) when is_map(data) and is_list(keys) do
rescue
_ -> data
end
def filter_empty_values(data) do
# TODO: Change to Map.filter in Elixir 1.13+
data
|> Enum.filter(fn
{_k, nil} -> false
{_k, ""} -> false
{_k, []} -> false
{_k, %{} = v} -> Map.keys(v) != []
{_k, _v} -> true
end)
|> Map.new()
end
end

View File

@ -77,7 +77,7 @@ def generate_backup_codes(%User{} = user) do
{:ok, codes}
else
{:error, msg} ->
%{error: msg}
{:error, msg}
end
end

View File

@ -14,6 +14,7 @@ defmodule Pleroma.MFA.TOTP do
@doc """
https://github.com/google/google-authenticator/wiki/Key-Uri-Format
"""
@spec provisioning_uri(String.t(), String.t(), list()) :: String.t()
def provisioning_uri(secret, label, opts \\ []) do
query =
%{
@ -27,7 +28,7 @@ def provisioning_uri(secret, label, opts \\ []) do
|> URI.encode_query()
%URI{scheme: "otpauth", host: "totp", path: "/" <> label, query: query}
|> URI.to_string()
|> to_string()
end
defp default_period, do: Config.get(@config_ns ++ [:period])

View File

@ -89,7 +89,7 @@ def last_read_query(user) do
where: q.seen == true,
select: type(q.id, :string),
limit: 1,
order_by: [desc: :id]
order_by: fragment("? desc nulls last", q.id)
)
end
@ -138,7 +138,7 @@ defp exclude_blocked(query, user, opts) do
blocked_ap_ids = opts[:blocked_users_ap_ids] || User.blocked_users_ap_ids(user)
query
|> where([n, a], a.actor not in ^blocked_ap_ids)
|> where([..., user_actor: user_actor], user_actor.ap_id not in ^blocked_ap_ids)
|> FollowingRelationship.keep_following_or_not_domain_blocked(user)
end
@ -149,7 +149,7 @@ defp exclude_blockers(query, user) do
blocker_ap_ids = User.incoming_relationships_ungrouped_ap_ids(user, [:block])
query
|> where([n, a], a.actor not in ^blocker_ap_ids)
|> where([..., user_actor: user_actor], user_actor.ap_id not in ^blocker_ap_ids)
end
end
@ -162,7 +162,7 @@ defp exclude_notification_muted(query, user, opts) do
opts[:notification_muted_users_ap_ids] || User.notification_muted_users_ap_ids(user)
query
|> where([n, a], a.actor not in ^notification_muted_ap_ids)
|> where([..., user_actor: user_actor], user_actor.ap_id not in ^notification_muted_ap_ids)
|> join(:left, [n, a], tm in ThreadMute,
on: tm.user_id == ^user.id and tm.context == fragment("?->>'context'", a.data),
as: :thread_mute
@ -362,43 +362,37 @@ def dismiss(%{id: user_id} = _user, id) do
end
end
@spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
def create_notifications(activity, options \\ [])
@spec create_notifications(Activity.t()) :: {:ok, [Notification.t()] | []}
def create_notifications(activity)
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
object = Object.normalize(activity, fetch: false)
if object && object.data["type"] == "Answer" do
{:ok, []}
else
do_create_notifications(activity, options)
do_create_notifications(activity)
end
end
def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
def create_notifications(%Activity{data: %{"type" => type}} = activity)
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag", "Update"] do
do_create_notifications(activity, options)
do_create_notifications(activity)
end
def create_notifications(_, _), do: {:ok, []}
def create_notifications(_), do: {:ok, []}
defp do_create_notifications(%Activity{} = activity, options) do
do_send = Keyword.get(options, :do_send, true)
defp do_create_notifications(%Activity{} = activity) do
enabled_receivers = get_notified_from_activity(activity)
{enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
potential_receivers = enabled_receivers ++ disabled_receivers
{enabled_subscribers, disabled_subscribers} = get_notified_subscribers_from_activity(activity)
potential_subscribers = (enabled_subscribers ++ disabled_subscribers) -- potential_receivers
enabled_subscribers = get_notified_subscribers_from_activity(activity)
notifications =
(Enum.map(potential_receivers, fn user ->
do_send = do_send && user in enabled_receivers
create_notification(activity, user, do_send: do_send)
(Enum.map(enabled_receivers, fn user ->
create_notification(activity, user)
end) ++
Enum.map(potential_subscribers, fn user ->
do_send = do_send && user in enabled_subscribers
create_notification(activity, user, do_send: do_send, type: "status")
Enum.map(enabled_subscribers -- enabled_receivers, fn user ->
create_notification(activity, user, type: "status")
end))
|> Enum.reject(&is_nil/1)
@ -458,7 +452,6 @@ defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do
# TODO move to sql, too.
def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
do_send = Keyword.get(opts, :do_send, true)
type = Keyword.get(opts, :type, type_from_activity(activity))
unless skip?(activity, user, opts) do
@ -473,11 +466,6 @@ def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
if do_send do
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
end
notification
end
end
@ -535,10 +523,7 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
|> exclude_relationship_restricted_ap_ids(activity)
|> exclude_thread_muter_ap_ids(activity)
notification_enabled_users =
Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
{notification_enabled_users, potential_receivers -- notification_enabled_users}
Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
def get_notified_from_activity(_, _local_only), do: {[], []}
@ -556,10 +541,7 @@ def get_notified_subscribers_from_activity(
potential_receivers =
User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
notification_enabled_users =
Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
{notification_enabled_users, potential_receivers -- notification_enabled_users}
Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
def get_notified_subscribers_from_activity(_, _), do: {[], []}
@ -671,6 +653,7 @@ def skip?(activity, user, opts \\ [])
def skip?(%Activity{} = activity, %User{} = user, opts) do
[
:self,
:internal,
:invisible,
:block_from_strangers,
:recently_followed,
@ -690,6 +673,12 @@ def skip?(:self, %Activity{} = activity, %User{} = user, opts) do
end
end
def skip?(:internal, %Activity{} = activity, _user, _opts) do
actor = activity.data["actor"]
user = User.get_cached_by_ap_id(actor)
User.internal?(user)
end
def skip?(:invisible, %Activity{} = activity, _user, _opts) do
actor = activity.data["actor"]
user = User.get_cached_by_ap_id(actor)
@ -776,4 +765,12 @@ def mark_context_as_read(%User{id: id}, context) do
)
|> Repo.update_all(set: [seen: true])
end
@spec send(list(Notification.t())) :: :ok
def send(notifications) do
Enum.each(notifications, fn notification ->
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
end)
end
end

View File

@ -61,15 +61,16 @@ def fetch_paginated(query, params, :offset, table_binding) do
|> Repo.all()
end
@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
def paginate(query, options, method \\ :keyset, table_binding \\ nil)
def paginate(list, options, _method, _table_binding) when is_list(list) do
@spec paginate_list(list(), keyword()) :: list()
def paginate_list(list, options) do
offset = options[:offset] || 0
limit = options[:limit] || 0
Enum.slice(list, offset, limit)
end
@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
def paginate(query, options, method \\ :keyset, table_binding \\ nil)
def paginate(query, options, :keyset, table_binding) do
query
|> restrict(:min_id, options, table_binding)

View File

@ -28,7 +28,7 @@ def verify_pass(password, hash) do
iterations = String.to_integer(iterations)
digest = String.to_atom(digest)
digest = String.to_existing_atom(digest)
binary_hash =
KeyGenerator.generate(password, salt, digest: digest, iterations: iterations, length: 64)

View File

@ -8,7 +8,7 @@ defmodule Pleroma.ReverseProxy do
~w(if-unmodified-since if-none-match) ++ @range_headers
@resp_cache_headers ~w(etag date last-modified)
@keep_resp_headers @resp_cache_headers ++
~w(content-length content-type content-disposition content-encoding) ++
~w(content-type content-disposition content-encoding) ++
~w(content-range accept-ranges vary)
@default_cache_control_header "public, max-age=1209600"
@valid_resp_codes [200, 206, 304]

68
lib/pleroma/rule.ex Normal file
View File

@ -0,0 +1,68 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Rule do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias Pleroma.Repo
alias Pleroma.Rule
schema "rules" do
field(:priority, :integer, default: 0)
field(:text, :string)
field(:hint, :string)
timestamps()
end
def changeset(%Rule{} = rule, params \\ %{}) do
rule
|> cast(params, [:priority, :text, :hint])
|> validate_required([:text])
end
def query do
Rule
|> order_by(asc: :priority)
|> order_by(asc: :id)
end
def get(ids) when is_list(ids) do
from(r in __MODULE__, where: r.id in ^ids)
|> Repo.all()
end
def get(id), do: Repo.get(__MODULE__, id)
def exists?(id) do
from(r in __MODULE__, where: r.id == ^id)
|> Repo.exists?()
end
def create(params) do
{:ok, rule} =
%Rule{}
|> changeset(params)
|> Repo.insert()
rule
end
def update(params, id) do
{:ok, rule} =
get(id)
|> changeset(params)
|> Repo.update()
rule
end
def delete(id) do
get(id)
|> Repo.delete()
end
end

View File

@ -23,19 +23,12 @@ def search(user, search_query, options \\ []) do
offset = Keyword.get(options, :offset, 0)
author = Keyword.get(options, :author)
search_function =
if :persistent_term.get({Pleroma.Repo, :postgres_version}) >= 11 do
:websearch
else
:plain
end
try do
Activity
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> restrict_public(user)
|> query_with(index_type, search_query, search_function)
|> query_with(index_type, search_query, :websearch)
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
|> maybe_restrict_blocked(user)

View File

@ -59,7 +59,7 @@ def handle_event(
_,
_
) do
Logger.error(fn ->
Logger.debug(fn ->
"Connection pool had to refuse opening a connection to #{key} due to connection limit exhaustion"
end)
end
@ -81,7 +81,7 @@ def handle_event(
%{key: key, protocol: :http},
_
) do
Logger.info(fn ->
Logger.debug(fn ->
"Pool worker for #{key}: #{length(clients)} clients are using an HTTP1 connection at the same time, head-of-line blocking might occur."
end)
end

View File

@ -8,6 +8,7 @@ defmodule Pleroma.User do
import Ecto.Changeset
import Ecto.Query
import Ecto, only: [assoc: 2]
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
alias Ecto.Multi
alias Pleroma.Activity
@ -596,9 +597,23 @@ def update_changeset(struct, params \\ %{}) do
defp put_fields(changeset) do
if raw_fields = get_change(changeset, :raw_fields) do
old_fields = changeset.data.raw_fields
raw_fields =
raw_fields
|> Enum.filter(fn %{"name" => n} -> n != "" end)
|> Enum.map(fn field ->
previous =
old_fields
|> Enum.find(fn %{"value" => value} -> field["value"] == value end)
if previous && Map.has_key?(previous, "verified_at") do
field
|> Map.put("verified_at", previous["verified_at"])
else
field
end
end)
fields =
raw_fields
@ -1200,6 +1215,10 @@ def update_and_set_cache(struct, params) do
def update_and_set_cache(changeset) do
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
if get_change(changeset, :raw_fields) do
BackgroundWorker.enqueue("verify_fields_links", %{"user_id" => user.id})
end
set_cache(user)
end
end
@ -1975,8 +1994,45 @@ def perform(:delete, %User{} = user) do
maybe_delete_from_db(user)
end
def perform(:verify_fields_links, user) do
profile_urls = [user.ap_id]
fields =
user.raw_fields
|> Enum.map(&verify_field_link(&1, profile_urls))
changeset =
user
|> update_changeset(%{raw_fields: fields})
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
set_cache(user)
end
end
def perform(:set_activation_async, user, status), do: set_activation(user, status)
defp verify_field_link(field, profile_urls) do
verified_at =
with %{"value" => value} <- field,
{:verified_at, nil} <- {:verified_at, Map.get(field, "verified_at")},
%{scheme: scheme, userinfo: nil, host: host}
when not_empty_string(host) and scheme in ["http", "https"] <-
URI.parse(value),
{:not_idn, true} <- {:not_idn, to_string(:idna.encode(host)) == host},
"me" <- Pleroma.Web.RelMe.maybe_put_rel_me(value, profile_urls) do
CommonUtils.to_masto_date(NaiveDateTime.utc_now())
else
{:verified_at, value} when not_empty_string(value) ->
value
_ ->
nil
end
Map.put(field, "verified_at", verified_at)
end
@spec external_users_query() :: Ecto.Query.t()
def external_users_query do
User.Query.build(%{
@ -2664,10 +2720,11 @@ def sanitize_html(%User{} = user) do
# - display name
def sanitize_html(%User{} = user, filter) do
fields =
Enum.map(user.fields, fn %{"name" => name, "value" => value} ->
Enum.map(user.fields, fn %{"name" => name, "value" => value} = fields ->
%{
"name" => name,
"value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly)
"value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly),
"verified_at" => Map.get(fields, "verified_at")
}
end)

View File

@ -196,7 +196,14 @@ defp wait_backup(backup, current_processed, task) do
end
end
@files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json']
@files [
'actor.json',
'outbox.json',
'likes.json',
'bookmarks.json',
'followers.json',
'following.json'
]
@spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
def export(%__MODULE__{} = backup, caller_pid) do
backup = Repo.preload(backup, :user)
@ -207,6 +214,8 @@ def export(%__MODULE__{} = backup, caller_pid) do
:ok <- statuses(dir, backup.user, caller_pid),
:ok <- likes(dir, backup.user, caller_pid),
:ok <- bookmarks(dir, backup.user, caller_pid),
:ok <- followers(dir, backup.user, caller_pid),
:ok <- following(dir, backup.user, caller_pid),
{:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir),
{:ok, _} <- File.rm_rf(dir) do
{:ok, zip_path}
@ -357,6 +366,16 @@ defp statuses(dir, user, caller_pid) do
caller_pid
)
end
defp followers(dir, user, caller_pid) do
User.get_followers_query(user)
|> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid)
end
defp following(dir, user, caller_pid) do
User.get_friends_query(user)
|> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid)
end
end
defmodule Pleroma.User.Backup.ProcessorAPI do

View File

@ -147,9 +147,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
# Splice in the child object if we have one.
activity = Maps.put_if_present(activity, :object, object)
ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
end)
Pleroma.Web.RichMedia.Card.get_by_activity(activity)
# Add local posts to search index
if local, do: Pleroma.Search.add_to_index(activity)
@ -177,7 +175,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
id: "pleroma:fakeid"
}
Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
Pleroma.Web.RichMedia.Card.get_by_activity(activity)
{:ok, activity}
{:remote_limit_pass, _} ->
@ -202,7 +200,8 @@ defp insert_activity_with_expiration(data, local, recipients) do
end
def notify_and_stream(activity) do
Notification.create_notifications(activity)
{:ok, notifications} = Notification.create_notifications(activity)
Notification.send(notifications)
original_activity =
case activity do
@ -1261,6 +1260,15 @@ defp restrict_quote_url(query, %{quote_url: quote_url}) do
defp restrict_quote_url(query, _), do: query
defp restrict_rule(query, %{rule_id: rule_id}) do
from(
activity in query,
where: fragment("(?)->'rules' \\? (?)", activity.data, ^rule_id)
)
end
defp restrict_rule(query, _), do: query
defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
defp exclude_poll_votes(query, _) do
@ -1423,6 +1431,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do
|> restrict_instance(opts)
|> restrict_announce_object_actor(opts)
|> restrict_filtered(opts)
|> restrict_rule(opts)
|> restrict_quote_url(opts)
|> maybe_restrict_deactivated_users(opts)
|> exclude_poll_votes(opts)

View File

@ -0,0 +1,59 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.ForceMention do
require Pleroma.Constants
alias Pleroma.Config
alias Pleroma.Object
alias Pleroma.User
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp get_author(url) do
with %Object{data: %{"actor" => actor}} <- Object.normalize(url, fetch: false),
%User{ap_id: ap_id, nickname: nickname} <- User.get_cached_by_ap_id(actor) do
%{"type" => "Mention", "href" => ap_id, "name" => "@#{nickname}"}
else
_ -> nil
end
end
defp prepend_author(tags, _, false), do: tags
defp prepend_author(tags, nil, _), do: tags
defp prepend_author(tags, url, _) do
actor = get_author(url)
if not is_nil(actor) do
[actor | tags]
else
tags
end
end
@impl true
def filter(%{"type" => "Create", "object" => %{"tag" => tag} = object} = activity) do
tag =
tag
|> prepend_author(
object["inReplyTo"],
Config.get([:mrf_force_mention, :mention_parent, true])
)
|> prepend_author(
object["quoteUrl"],
Config.get([:mrf_force_mention, :mention_quoted, true])
)
|> Enum.uniq()
{:ok, put_in(activity["object"]["tag"], tag)}
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe, do: {:ok, %{}}
end

View File

@ -36,6 +36,7 @@ defp steal_emoji({shortcode, url}, emoji_dir_path) do
extension = if extension == "", do: ".png", else: extension
shortcode = Path.basename(shortcode)
file_path = Path.join(emoji_dir_path, shortcode <> extension)
case File.write(file_path, response.body) do
@ -78,6 +79,7 @@ def filter(%{"object" => %{"emoji" => foreign_emojis, "actor" => actor}} = messa
new_emojis =
foreign_emojis
|> Enum.reject(fn {shortcode, _url} -> shortcode in installed_emoji end)
|> Enum.reject(fn {shortcode, _url} -> String.contains?(shortcode, ["/", "\\"]) end)
|> Enum.filter(fn {shortcode, _url} ->
reject_emoji? =
[:mrf_steal_emoji, :rejected_shortcodes]

View File

@ -12,13 +12,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
@primary_key false
embedded_schema do
field(:id, :string)
field(:type, :string)
field(:type, :string, default: "Link")
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:name, :string)
field(:blurhash, :string)
embeds_many :url, UrlObjectValidator, primary_key: false do
field(:type, :string)
field(:type, :string, default: "Link")
field(:href, ObjectValidators.Uri)
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:width, :integer)

View File

@ -4,6 +4,7 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.User
@ -24,6 +25,8 @@ def cast_and_filter_recipients(message, field, follower_collection, field_fallba
end
def fix_object_defaults(data) do
data = Maps.filter_empty_values(data)
context =
Utils.maybe_create_context(
data["context"] || data["conversation"] || data["inReplyTo"] || data["id"]

View File

@ -14,10 +14,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator do
embeds_one :replies, Replies, primary_key: false do
field(:totalItems, :integer)
field(:type, :string)
field(:type, :string, default: "Collection")
end
field(:type, :string)
field(:type, :string, default: "Note")
end
def changeset(struct, data) do

View File

@ -29,6 +29,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
field(:closed, ObjectValidators.DateTime)
field(:voters, {:array, ObjectValidators.ObjectID}, default: [])
field(:nonAnonymous, :boolean)
embeds_many(:anyOf, QuestionOptionsValidator)
embeds_many(:oneOf, QuestionOptionsValidator)
end

View File

@ -129,6 +129,10 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
_ -> {:error, e}
end
{:error, :pool_full} ->
Logger.debug("Publisher snoozing worker job due to full connection pool")
{:snooze, 30}
e ->
unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
Logger.metadata(activity: id, inbox: inbox)
@ -154,19 +158,18 @@ defp signature_host(%URI{port: port, scheme: scheme, host: host}) do
end
end
defp should_federate?(inbox, public) do
if public do
true
else
%{host: host} = URI.parse(inbox)
def should_federate?(nil, _), do: false
def should_federate?(_, true), do: true
quarantined_instances =
Config.get([:instance, :quarantined_instances], [])
|> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples()
|> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
def should_federate?(inbox, _) do
%{host: host} = URI.parse(inbox)
!Pleroma.Web.ActivityPub.MRF.subdomain_match?(quarantined_instances, host)
end
quarantined_instances =
Config.get([:instance, :quarantined_instances], [])
|> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples()
|> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
!Pleroma.Web.ActivityPub.MRF.subdomain_match?(quarantined_instances, host)
end
@spec recipients(User.t(), Activity.t()) :: [[User.t()]]

View File

@ -21,7 +21,6 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
alias Pleroma.Workers.PollWorker
@ -125,7 +124,7 @@ def handle(
nil
end
{:ok, notifications} = Notification.create_notifications(object, do_send: false)
{:ok, notifications} = Notification.create_notifications(object)
meta =
meta
@ -184,7 +183,11 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
liked_object = Object.get_by_ap_id(object.data["object"])
Utils.add_like_to_object(object, liked_object)
Notification.create_notifications(object)
{:ok, notifications} = Notification.create_notifications(object)
meta =
meta
|> add_notifications(notifications)
{:ok, object, meta}
end
@ -202,7 +205,7 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
def handle(%{data: %{"type" => "Create"}} = activity, meta) do
with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
%User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
{:ok, notifications} = Notification.create_notifications(activity, do_send: false)
{:ok, notifications} = Notification.create_notifications(activity)
{:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
{:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)
@ -227,9 +230,7 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
end
end
ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
end)
Pleroma.Web.RichMedia.Card.get_by_activity(activity)
Pleroma.Search.add_to_index(Map.put(activity, :object, object))
@ -258,11 +259,13 @@ def handle(%{data: %{"type" => "Announce"}} = object, meta) do
Utils.add_announce_to_object(object, announced_object)
if !User.internal?(user) do
Notification.create_notifications(object)
{:ok, notifications} = Notification.create_notifications(object)
ap_streamer().stream_out(object)
end
if !User.internal?(user), do: ap_streamer().stream_out(object)
meta =
meta
|> add_notifications(notifications)
{:ok, object, meta}
end
@ -283,7 +286,11 @@ def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
reacted_object = Object.get_by_ap_id(object.data["object"])
Utils.add_emoji_reaction_to_object(object, reacted_object)
Notification.create_notifications(object)
{:ok, notifications} = Notification.create_notifications(object)
meta =
meta
|> add_notifications(notifications)
{:ok, object, meta}
end
@ -587,10 +594,7 @@ defp delete_object(object) do
defp send_notifications(meta) do
Keyword.get(meta, :notifications, [])
|> Enum.each(fn notification ->
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
end)
|> Notification.send()
meta
end

View File

@ -336,10 +336,6 @@ def fix_tag(%{"tag" => %{} = tag} = object) do
def fix_tag(object), do: object
def fix_content_map(%{"contentMap" => nil} = object) do
Map.drop(object, ["contentMap"])
end
# content map usually only has one language so this will do for now.
def fix_content_map(%{"contentMap" => content_map} = object) do
content_groups = Map.to_list(content_map)

View File

@ -721,14 +721,18 @@ def make_listen_data(params, additional) do
#### Flag-related helpers
@spec make_flag_data(map(), map()) :: map()
def make_flag_data(%{actor: actor, context: context, content: content} = params, additional) do
def make_flag_data(
%{actor: actor, context: context, content: content} = params,
additional
) do
%{
"type" => "Flag",
"actor" => actor.ap_id,
"content" => content,
"object" => build_flag_object(params),
"context" => context,
"state" => "open"
"state" => "open",
"rules" => Map.get(params, :rules, nil)
}
|> Map.merge(additional)
end

Some files were not shown because too many files have changed in this diff Show More