Merge branch 'develop' into remove-twitter-api
This commit is contained in:
commit
4a306720e8
|
@ -1,4 +1,4 @@
|
||||||
image: elixir:1.8.1
|
image: elixir:1.9.4
|
||||||
|
|
||||||
variables: &global_variables
|
variables: &global_variables
|
||||||
POSTGRES_DB: pleroma_test
|
POSTGRES_DB: pleroma_test
|
||||||
|
@ -170,8 +170,7 @@ stop_review_app:
|
||||||
|
|
||||||
amd64:
|
amd64:
|
||||||
stage: release
|
stage: release
|
||||||
# TODO: Replace with upstream image when 1.9.0 comes out
|
image: elixir:1.10.3
|
||||||
image: rinpatch/elixir:1.9.0-rc.0
|
|
||||||
only: &release-only
|
only: &release-only
|
||||||
- stable@pleroma/pleroma
|
- stable@pleroma/pleroma
|
||||||
- develop@pleroma/pleroma
|
- develop@pleroma/pleroma
|
||||||
|
@ -208,8 +207,7 @@ amd64-musl:
|
||||||
stage: release
|
stage: release
|
||||||
artifacts: *release-artifacts
|
artifacts: *release-artifacts
|
||||||
only: *release-only
|
only: *release-only
|
||||||
# TODO: Replace with upstream image when 1.9.0 comes out
|
image: elixir:1.10.3-alpine
|
||||||
image: rinpatch/elixir:1.9.0-rc.0-alpine
|
|
||||||
cache: *release-cache
|
cache: *release-cache
|
||||||
variables: *release-variables
|
variables: *release-variables
|
||||||
before_script: &before-release-musl
|
before_script: &before-release-musl
|
||||||
|
@ -225,8 +223,7 @@ arm:
|
||||||
only: *release-only
|
only: *release-only
|
||||||
tags:
|
tags:
|
||||||
- arm32
|
- arm32
|
||||||
# TODO: Replace with upstream image when 1.9.0 comes out
|
image: elixir:1.10.3
|
||||||
image: rinpatch/elixir:1.9.0-rc.0-arm
|
|
||||||
cache: *release-cache
|
cache: *release-cache
|
||||||
variables: *release-variables
|
variables: *release-variables
|
||||||
before_script: *before-release
|
before_script: *before-release
|
||||||
|
@ -238,8 +235,7 @@ arm-musl:
|
||||||
only: *release-only
|
only: *release-only
|
||||||
tags:
|
tags:
|
||||||
- arm32
|
- arm32
|
||||||
# TODO: Replace with upstream image when 1.9.0 comes out
|
image: elixir:1.10.3-alpine
|
||||||
image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
|
|
||||||
cache: *release-cache
|
cache: *release-cache
|
||||||
variables: *release-variables
|
variables: *release-variables
|
||||||
before_script: *before-release-musl
|
before_script: *before-release-musl
|
||||||
|
@ -251,8 +247,7 @@ arm64:
|
||||||
only: *release-only
|
only: *release-only
|
||||||
tags:
|
tags:
|
||||||
- arm
|
- arm
|
||||||
# TODO: Replace with upstream image when 1.9.0 comes out
|
image: elixir:1.10.3
|
||||||
image: rinpatch/elixir:1.9.0-rc.0-arm64
|
|
||||||
cache: *release-cache
|
cache: *release-cache
|
||||||
variables: *release-variables
|
variables: *release-variables
|
||||||
before_script: *before-release
|
before_script: *before-release
|
||||||
|
@ -265,7 +260,7 @@ arm64-musl:
|
||||||
tags:
|
tags:
|
||||||
- arm
|
- arm
|
||||||
# TODO: Replace with upstream image when 1.9.0 comes out
|
# TODO: Replace with upstream image when 1.9.0 comes out
|
||||||
image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
|
image: elixir:1.10.3-alpine
|
||||||
cache: *release-cache
|
cache: *release-cache
|
||||||
variables: *release-variables
|
variables: *release-variables
|
||||||
before_script: *before-release-musl
|
before_script: *before-release-musl
|
||||||
|
|
33
CHANGELOG.md
33
CHANGELOG.md
|
@ -6,32 +6,60 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
## [unreleased]
|
## [unreleased]
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
- **Breaking:** Elixir >=1.9 is now required (was >= 1.8)
|
||||||
|
- In Conversations, return only direct messages as `last_status`
|
||||||
|
- Using the `only_media` filter on timelines will now exclude reblog media
|
||||||
|
- MFR policy to set global expiration for all local Create activities
|
||||||
|
- OGP rich media parser merged with TwitterCard
|
||||||
|
- Configuration: `:instance, rewrite_policy` moved to `:mrf, policies`, `:instance, :mrf_transparency` moved to `:mrf, :transparency`, `:instance, :mrf_transparency_exclusions` moved to `:mrf, :transparency_exclusions`. Old config namespace is deprecated.
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
<summary>API Changes</summary>
|
<summary>API Changes</summary>
|
||||||
|
|
||||||
- **Breaking:** Emoji API: changed methods and renamed routes.
|
- **Breaking:** Emoji API: changed methods and renamed routes.
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>Admin API Changes</summary>
|
||||||
|
|
||||||
|
- Status visibility stats: now can return stats per instance.
|
||||||
|
|
||||||
|
- Mix task to refresh counter cache (`mix pleroma.refresh_counter_cache`)
|
||||||
|
</details>
|
||||||
|
|
||||||
### Removed
|
### Removed
|
||||||
- **Breaking:** removed `with_move` parameter from notifications timeline.
|
- **Breaking:** removed `with_move` parameter from notifications timeline.
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
|
- Chats: Added support for federated chats. For details, see the docs.
|
||||||
|
- ActivityPub: Added support for existing AP ids for instances migrated from Mastodon.
|
||||||
|
- Instance: Add `background_image` to configuration and `/api/v1/instance`
|
||||||
- Instance: Extend `/api/v1/instance` with Pleroma-specific information.
|
- Instance: Extend `/api/v1/instance` with Pleroma-specific information.
|
||||||
- NodeInfo: `pleroma:api/v1/notifications:include_types_filter` to the `features` list.
|
- NodeInfo: `pleroma:api/v1/notifications:include_types_filter` to the `features` list.
|
||||||
- NodeInfo: `pleroma_emoji_reactions` to the `features` list.
|
- NodeInfo: `pleroma_emoji_reactions` to the `features` list.
|
||||||
- Configuration: `:restrict_unauthenticated` setting, restrict access for unauthenticated users to timelines (public and federate), user profiles and statuses.
|
- Configuration: `:restrict_unauthenticated` setting, restrict access for unauthenticated users to timelines (public and federate), user profiles and statuses.
|
||||||
- Configuration: Add `:database_config_whitelist` setting to whitelist settings which can be configured from AdminFE.
|
- Configuration: Add `:database_config_whitelist` setting to whitelist settings which can be configured from AdminFE.
|
||||||
|
- Configuration: `filename_display_max_length` option to set filename truncate limit, if filename display enabled (0 = no limit).
|
||||||
- New HTTP adapter [gun](https://github.com/ninenines/gun). Gun adapter requires minimum OTP version of 22.2 otherwise Pleroma won’t start. For hackney OTP update is not required.
|
- New HTTP adapter [gun](https://github.com/ninenines/gun). Gun adapter requires minimum OTP version of 22.2 otherwise Pleroma won’t start. For hackney OTP update is not required.
|
||||||
- Mix task to create trusted OAuth App.
|
- Mix task to create trusted OAuth App.
|
||||||
|
- Mix task to reset MFA for user accounts
|
||||||
- Notifications: Added `follow_request` notification type.
|
- Notifications: Added `follow_request` notification type.
|
||||||
- Added `:reject_deletes` group to SimplePolicy
|
- Added `:reject_deletes` group to SimplePolicy
|
||||||
|
- MRF (`EmojiStealPolicy`): New MRF Policy which allows to automatically download emojis from remote instances
|
||||||
|
- Support pagination in emoji packs API (for packs and for files in pack)
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
<summary>API Changes</summary>
|
<summary>API Changes</summary>
|
||||||
|
- Mastodon API: Add pleroma.parents_visible field to statuses.
|
||||||
- Mastodon API: Extended `/api/v1/instance`.
|
- Mastodon API: Extended `/api/v1/instance`.
|
||||||
- Mastodon API: Support for `include_types` in `/api/v1/notifications`.
|
- Mastodon API: Support for `include_types` in `/api/v1/notifications`.
|
||||||
- Mastodon API: Added `/api/v1/notifications/:id/dismiss` endpoint.
|
- Mastodon API: Added `/api/v1/notifications/:id/dismiss` endpoint.
|
||||||
- Mastodon API: Add support for filtering replies in public and home timelines
|
- Mastodon API: Add support for filtering replies in public and home timelines
|
||||||
|
- Mastodon API: Support for `bot` field in `/api/v1/accounts/update_credentials`
|
||||||
- Admin API: endpoints for create/update/delete OAuth Apps.
|
- Admin API: endpoints for create/update/delete OAuth Apps.
|
||||||
- Admin API: endpoint for status view.
|
- Admin API: endpoint for status view.
|
||||||
|
- OTP: Add command to reload emoji packs
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
|
@ -40,6 +68,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Fix follower/blocks import when nicknames starts with @
|
- Fix follower/blocks import when nicknames starts with @
|
||||||
- Filtering of push notifications on activities from blocked domains
|
- Filtering of push notifications on activities from blocked domains
|
||||||
- Resolving Peertube accounts with Webfinger
|
- Resolving Peertube accounts with Webfinger
|
||||||
|
- `blob:` urls not being allowed by connect-src CSP
|
||||||
|
- Mastodon API: fix `GET /api/v1/notifications` not returning the full result set
|
||||||
|
|
||||||
## [Unreleased (patch)]
|
## [Unreleased (patch)]
|
||||||
|
|
||||||
|
@ -78,6 +108,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
2. Run database migrations (inside Pleroma directory):
|
2. Run database migrations (inside Pleroma directory):
|
||||||
- OTP: `./bin/pleroma_ctl migrate`
|
- OTP: `./bin/pleroma_ctl migrate`
|
||||||
- From Source: `mix ecto.migrate`
|
- From Source: `mix ecto.migrate`
|
||||||
|
3. Reset status visibility counters (inside Pleroma directory):
|
||||||
|
- OTP: `./bin/pleroma_ctl refresh_counter_cache`
|
||||||
|
- From Source: `mix pleroma.refresh_counter_cache`
|
||||||
|
|
||||||
|
|
||||||
## [2.0.2] - 2020-04-08
|
## [2.0.2] - 2020-04-08
|
||||||
|
|
10
README.md
10
README.md
|
@ -34,6 +34,16 @@ Currently Pleroma is not packaged by any OS/Distros, but if you want to package
|
||||||
### Docker
|
### Docker
|
||||||
While we don’t provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://glitch.sh/sn0w/pleroma-docker>.
|
While we don’t provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://glitch.sh/sn0w/pleroma-docker>.
|
||||||
|
|
||||||
|
### Compilation Troubleshooting
|
||||||
|
If you ever encounter compilation issues during the updating of Pleroma, you can try these commands and see if they fix things:
|
||||||
|
|
||||||
|
- `mix deps.clean --all`
|
||||||
|
- `mix local.rebar`
|
||||||
|
- `mix local.hex`
|
||||||
|
- `rm -r _build`
|
||||||
|
|
||||||
|
If you are not developing Pleroma, it is better to use the OTP release, which comes with everything precompiled.
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
- Latest Released revision: <https://docs.pleroma.social>
|
- Latest Released revision: <https://docs.pleroma.social>
|
||||||
- Latest Git revision: <https://docs-develop.pleroma.social>
|
- Latest Git revision: <https://docs-develop.pleroma.social>
|
||||||
|
|
|
@ -22,8 +22,21 @@ defmodule Pleroma.LoadTesting.Activities do
|
||||||
@max_concurrency 10
|
@max_concurrency 10
|
||||||
|
|
||||||
@visibility ~w(public private direct unlisted)
|
@visibility ~w(public private direct unlisted)
|
||||||
@types ~w(simple emoji mentions hell_thread attachment tag like reblog simple_thread remote)
|
@types [
|
||||||
@groups ~w(user friends non_friends)
|
:simple,
|
||||||
|
:emoji,
|
||||||
|
:mentions,
|
||||||
|
:hell_thread,
|
||||||
|
:attachment,
|
||||||
|
:tag,
|
||||||
|
:like,
|
||||||
|
:reblog,
|
||||||
|
:simple_thread
|
||||||
|
]
|
||||||
|
@groups [:friends_local, :friends_remote, :non_friends_local, :non_friends_local]
|
||||||
|
@remote_groups [:friends_remote, :non_friends_remote]
|
||||||
|
@friends_groups [:friends_local, :friends_remote]
|
||||||
|
@non_friends_groups [:non_friends_local, :non_friends_remote]
|
||||||
|
|
||||||
@spec generate(User.t(), keyword()) :: :ok
|
@spec generate(User.t(), keyword()) :: :ok
|
||||||
def generate(user, opts \\ []) do
|
def generate(user, opts \\ []) do
|
||||||
|
@ -34,33 +47,24 @@ def generate(user, opts \\ []) do
|
||||||
|
|
||||||
opts = Keyword.merge(@defaults, opts)
|
opts = Keyword.merge(@defaults, opts)
|
||||||
|
|
||||||
friends =
|
users = Users.prepare_users(user, opts)
|
||||||
user
|
|
||||||
|> Users.get_users(limit: opts[:friends_used], local: :local, friends?: true)
|
|
||||||
|> Enum.shuffle()
|
|
||||||
|
|
||||||
non_friends =
|
{:ok, _} = Agent.start_link(fn -> users[:non_friends_remote] end, name: :non_friends_remote)
|
||||||
user
|
|
||||||
|> Users.get_users(limit: opts[:non_friends_used], local: :local, friends?: false)
|
|
||||||
|> Enum.shuffle()
|
|
||||||
|
|
||||||
task_data =
|
task_data =
|
||||||
for visibility <- @visibility,
|
for visibility <- @visibility,
|
||||||
type <- @types,
|
type <- @types,
|
||||||
group <- @groups,
|
group <- [:user | @groups],
|
||||||
do: {visibility, type, group}
|
do: {visibility, type, group}
|
||||||
|
|
||||||
IO.puts("Starting generating #{opts[:iterations]} iterations of activities...")
|
IO.puts("Starting generating #{opts[:iterations]} iterations of activities...")
|
||||||
|
|
||||||
friends_thread = Enum.take(friends, 5)
|
|
||||||
non_friends_thread = Enum.take(friends, 5)
|
|
||||||
|
|
||||||
public_long_thread = fn ->
|
public_long_thread = fn ->
|
||||||
generate_long_thread("public", user, friends_thread, non_friends_thread, opts)
|
generate_long_thread("public", users, opts)
|
||||||
end
|
end
|
||||||
|
|
||||||
private_long_thread = fn ->
|
private_long_thread = fn ->
|
||||||
generate_long_thread("private", user, friends_thread, non_friends_thread, opts)
|
generate_long_thread("private", users, opts)
|
||||||
end
|
end
|
||||||
|
|
||||||
iterations = opts[:iterations]
|
iterations = opts[:iterations]
|
||||||
|
@ -73,10 +77,10 @@ def generate(user, opts \\ []) do
|
||||||
i when i == iterations - 2 ->
|
i when i == iterations - 2 ->
|
||||||
spawn(public_long_thread)
|
spawn(public_long_thread)
|
||||||
spawn(private_long_thread)
|
spawn(private_long_thread)
|
||||||
generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
|
generate_activities(users, Enum.shuffle(task_data), opts)
|
||||||
|
|
||||||
_ ->
|
_ ->
|
||||||
generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
|
generate_activities(users, Enum.shuffle(task_data), opts)
|
||||||
end
|
end
|
||||||
)
|
)
|
||||||
end)
|
end)
|
||||||
|
@ -123,22 +127,22 @@ def generate_tagged_activities(opts \\ []) do
|
||||||
Enum.each(1..activity_count, fn _ ->
|
Enum.each(1..activity_count, fn _ ->
|
||||||
random = :rand.uniform()
|
random = :rand.uniform()
|
||||||
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
|
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
|
||||||
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
|
CommonAPI.post(Enum.random(users), %{status: "a post with the tag #tag_#{i}"})
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp generate_long_thread(visibility, user, friends, non_friends, _opts) do
|
defp generate_long_thread(visibility, users, _opts) do
|
||||||
group =
|
group =
|
||||||
if visibility == "public",
|
if visibility == "public",
|
||||||
do: "friends",
|
do: :friends_local,
|
||||||
else: "user"
|
else: :user
|
||||||
|
|
||||||
tasks = get_reply_tasks(visibility, group) |> Stream.cycle() |> Enum.take(50)
|
tasks = get_reply_tasks(visibility, group) |> Stream.cycle() |> Enum.take(50)
|
||||||
|
|
||||||
{:ok, activity} =
|
{:ok, activity} =
|
||||||
CommonAPI.post(user, %{
|
CommonAPI.post(users[:user], %{
|
||||||
"status" => "Start of #{visibility} long thread",
|
status: "Start of #{visibility} long thread",
|
||||||
"visibility" => visibility
|
visibility: visibility
|
||||||
})
|
})
|
||||||
|
|
||||||
Agent.update(:benchmark_state, fn state ->
|
Agent.update(:benchmark_state, fn state ->
|
||||||
|
@ -150,31 +154,28 @@ defp generate_long_thread(visibility, user, friends, non_friends, _opts) do
|
||||||
Map.put(state, key, activity)
|
Map.put(state, key, activity)
|
||||||
end)
|
end)
|
||||||
|
|
||||||
acc = {activity.id, ["@" <> user.nickname, "reply to long thread"]}
|
acc = {activity.id, ["@" <> users[:user].nickname, "reply to long thread"]}
|
||||||
insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc)
|
insert_replies_for_long_thread(tasks, visibility, users, acc)
|
||||||
IO.puts("Generating #{visibility} long thread ended\n")
|
IO.puts("Generating #{visibility} long thread ended\n")
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc) do
|
defp insert_replies_for_long_thread(tasks, visibility, users, acc) do
|
||||||
Enum.reduce(tasks, acc, fn
|
Enum.reduce(tasks, acc, fn
|
||||||
"friend", {id, data} ->
|
:user, {id, data} ->
|
||||||
friend = Enum.random(friends)
|
user = users[:user]
|
||||||
insert_reply(friend, List.delete(data, "@" <> friend.nickname), id, visibility)
|
|
||||||
|
|
||||||
"non_friend", {id, data} ->
|
|
||||||
non_friend = Enum.random(non_friends)
|
|
||||||
insert_reply(non_friend, List.delete(data, "@" <> non_friend.nickname), id, visibility)
|
|
||||||
|
|
||||||
"user", {id, data} ->
|
|
||||||
insert_reply(user, List.delete(data, "@" <> user.nickname), id, visibility)
|
insert_reply(user, List.delete(data, "@" <> user.nickname), id, visibility)
|
||||||
|
|
||||||
|
group, {id, data} ->
|
||||||
|
replier = Enum.random(users[group])
|
||||||
|
insert_reply(replier, List.delete(data, "@" <> replier.nickname), id, visibility)
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp generate_activities(user, friends, non_friends, task_data, opts) do
|
defp generate_activities(users, task_data, opts) do
|
||||||
Task.async_stream(
|
Task.async_stream(
|
||||||
task_data,
|
task_data,
|
||||||
fn {visibility, type, group} ->
|
fn {visibility, type, group} ->
|
||||||
insert_activity(type, visibility, group, user, friends, non_friends, opts)
|
insert_activity(type, visibility, group, users, opts)
|
||||||
end,
|
end,
|
||||||
max_concurrency: @max_concurrency,
|
max_concurrency: @max_concurrency,
|
||||||
timeout: 30_000
|
timeout: 30_000
|
||||||
|
@ -182,47 +183,46 @@ defp generate_activities(user, friends, non_friends, task_data, opts) do
|
||||||
|> Stream.run()
|
|> Stream.run()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("simple", visibility, group, user, friends, non_friends, _opts) do
|
defp insert_local_activity(visibility, group, users, status) do
|
||||||
{:ok, _activity} =
|
{:ok, _} =
|
||||||
group
|
group
|
||||||
|> get_actor(user, friends, non_friends)
|
|> get_actor(users)
|
||||||
|> CommonAPI.post(%{"status" => "Simple status", "visibility" => visibility})
|
|> CommonAPI.post(%{status: status, visibility: visibility})
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("emoji", visibility, group, user, friends, non_friends, _opts) do
|
defp insert_remote_activity(visibility, group, users, status) do
|
||||||
{:ok, _activity} =
|
actor = get_actor(group, users)
|
||||||
group
|
{act_data, obj_data} = prepare_activity_data(actor, visibility, users[:user])
|
||||||
|> get_actor(user, friends, non_friends)
|
{activity_data, object_data} = other_data(actor, status)
|
||||||
|> CommonAPI.post(%{
|
|
||||||
"status" => "Simple status with emoji :firefox:",
|
activity_data
|
||||||
"visibility" => visibility
|
|> Map.merge(act_data)
|
||||||
})
|
|> Map.put("object", Map.merge(object_data, obj_data))
|
||||||
|
|> Pleroma.Web.ActivityPub.ActivityPub.insert(false)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("mentions", visibility, group, user, friends, non_friends, _opts) do
|
defp user_mentions(users) do
|
||||||
user_mentions =
|
user_mentions =
|
||||||
get_random_mentions(friends, Enum.random(0..3)) ++
|
Enum.reduce(
|
||||||
get_random_mentions(non_friends, Enum.random(0..3))
|
@groups,
|
||||||
|
[],
|
||||||
|
fn group, acc ->
|
||||||
|
acc ++ get_random_mentions(users[group], Enum.random(0..2))
|
||||||
|
end
|
||||||
|
)
|
||||||
|
|
||||||
user_mentions =
|
|
||||||
if Enum.random([true, false]),
|
if Enum.random([true, false]),
|
||||||
do: ["@" <> user.nickname | user_mentions],
|
do: ["@" <> users[:user].nickname | user_mentions],
|
||||||
else: user_mentions
|
else: user_mentions
|
||||||
|
|
||||||
{:ok, _activity} =
|
|
||||||
group
|
|
||||||
|> get_actor(user, friends, non_friends)
|
|
||||||
|> CommonAPI.post(%{
|
|
||||||
"status" => Enum.join(user_mentions, ", ") <> " simple status with mentions",
|
|
||||||
"visibility" => visibility
|
|
||||||
})
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("hell_thread", visibility, group, user, friends, non_friends, _opts) do
|
defp hell_thread_mentions(users) do
|
||||||
mentions =
|
|
||||||
with {:ok, nil} <- Cachex.get(:user_cache, "hell_thread_mentions") do
|
with {:ok, nil} <- Cachex.get(:user_cache, "hell_thread_mentions") do
|
||||||
cached =
|
cached =
|
||||||
([user | Enum.take(friends, 10)] ++ Enum.take(non_friends, 10))
|
@groups
|
||||||
|
|> Enum.reduce([users[:user]], fn group, acc ->
|
||||||
|
acc ++ Enum.take(users[group], 5)
|
||||||
|
end)
|
||||||
|> Enum.map(&"@#{&1.nickname}")
|
|> Enum.map(&"@#{&1.nickname}")
|
||||||
|> Enum.join(", ")
|
|> Enum.join(", ")
|
||||||
|
|
||||||
|
@ -231,18 +231,56 @@ defp insert_activity("hell_thread", visibility, group, user, friends, non_friend
|
||||||
else
|
else
|
||||||
{:ok, cached} -> cached
|
{:ok, cached} -> cached
|
||||||
end
|
end
|
||||||
|
|
||||||
{:ok, _activity} =
|
|
||||||
group
|
|
||||||
|> get_actor(user, friends, non_friends)
|
|
||||||
|> CommonAPI.post(%{
|
|
||||||
"status" => mentions <> " hell thread status",
|
|
||||||
"visibility" => visibility
|
|
||||||
})
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("attachment", visibility, group, user, friends, non_friends, _opts) do
|
defp insert_activity(:simple, visibility, group, users, _opts)
|
||||||
actor = get_actor(group, user, friends, non_friends)
|
when group in @remote_groups do
|
||||||
|
insert_remote_activity(visibility, group, users, "Remote status")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:simple, visibility, group, users, _opts) do
|
||||||
|
insert_local_activity(visibility, group, users, "Simple status")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:emoji, visibility, group, users, _opts)
|
||||||
|
when group in @remote_groups do
|
||||||
|
insert_remote_activity(visibility, group, users, "Remote status with emoji :firefox:")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:emoji, visibility, group, users, _opts) do
|
||||||
|
insert_local_activity(visibility, group, users, "Simple status with emoji :firefox:")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:mentions, visibility, group, users, _opts)
|
||||||
|
when group in @remote_groups do
|
||||||
|
mentions = user_mentions(users)
|
||||||
|
|
||||||
|
status = Enum.join(mentions, ", ") <> " remote status with mentions"
|
||||||
|
|
||||||
|
insert_remote_activity(visibility, group, users, status)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:mentions, visibility, group, users, _opts) do
|
||||||
|
mentions = user_mentions(users)
|
||||||
|
|
||||||
|
status = Enum.join(mentions, ", ") <> " simple status with mentions"
|
||||||
|
insert_remote_activity(visibility, group, users, status)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:hell_thread, visibility, group, users, _)
|
||||||
|
when group in @remote_groups do
|
||||||
|
mentions = hell_thread_mentions(users)
|
||||||
|
insert_remote_activity(visibility, group, users, mentions <> " remote hell thread status")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:hell_thread, visibility, group, users, _opts) do
|
||||||
|
mentions = hell_thread_mentions(users)
|
||||||
|
|
||||||
|
insert_local_activity(visibility, group, users, mentions <> " hell thread status")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_activity(:attachment, visibility, group, users, _opts) do
|
||||||
|
actor = get_actor(group, users)
|
||||||
|
|
||||||
obj_data = %{
|
obj_data = %{
|
||||||
"actor" => actor.ap_id,
|
"actor" => actor.ap_id,
|
||||||
|
@ -262,118 +300,94 @@ defp insert_activity("attachment", visibility, group, user, friends, non_friends
|
||||||
|
|
||||||
{:ok, _activity} =
|
{:ok, _activity} =
|
||||||
CommonAPI.post(actor, %{
|
CommonAPI.post(actor, %{
|
||||||
"status" => "Post with attachment",
|
status: "Post with attachment",
|
||||||
"visibility" => visibility,
|
visibility: visibility,
|
||||||
"media_ids" => [object.id]
|
media_ids: [object.id]
|
||||||
})
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("tag", visibility, group, user, friends, non_friends, _opts) do
|
defp insert_activity(:tag, visibility, group, users, _opts) do
|
||||||
{:ok, _activity} =
|
insert_local_activity(visibility, group, users, "Status with #tag")
|
||||||
group
|
|
||||||
|> get_actor(user, friends, non_friends)
|
|
||||||
|> CommonAPI.post(%{"status" => "Status with #tag", "visibility" => visibility})
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("like", visibility, group, user, friends, non_friends, opts) do
|
defp insert_activity(:like, visibility, group, users, opts) do
|
||||||
actor = get_actor(group, user, friends, non_friends)
|
actor = get_actor(group, users)
|
||||||
|
|
||||||
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
|
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
|
||||||
{:ok, _activity} <- CommonAPI.favorite(actor, activity_id) do
|
{:ok, _activity} <- CommonAPI.favorite(actor, activity_id) do
|
||||||
:ok
|
:ok
|
||||||
else
|
else
|
||||||
{:error, _} ->
|
{:error, _} ->
|
||||||
insert_activity("like", visibility, group, user, friends, non_friends, opts)
|
insert_activity(:like, visibility, group, users, opts)
|
||||||
|
|
||||||
nil ->
|
nil ->
|
||||||
Process.sleep(15)
|
Process.sleep(15)
|
||||||
insert_activity("like", visibility, group, user, friends, non_friends, opts)
|
insert_activity(:like, visibility, group, users, opts)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("reblog", visibility, group, user, friends, non_friends, opts) do
|
defp insert_activity(:reblog, visibility, group, users, opts) do
|
||||||
actor = get_actor(group, user, friends, non_friends)
|
actor = get_actor(group, users)
|
||||||
|
|
||||||
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
|
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
|
||||||
{:ok, _activity, _object} <- CommonAPI.repeat(activity_id, actor) do
|
{:ok, _activity} <- CommonAPI.repeat(activity_id, actor) do
|
||||||
:ok
|
:ok
|
||||||
else
|
else
|
||||||
{:error, _} ->
|
{:error, _} ->
|
||||||
insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
|
insert_activity(:reblog, visibility, group, users, opts)
|
||||||
|
|
||||||
nil ->
|
nil ->
|
||||||
Process.sleep(15)
|
Process.sleep(15)
|
||||||
insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
|
insert_activity(:reblog, visibility, group, users, opts)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("simple_thread", visibility, group, user, friends, non_friends, _opts)
|
defp insert_activity(:simple_thread, "direct", group, users, _opts) do
|
||||||
when visibility in ["public", "unlisted", "private"] do
|
actor = get_actor(group, users)
|
||||||
actor = get_actor(group, user, friends, non_friends)
|
|
||||||
tasks = get_reply_tasks(visibility, group)
|
|
||||||
|
|
||||||
{:ok, activity} =
|
|
||||||
CommonAPI.post(user, %{"status" => "Simple status", "visibility" => visibility})
|
|
||||||
|
|
||||||
acc = {activity.id, ["@" <> actor.nickname, "reply to status"]}
|
|
||||||
insert_replies(tasks, visibility, user, friends, non_friends, acc)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp insert_activity("simple_thread", "direct", group, user, friends, non_friends, _opts) do
|
|
||||||
actor = get_actor(group, user, friends, non_friends)
|
|
||||||
tasks = get_reply_tasks("direct", group)
|
tasks = get_reply_tasks("direct", group)
|
||||||
|
|
||||||
list =
|
list =
|
||||||
case group do
|
case group do
|
||||||
"non_friends" ->
|
:user ->
|
||||||
Enum.take(non_friends, 3)
|
group = Enum.random(@friends_groups)
|
||||||
|
Enum.take(users[group], 3)
|
||||||
|
|
||||||
_ ->
|
_ ->
|
||||||
Enum.take(friends, 3)
|
Enum.take(users[group], 3)
|
||||||
end
|
end
|
||||||
|
|
||||||
data = Enum.map(list, &("@" <> &1.nickname))
|
data = Enum.map(list, &("@" <> &1.nickname))
|
||||||
|
|
||||||
{:ok, activity} =
|
{:ok, activity} =
|
||||||
CommonAPI.post(actor, %{
|
CommonAPI.post(actor, %{
|
||||||
"status" => Enum.join(data, ", ") <> "simple status",
|
status: Enum.join(data, ", ") <> "simple status",
|
||||||
"visibility" => "direct"
|
visibility: "direct"
|
||||||
})
|
})
|
||||||
|
|
||||||
acc = {activity.id, ["@" <> user.nickname | data] ++ ["reply to status"]}
|
acc = {activity.id, ["@" <> users[:user].nickname | data] ++ ["reply to status"]}
|
||||||
insert_direct_replies(tasks, user, list, acc)
|
insert_direct_replies(tasks, users[:user], list, acc)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_activity("remote", _, "user", _, _, _, _), do: :ok
|
defp insert_activity(:simple_thread, visibility, group, users, _opts) do
|
||||||
|
actor = get_actor(group, users)
|
||||||
|
tasks = get_reply_tasks(visibility, group)
|
||||||
|
|
||||||
defp insert_activity("remote", visibility, group, user, _friends, _non_friends, opts) do
|
{:ok, activity} =
|
||||||
remote_friends =
|
CommonAPI.post(users[:user], %{status: "Simple status", visibility: visibility})
|
||||||
Users.get_users(user, limit: opts[:friends_used], local: :external, friends?: true)
|
|
||||||
|
|
||||||
remote_non_friends =
|
acc = {activity.id, ["@" <> actor.nickname, "reply to status"]}
|
||||||
Users.get_users(user, limit: opts[:non_friends_used], local: :external, friends?: false)
|
insert_replies(tasks, visibility, users, acc)
|
||||||
|
|
||||||
actor = get_actor(group, user, remote_friends, remote_non_friends)
|
|
||||||
|
|
||||||
{act_data, obj_data} = prepare_activity_data(actor, visibility, user)
|
|
||||||
{activity_data, object_data} = other_data(actor)
|
|
||||||
|
|
||||||
activity_data
|
|
||||||
|> Map.merge(act_data)
|
|
||||||
|> Map.put("object", Map.merge(object_data, obj_data))
|
|
||||||
|> Pleroma.Web.ActivityPub.ActivityPub.insert(false)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp get_actor("user", user, _friends, _non_friends), do: user
|
defp get_actor(:user, %{user: user}), do: user
|
||||||
defp get_actor("friends", _user, friends, _non_friends), do: Enum.random(friends)
|
defp get_actor(group, users), do: Enum.random(users[group])
|
||||||
defp get_actor("non_friends", _user, _friends, non_friends), do: Enum.random(non_friends)
|
|
||||||
|
|
||||||
defp other_data(actor) do
|
defp other_data(actor, content) do
|
||||||
%{host: host} = URI.parse(actor.ap_id)
|
%{host: host} = URI.parse(actor.ap_id)
|
||||||
datetime = DateTime.utc_now()
|
datetime = DateTime.utc_now()
|
||||||
context_id = "http://#{host}:4000/contexts/#{UUID.generate()}"
|
context_id = "https://#{host}/contexts/#{UUID.generate()}"
|
||||||
activity_id = "http://#{host}:4000/activities/#{UUID.generate()}"
|
activity_id = "https://#{host}/activities/#{UUID.generate()}"
|
||||||
object_id = "http://#{host}:4000/objects/#{UUID.generate()}"
|
object_id = "https://#{host}/objects/#{UUID.generate()}"
|
||||||
|
|
||||||
activity_data = %{
|
activity_data = %{
|
||||||
"actor" => actor.ap_id,
|
"actor" => actor.ap_id,
|
||||||
|
@ -390,7 +404,7 @@ defp other_data(actor) do
|
||||||
"attributedTo" => actor.ap_id,
|
"attributedTo" => actor.ap_id,
|
||||||
"bcc" => [],
|
"bcc" => [],
|
||||||
"bto" => [],
|
"bto" => [],
|
||||||
"content" => "Remote post",
|
"content" => content,
|
||||||
"context" => context_id,
|
"context" => context_id,
|
||||||
"conversation" => context_id,
|
"conversation" => context_id,
|
||||||
"emoji" => %{},
|
"emoji" => %{},
|
||||||
|
@ -476,60 +490,74 @@ defp prepare_activity_data(_actor, "direct", mention) do
|
||||||
{act_data, obj_data}
|
{act_data, obj_data}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp get_reply_tasks("public", "user"), do: ~w(friend non_friend user)
|
defp get_reply_tasks("public", :user) do
|
||||||
defp get_reply_tasks("public", "friends"), do: ~w(non_friend user friend)
|
[:friends_local, :friends_remote, :non_friends_local, :non_friends_remote, :user]
|
||||||
defp get_reply_tasks("public", "non_friends"), do: ~w(user friend non_friend)
|
end
|
||||||
|
|
||||||
defp get_reply_tasks(visibility, "user") when visibility in ["unlisted", "private"],
|
defp get_reply_tasks("public", group) when group in @friends_groups do
|
||||||
do: ~w(friend user friend)
|
[:non_friends_local, :non_friends_remote, :user, :friends_local, :friends_remote]
|
||||||
|
end
|
||||||
|
|
||||||
defp get_reply_tasks(visibility, "friends") when visibility in ["unlisted", "private"],
|
defp get_reply_tasks("public", group) when group in @non_friends_groups do
|
||||||
do: ~w(user friend user)
|
[:user, :friends_local, :friends_remote, :non_friends_local, :non_friends_remote]
|
||||||
|
end
|
||||||
|
|
||||||
defp get_reply_tasks(visibility, "non_friends") when visibility in ["unlisted", "private"],
|
defp get_reply_tasks(visibility, :user) when visibility in ["unlisted", "private"] do
|
||||||
|
[:friends_local, :friends_remote, :user, :friends_local, :friends_remote]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp get_reply_tasks(visibility, group)
|
||||||
|
when visibility in ["unlisted", "private"] and group in @friends_groups do
|
||||||
|
[:user, :friends_remote, :friends_local, :user]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp get_reply_tasks(visibility, group)
|
||||||
|
when visibility in ["unlisted", "private"] and
|
||||||
|
group in @non_friends_groups,
|
||||||
do: []
|
do: []
|
||||||
|
|
||||||
defp get_reply_tasks("direct", "user"), do: ~w(friend user friend)
|
defp get_reply_tasks("direct", :user), do: [:friends_local, :user, :friends_remote]
|
||||||
defp get_reply_tasks("direct", "friends"), do: ~w(user friend user)
|
|
||||||
defp get_reply_tasks("direct", "non_friends"), do: ~w(user non_friend user)
|
|
||||||
|
|
||||||
defp insert_replies(tasks, visibility, user, friends, non_friends, acc) do
|
defp get_reply_tasks("direct", group) when group in @friends_groups,
|
||||||
|
do: [:user, group, :user]
|
||||||
|
|
||||||
|
defp get_reply_tasks("direct", group) when group in @non_friends_groups do
|
||||||
|
[:user, :non_friends_remote, :user, :non_friends_local]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp insert_replies(tasks, visibility, users, acc) do
|
||||||
Enum.reduce(tasks, acc, fn
|
Enum.reduce(tasks, acc, fn
|
||||||
"friend", {id, data} ->
|
:user, {id, data} ->
|
||||||
friend = Enum.random(friends)
|
insert_reply(users[:user], data, id, visibility)
|
||||||
insert_reply(friend, data, id, visibility)
|
|
||||||
|
|
||||||
"non_friend", {id, data} ->
|
group, {id, data} ->
|
||||||
non_friend = Enum.random(non_friends)
|
replier = Enum.random(users[group])
|
||||||
insert_reply(non_friend, data, id, visibility)
|
insert_reply(replier, data, id, visibility)
|
||||||
|
|
||||||
"user", {id, data} ->
|
|
||||||
insert_reply(user, data, id, visibility)
|
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_direct_replies(tasks, user, list, acc) do
|
defp insert_direct_replies(tasks, user, list, acc) do
|
||||||
Enum.reduce(tasks, acc, fn
|
Enum.reduce(tasks, acc, fn
|
||||||
group, {id, data} when group in ["friend", "non_friend"] ->
|
:user, {id, data} ->
|
||||||
|
{reply_id, _} = insert_reply(user, List.delete(data, "@" <> user.nickname), id, "direct")
|
||||||
|
{reply_id, data}
|
||||||
|
|
||||||
|
_, {id, data} ->
|
||||||
actor = Enum.random(list)
|
actor = Enum.random(list)
|
||||||
|
|
||||||
{reply_id, _} =
|
{reply_id, _} =
|
||||||
insert_reply(actor, List.delete(data, "@" <> actor.nickname), id, "direct")
|
insert_reply(actor, List.delete(data, "@" <> actor.nickname), id, "direct")
|
||||||
|
|
||||||
{reply_id, data}
|
{reply_id, data}
|
||||||
|
|
||||||
"user", {id, data} ->
|
|
||||||
{reply_id, _} = insert_reply(user, List.delete(data, "@" <> user.nickname), id, "direct")
|
|
||||||
{reply_id, data}
|
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp insert_reply(actor, data, activity_id, visibility) do
|
defp insert_reply(actor, data, activity_id, visibility) do
|
||||||
{:ok, reply} =
|
{:ok, reply} =
|
||||||
CommonAPI.post(actor, %{
|
CommonAPI.post(actor, %{
|
||||||
"status" => Enum.join(data, ", "),
|
status: Enum.join(data, ", "),
|
||||||
"visibility" => visibility,
|
visibility: visibility,
|
||||||
"in_reply_to_status_id" => activity_id
|
in_reply_to_status_id: activity_id
|
||||||
})
|
})
|
||||||
|
|
||||||
{reply.id, ["@" <> actor.nickname | data]}
|
{reply.id, ["@" <> actor.nickname | data]}
|
||||||
|
|
|
@ -36,6 +36,7 @@ defp fetch_timelines(user) do
|
||||||
fetch_home_timeline(user)
|
fetch_home_timeline(user)
|
||||||
fetch_direct_timeline(user)
|
fetch_direct_timeline(user)
|
||||||
fetch_public_timeline(user)
|
fetch_public_timeline(user)
|
||||||
|
fetch_public_timeline(user, :with_blocks)
|
||||||
fetch_public_timeline(user, :local)
|
fetch_public_timeline(user, :local)
|
||||||
fetch_public_timeline(user, :tag)
|
fetch_public_timeline(user, :tag)
|
||||||
fetch_notifications(user)
|
fetch_notifications(user)
|
||||||
|
@ -51,12 +52,12 @@ defp render_views(user) do
|
||||||
|
|
||||||
defp opts_for_home_timeline(user) do
|
defp opts_for_home_timeline(user) do
|
||||||
%{
|
%{
|
||||||
"blocking_user" => user,
|
blocking_user: user,
|
||||||
"count" => "20",
|
count: "20",
|
||||||
"muting_user" => user,
|
muting_user: user,
|
||||||
"type" => ["Create", "Announce"],
|
type: ["Create", "Announce"],
|
||||||
"user" => user,
|
user: user,
|
||||||
"with_muted" => "true"
|
with_muted: true
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -69,17 +70,17 @@ defp fetch_home_timeline(user) do
|
||||||
ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse() |> List.last()
|
ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse() |> List.last()
|
||||||
|
|
||||||
second_page_last =
|
second_page_last =
|
||||||
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", first_page_last.id))
|
ActivityPub.fetch_activities(recipients, Map.put(opts, :max_id, first_page_last.id))
|
||||||
|> Enum.reverse()
|
|> Enum.reverse()
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
third_page_last =
|
third_page_last =
|
||||||
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", second_page_last.id))
|
ActivityPub.fetch_activities(recipients, Map.put(opts, :max_id, second_page_last.id))
|
||||||
|> Enum.reverse()
|
|> Enum.reverse()
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
forth_page_last =
|
forth_page_last =
|
||||||
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", third_page_last.id))
|
ActivityPub.fetch_activities(recipients, Map.put(opts, :max_id, third_page_last.id))
|
||||||
|> Enum.reverse()
|
|> Enum.reverse()
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
|
@ -89,19 +90,19 @@ defp fetch_home_timeline(user) do
|
||||||
},
|
},
|
||||||
inputs: %{
|
inputs: %{
|
||||||
"1 page" => opts,
|
"1 page" => opts,
|
||||||
"2 page" => Map.put(opts, "max_id", first_page_last.id),
|
"2 page" => Map.put(opts, :max_id, first_page_last.id),
|
||||||
"3 page" => Map.put(opts, "max_id", second_page_last.id),
|
"3 page" => Map.put(opts, :max_id, second_page_last.id),
|
||||||
"4 page" => Map.put(opts, "max_id", third_page_last.id),
|
"4 page" => Map.put(opts, :max_id, third_page_last.id),
|
||||||
"5 page" => Map.put(opts, "max_id", forth_page_last.id),
|
"5 page" => Map.put(opts, :max_id, forth_page_last.id),
|
||||||
"1 page only media" => Map.put(opts, "only_media", "true"),
|
"1 page only media" => Map.put(opts, :only_media, true),
|
||||||
"2 page only media" =>
|
"2 page only media" =>
|
||||||
Map.put(opts, "max_id", first_page_last.id) |> Map.put("only_media", "true"),
|
Map.put(opts, :max_id, first_page_last.id) |> Map.put(:only_media, true),
|
||||||
"3 page only media" =>
|
"3 page only media" =>
|
||||||
Map.put(opts, "max_id", second_page_last.id) |> Map.put("only_media", "true"),
|
Map.put(opts, :max_id, second_page_last.id) |> Map.put(:only_media, true),
|
||||||
"4 page only media" =>
|
"4 page only media" =>
|
||||||
Map.put(opts, "max_id", third_page_last.id) |> Map.put("only_media", "true"),
|
Map.put(opts, :max_id, third_page_last.id) |> Map.put(:only_media, true),
|
||||||
"5 page only media" =>
|
"5 page only media" =>
|
||||||
Map.put(opts, "max_id", forth_page_last.id) |> Map.put("only_media", "true")
|
Map.put(opts, :max_id, forth_page_last.id) |> Map.put(:only_media, true)
|
||||||
},
|
},
|
||||||
formatters: formatters()
|
formatters: formatters()
|
||||||
)
|
)
|
||||||
|
@ -109,12 +110,12 @@ defp fetch_home_timeline(user) do
|
||||||
|
|
||||||
defp opts_for_direct_timeline(user) do
|
defp opts_for_direct_timeline(user) do
|
||||||
%{
|
%{
|
||||||
:visibility => "direct",
|
visibility: "direct",
|
||||||
"blocking_user" => user,
|
blocking_user: user,
|
||||||
"count" => "20",
|
count: "20",
|
||||||
"type" => "Create",
|
type: "Create",
|
||||||
"user" => user,
|
user: user,
|
||||||
"with_muted" => "true"
|
with_muted: true
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -129,7 +130,7 @@ defp fetch_direct_timeline(user) do
|
||||||
|> Pagination.fetch_paginated(opts)
|
|> Pagination.fetch_paginated(opts)
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
opts2 = Map.put(opts, "max_id", first_page_last.id)
|
opts2 = Map.put(opts, :max_id, first_page_last.id)
|
||||||
|
|
||||||
second_page_last =
|
second_page_last =
|
||||||
recipients
|
recipients
|
||||||
|
@ -137,7 +138,7 @@ defp fetch_direct_timeline(user) do
|
||||||
|> Pagination.fetch_paginated(opts2)
|
|> Pagination.fetch_paginated(opts2)
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
opts3 = Map.put(opts, "max_id", second_page_last.id)
|
opts3 = Map.put(opts, :max_id, second_page_last.id)
|
||||||
|
|
||||||
third_page_last =
|
third_page_last =
|
||||||
recipients
|
recipients
|
||||||
|
@ -145,7 +146,7 @@ defp fetch_direct_timeline(user) do
|
||||||
|> Pagination.fetch_paginated(opts3)
|
|> Pagination.fetch_paginated(opts3)
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
opts4 = Map.put(opts, "max_id", third_page_last.id)
|
opts4 = Map.put(opts, :max_id, third_page_last.id)
|
||||||
|
|
||||||
forth_page_last =
|
forth_page_last =
|
||||||
recipients
|
recipients
|
||||||
|
@ -164,7 +165,7 @@ defp fetch_direct_timeline(user) do
|
||||||
"2 page" => opts2,
|
"2 page" => opts2,
|
||||||
"3 page" => opts3,
|
"3 page" => opts3,
|
||||||
"4 page" => opts4,
|
"4 page" => opts4,
|
||||||
"5 page" => Map.put(opts4, "max_id", forth_page_last.id)
|
"5 page" => Map.put(opts4, :max_id, forth_page_last.id)
|
||||||
},
|
},
|
||||||
formatters: formatters()
|
formatters: formatters()
|
||||||
)
|
)
|
||||||
|
@ -172,34 +173,34 @@ defp fetch_direct_timeline(user) do
|
||||||
|
|
||||||
defp opts_for_public_timeline(user) do
|
defp opts_for_public_timeline(user) do
|
||||||
%{
|
%{
|
||||||
"type" => ["Create", "Announce"],
|
type: ["Create", "Announce"],
|
||||||
"local_only" => false,
|
local_only: false,
|
||||||
"blocking_user" => user,
|
blocking_user: user,
|
||||||
"muting_user" => user
|
muting_user: user
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp opts_for_public_timeline(user, :local) do
|
defp opts_for_public_timeline(user, :local) do
|
||||||
%{
|
%{
|
||||||
"type" => ["Create", "Announce"],
|
type: ["Create", "Announce"],
|
||||||
"local_only" => true,
|
local_only: true,
|
||||||
"blocking_user" => user,
|
blocking_user: user,
|
||||||
"muting_user" => user
|
muting_user: user
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp opts_for_public_timeline(user, :tag) do
|
defp opts_for_public_timeline(user, :tag) do
|
||||||
%{
|
%{
|
||||||
"blocking_user" => user,
|
blocking_user: user,
|
||||||
"count" => "20",
|
count: "20",
|
||||||
"local_only" => nil,
|
local_only: nil,
|
||||||
"muting_user" => user,
|
muting_user: user,
|
||||||
"tag" => ["tag"],
|
tag: ["tag"],
|
||||||
"tag_all" => [],
|
tag_all: [],
|
||||||
"tag_reject" => [],
|
tag_reject: [],
|
||||||
"type" => "Create",
|
type: "Create",
|
||||||
"user" => user,
|
user: user,
|
||||||
"with_muted" => "true"
|
with_muted: true
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -222,24 +223,72 @@ defp fetch_public_timeline(user, :tag) do
|
||||||
end
|
end
|
||||||
|
|
||||||
defp fetch_public_timeline(user, :only_media) do
|
defp fetch_public_timeline(user, :only_media) do
|
||||||
opts = opts_for_public_timeline(user) |> Map.put("only_media", "true")
|
opts = opts_for_public_timeline(user) |> Map.put(:only_media, true)
|
||||||
|
|
||||||
fetch_public_timeline(opts, "public timeline only media")
|
fetch_public_timeline(opts, "public timeline only media")
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp fetch_public_timeline(user, :with_blocks) do
|
||||||
|
opts = opts_for_public_timeline(user)
|
||||||
|
|
||||||
|
remote_non_friends = Agent.get(:non_friends_remote, & &1)
|
||||||
|
|
||||||
|
Benchee.run(%{
|
||||||
|
"public timeline without blocks" => fn ->
|
||||||
|
ActivityPub.fetch_public_activities(opts)
|
||||||
|
end
|
||||||
|
})
|
||||||
|
|
||||||
|
Enum.each(remote_non_friends, fn non_friend ->
|
||||||
|
{:ok, _} = User.block(user, non_friend)
|
||||||
|
end)
|
||||||
|
|
||||||
|
user = User.get_by_id(user.id)
|
||||||
|
|
||||||
|
opts = Map.put(opts, :blocking_user, user)
|
||||||
|
|
||||||
|
Benchee.run(%{
|
||||||
|
"public timeline with user block" => fn ->
|
||||||
|
ActivityPub.fetch_public_activities(opts)
|
||||||
|
end
|
||||||
|
})
|
||||||
|
|
||||||
|
domains =
|
||||||
|
Enum.reduce(remote_non_friends, [], fn non_friend, domains ->
|
||||||
|
{:ok, _user} = User.unblock(user, non_friend)
|
||||||
|
%{host: host} = URI.parse(non_friend.ap_id)
|
||||||
|
[host | domains]
|
||||||
|
end)
|
||||||
|
|
||||||
|
domains = Enum.uniq(domains)
|
||||||
|
|
||||||
|
Enum.each(domains, fn domain ->
|
||||||
|
{:ok, _} = User.block_domain(user, domain)
|
||||||
|
end)
|
||||||
|
|
||||||
|
user = User.get_by_id(user.id)
|
||||||
|
opts = Map.put(opts, :blocking_user, user)
|
||||||
|
|
||||||
|
Benchee.run(%{
|
||||||
|
"public timeline with domain block" => fn ->
|
||||||
|
ActivityPub.fetch_public_activities(opts)
|
||||||
|
end
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
defp fetch_public_timeline(opts, title) when is_binary(title) do
|
defp fetch_public_timeline(opts, title) when is_binary(title) do
|
||||||
first_page_last = ActivityPub.fetch_public_activities(opts) |> List.last()
|
first_page_last = ActivityPub.fetch_public_activities(opts) |> List.last()
|
||||||
|
|
||||||
second_page_last =
|
second_page_last =
|
||||||
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", first_page_last.id))
|
ActivityPub.fetch_public_activities(Map.put(opts, :max_id, first_page_last.id))
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
third_page_last =
|
third_page_last =
|
||||||
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", second_page_last.id))
|
ActivityPub.fetch_public_activities(Map.put(opts, :max_id, second_page_last.id))
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
forth_page_last =
|
forth_page_last =
|
||||||
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", third_page_last.id))
|
ActivityPub.fetch_public_activities(Map.put(opts, :max_id, third_page_last.id))
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
|
@ -250,17 +299,17 @@ defp fetch_public_timeline(opts, title) when is_binary(title) do
|
||||||
},
|
},
|
||||||
inputs: %{
|
inputs: %{
|
||||||
"1 page" => opts,
|
"1 page" => opts,
|
||||||
"2 page" => Map.put(opts, "max_id", first_page_last.id),
|
"2 page" => Map.put(opts, :max_id, first_page_last.id),
|
||||||
"3 page" => Map.put(opts, "max_id", second_page_last.id),
|
"3 page" => Map.put(opts, :max_id, second_page_last.id),
|
||||||
"4 page" => Map.put(opts, "max_id", third_page_last.id),
|
"4 page" => Map.put(opts, :max_id, third_page_last.id),
|
||||||
"5 page" => Map.put(opts, "max_id", forth_page_last.id)
|
"5 page" => Map.put(opts, :max_id, forth_page_last.id)
|
||||||
},
|
},
|
||||||
formatters: formatters()
|
formatters: formatters()
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp opts_for_notifications do
|
defp opts_for_notifications do
|
||||||
%{"count" => "20", "with_muted" => "true"}
|
%{count: "20", with_muted: true}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp fetch_notifications(user) do
|
defp fetch_notifications(user) do
|
||||||
|
@ -269,15 +318,15 @@ defp fetch_notifications(user) do
|
||||||
first_page_last = MastodonAPI.get_notifications(user, opts) |> List.last()
|
first_page_last = MastodonAPI.get_notifications(user, opts) |> List.last()
|
||||||
|
|
||||||
second_page_last =
|
second_page_last =
|
||||||
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", first_page_last.id))
|
MastodonAPI.get_notifications(user, Map.put(opts, :max_id, first_page_last.id))
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
third_page_last =
|
third_page_last =
|
||||||
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", second_page_last.id))
|
MastodonAPI.get_notifications(user, Map.put(opts, :max_id, second_page_last.id))
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
forth_page_last =
|
forth_page_last =
|
||||||
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", third_page_last.id))
|
MastodonAPI.get_notifications(user, Map.put(opts, :max_id, third_page_last.id))
|
||||||
|> List.last()
|
|> List.last()
|
||||||
|
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
|
@ -288,10 +337,10 @@ defp fetch_notifications(user) do
|
||||||
},
|
},
|
||||||
inputs: %{
|
inputs: %{
|
||||||
"1 page" => opts,
|
"1 page" => opts,
|
||||||
"2 page" => Map.put(opts, "max_id", first_page_last.id),
|
"2 page" => Map.put(opts, :max_id, first_page_last.id),
|
||||||
"3 page" => Map.put(opts, "max_id", second_page_last.id),
|
"3 page" => Map.put(opts, :max_id, second_page_last.id),
|
||||||
"4 page" => Map.put(opts, "max_id", third_page_last.id),
|
"4 page" => Map.put(opts, :max_id, third_page_last.id),
|
||||||
"5 page" => Map.put(opts, "max_id", forth_page_last.id)
|
"5 page" => Map.put(opts, :max_id, forth_page_last.id)
|
||||||
},
|
},
|
||||||
formatters: formatters()
|
formatters: formatters()
|
||||||
)
|
)
|
||||||
|
@ -301,13 +350,13 @@ defp fetch_favourites(user) do
|
||||||
first_page_last = ActivityPub.fetch_favourites(user) |> List.last()
|
first_page_last = ActivityPub.fetch_favourites(user) |> List.last()
|
||||||
|
|
||||||
second_page_last =
|
second_page_last =
|
||||||
ActivityPub.fetch_favourites(user, %{"max_id" => first_page_last.id}) |> List.last()
|
ActivityPub.fetch_favourites(user, %{:max_id => first_page_last.id}) |> List.last()
|
||||||
|
|
||||||
third_page_last =
|
third_page_last =
|
||||||
ActivityPub.fetch_favourites(user, %{"max_id" => second_page_last.id}) |> List.last()
|
ActivityPub.fetch_favourites(user, %{:max_id => second_page_last.id}) |> List.last()
|
||||||
|
|
||||||
forth_page_last =
|
forth_page_last =
|
||||||
ActivityPub.fetch_favourites(user, %{"max_id" => third_page_last.id}) |> List.last()
|
ActivityPub.fetch_favourites(user, %{:max_id => third_page_last.id}) |> List.last()
|
||||||
|
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
|
@ -317,10 +366,10 @@ defp fetch_favourites(user) do
|
||||||
},
|
},
|
||||||
inputs: %{
|
inputs: %{
|
||||||
"1 page" => %{},
|
"1 page" => %{},
|
||||||
"2 page" => %{"max_id" => first_page_last.id},
|
"2 page" => %{:max_id => first_page_last.id},
|
||||||
"3 page" => %{"max_id" => second_page_last.id},
|
"3 page" => %{:max_id => second_page_last.id},
|
||||||
"4 page" => %{"max_id" => third_page_last.id},
|
"4 page" => %{:max_id => third_page_last.id},
|
||||||
"5 page" => %{"max_id" => forth_page_last.id}
|
"5 page" => %{:max_id => forth_page_last.id}
|
||||||
},
|
},
|
||||||
formatters: formatters()
|
formatters: formatters()
|
||||||
)
|
)
|
||||||
|
@ -328,8 +377,8 @@ defp fetch_favourites(user) do
|
||||||
|
|
||||||
defp opts_for_long_thread(user) do
|
defp opts_for_long_thread(user) do
|
||||||
%{
|
%{
|
||||||
"blocking_user" => user,
|
blocking_user: user,
|
||||||
"user" => user
|
user: user
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -339,9 +388,9 @@ defp fetch_long_thread(user) do
|
||||||
|
|
||||||
opts = opts_for_long_thread(user)
|
opts = opts_for_long_thread(user)
|
||||||
|
|
||||||
private_input = {private.data["context"], Map.put(opts, "exclude_id", private.id)}
|
private_input = {private.data["context"], Map.put(opts, :exclude_id, private.id)}
|
||||||
|
|
||||||
public_input = {public.data["context"], Map.put(opts, "exclude_id", public.id)}
|
public_input = {public.data["context"], Map.put(opts, :exclude_id, public.id)}
|
||||||
|
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
|
@ -387,56 +436,47 @@ defp render_timelines(user) do
|
||||||
|
|
||||||
favourites = ActivityPub.fetch_favourites(user)
|
favourites = ActivityPub.fetch_favourites(user)
|
||||||
|
|
||||||
output_relationships =
|
|
||||||
!!Pleroma.Config.get([:extensions, :output_relationships_in_statuses_by_default])
|
|
||||||
|
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
"Rendering home timeline" => fn ->
|
"Rendering home timeline" => fn ->
|
||||||
StatusView.render("index.json", %{
|
StatusView.render("index.json", %{
|
||||||
activities: home_activities,
|
activities: home_activities,
|
||||||
for: user,
|
for: user,
|
||||||
as: :activity,
|
as: :activity
|
||||||
skip_relationships: !output_relationships
|
|
||||||
})
|
})
|
||||||
end,
|
end,
|
||||||
"Rendering direct timeline" => fn ->
|
"Rendering direct timeline" => fn ->
|
||||||
StatusView.render("index.json", %{
|
StatusView.render("index.json", %{
|
||||||
activities: direct_activities,
|
activities: direct_activities,
|
||||||
for: user,
|
for: user,
|
||||||
as: :activity,
|
as: :activity
|
||||||
skip_relationships: !output_relationships
|
|
||||||
})
|
})
|
||||||
end,
|
end,
|
||||||
"Rendering public timeline" => fn ->
|
"Rendering public timeline" => fn ->
|
||||||
StatusView.render("index.json", %{
|
StatusView.render("index.json", %{
|
||||||
activities: public_activities,
|
activities: public_activities,
|
||||||
for: user,
|
for: user,
|
||||||
as: :activity,
|
as: :activity
|
||||||
skip_relationships: !output_relationships
|
|
||||||
})
|
})
|
||||||
end,
|
end,
|
||||||
"Rendering tag timeline" => fn ->
|
"Rendering tag timeline" => fn ->
|
||||||
StatusView.render("index.json", %{
|
StatusView.render("index.json", %{
|
||||||
activities: tag_activities,
|
activities: tag_activities,
|
||||||
for: user,
|
for: user,
|
||||||
as: :activity,
|
as: :activity
|
||||||
skip_relationships: !output_relationships
|
|
||||||
})
|
})
|
||||||
end,
|
end,
|
||||||
"Rendering notifications" => fn ->
|
"Rendering notifications" => fn ->
|
||||||
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
|
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
|
||||||
notifications: notifications,
|
notifications: notifications,
|
||||||
for: user,
|
for: user
|
||||||
skip_relationships: !output_relationships
|
|
||||||
})
|
})
|
||||||
end,
|
end,
|
||||||
"Rendering favourites timeline" => fn ->
|
"Rendering favourites timeline" => fn ->
|
||||||
StatusView.render("index.json", %{
|
StatusView.render("index.json", %{
|
||||||
activities: favourites,
|
activities: favourites,
|
||||||
for: user,
|
for: user,
|
||||||
as: :activity,
|
as: :activity
|
||||||
skip_relationships: !output_relationships
|
|
||||||
})
|
})
|
||||||
end
|
end
|
||||||
},
|
},
|
||||||
|
@ -470,13 +510,13 @@ defp render_long_thread(user) do
|
||||||
public_context =
|
public_context =
|
||||||
ActivityPub.fetch_activities_for_context(
|
ActivityPub.fetch_activities_for_context(
|
||||||
public.data["context"],
|
public.data["context"],
|
||||||
Map.put(fetch_opts, "exclude_id", public.id)
|
Map.put(fetch_opts, :exclude_id, public.id)
|
||||||
)
|
)
|
||||||
|
|
||||||
private_context =
|
private_context =
|
||||||
ActivityPub.fetch_activities_for_context(
|
ActivityPub.fetch_activities_for_context(
|
||||||
private.data["context"],
|
private.data["context"],
|
||||||
Map.put(fetch_opts, "exclude_id", private.id)
|
Map.put(fetch_opts, :exclude_id, private.id)
|
||||||
)
|
)
|
||||||
|
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
|
@ -507,14 +547,14 @@ defp fetch_timelines_with_reply_filtering(user) do
|
||||||
end,
|
end,
|
||||||
"Public timeline with reply filtering - following" => fn ->
|
"Public timeline with reply filtering - following" => fn ->
|
||||||
public_params
|
public_params
|
||||||
|> Map.put("reply_visibility", "following")
|
|> Map.put(:reply_visibility, "following")
|
||||||
|> Map.put("reply_filtering_user", user)
|
|> Map.put(:reply_filtering_user, user)
|
||||||
|> ActivityPub.fetch_public_activities()
|
|> ActivityPub.fetch_public_activities()
|
||||||
end,
|
end,
|
||||||
"Public timeline with reply filtering - self" => fn ->
|
"Public timeline with reply filtering - self" => fn ->
|
||||||
public_params
|
public_params
|
||||||
|> Map.put("reply_visibility", "self")
|
|> Map.put(:reply_visibility, "self")
|
||||||
|> Map.put("reply_filtering_user", user)
|
|> Map.put(:reply_filtering_user, user)
|
||||||
|> ActivityPub.fetch_public_activities()
|
|> ActivityPub.fetch_public_activities()
|
||||||
end
|
end
|
||||||
},
|
},
|
||||||
|
@ -533,16 +573,16 @@ defp fetch_timelines_with_reply_filtering(user) do
|
||||||
"Home timeline with reply filtering - following" => fn ->
|
"Home timeline with reply filtering - following" => fn ->
|
||||||
private_params =
|
private_params =
|
||||||
private_params
|
private_params
|
||||||
|> Map.put("reply_filtering_user", user)
|
|> Map.put(:reply_filtering_user, user)
|
||||||
|> Map.put("reply_visibility", "following")
|
|> Map.put(:reply_visibility, "following")
|
||||||
|
|
||||||
ActivityPub.fetch_activities(recipients, private_params)
|
ActivityPub.fetch_activities(recipients, private_params)
|
||||||
end,
|
end,
|
||||||
"Home timeline with reply filtering - self" => fn ->
|
"Home timeline with reply filtering - self" => fn ->
|
||||||
private_params =
|
private_params =
|
||||||
private_params
|
private_params
|
||||||
|> Map.put("reply_filtering_user", user)
|
|> Map.put(:reply_filtering_user, user)
|
||||||
|> Map.put("reply_visibility", "self")
|
|> Map.put(:reply_visibility, "self")
|
||||||
|
|
||||||
ActivityPub.fetch_activities(recipients, private_params)
|
ActivityPub.fetch_activities(recipients, private_params)
|
||||||
end
|
end
|
||||||
|
|
|
@ -27,7 +27,7 @@ def generate(opts \\ []) do
|
||||||
|
|
||||||
make_friends(main_user, opts[:friends])
|
make_friends(main_user, opts[:friends])
|
||||||
|
|
||||||
Repo.get(User, main_user.id)
|
User.get_by_id(main_user.id)
|
||||||
end
|
end
|
||||||
|
|
||||||
def generate_users(max) do
|
def generate_users(max) do
|
||||||
|
@ -166,4 +166,24 @@ defp run_stream(users, main_user) do
|
||||||
)
|
)
|
||||||
|> Stream.run()
|
|> Stream.run()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec prepare_users(User.t(), keyword()) :: map()
|
||||||
|
def prepare_users(user, opts) do
|
||||||
|
friends_limit = opts[:friends_used]
|
||||||
|
non_friends_limit = opts[:non_friends_used]
|
||||||
|
|
||||||
|
%{
|
||||||
|
user: user,
|
||||||
|
friends_local: fetch_users(user, friends_limit, :local, true),
|
||||||
|
friends_remote: fetch_users(user, friends_limit, :external, true),
|
||||||
|
non_friends_local: fetch_users(user, non_friends_limit, :local, false),
|
||||||
|
non_friends_remote: fetch_users(user, non_friends_limit, :external, false)
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fetch_users(user, limit, local, friends?) do
|
||||||
|
user
|
||||||
|
|> get_users(limit: limit, local: local, friends?: friends?)
|
||||||
|
|> Enum.shuffle()
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -5,7 +5,6 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
|
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.Web.MastodonAPI.TimelineController
|
|
||||||
|
|
||||||
def run(_args) do
|
def run(_args) do
|
||||||
Mix.Pleroma.start_pleroma()
|
Mix.Pleroma.start_pleroma()
|
||||||
|
@ -37,7 +36,7 @@ def run(_args) do
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
"Hashtag fetching, any" => fn tags ->
|
"Hashtag fetching, any" => fn tags ->
|
||||||
TimelineController.hashtag_fetching(
|
hashtag_fetching(
|
||||||
%{
|
%{
|
||||||
"any" => tags
|
"any" => tags
|
||||||
},
|
},
|
||||||
|
@ -47,7 +46,7 @@ def run(_args) do
|
||||||
end,
|
end,
|
||||||
# Will always return zero results because no overlapping hashtags are generated.
|
# Will always return zero results because no overlapping hashtags are generated.
|
||||||
"Hashtag fetching, all" => fn tags ->
|
"Hashtag fetching, all" => fn tags ->
|
||||||
TimelineController.hashtag_fetching(
|
hashtag_fetching(
|
||||||
%{
|
%{
|
||||||
"all" => tags
|
"all" => tags
|
||||||
},
|
},
|
||||||
|
@ -67,7 +66,7 @@ def run(_args) do
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
"Hashtag fetching" => fn tag ->
|
"Hashtag fetching" => fn tag ->
|
||||||
TimelineController.hashtag_fetching(
|
hashtag_fetching(
|
||||||
%{
|
%{
|
||||||
"tag" => tag
|
"tag" => tag
|
||||||
},
|
},
|
||||||
|
@ -80,4 +79,35 @@ def run(_args) do
|
||||||
time: 5
|
time: 5
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp hashtag_fetching(params, user, local_only) do
|
||||||
|
tags =
|
||||||
|
[params["tag"], params["any"]]
|
||||||
|
|> List.flatten()
|
||||||
|
|> Enum.uniq()
|
||||||
|
|> Enum.filter(& &1)
|
||||||
|
|> Enum.map(&String.downcase(&1))
|
||||||
|
|
||||||
|
tag_all =
|
||||||
|
params
|
||||||
|
|> Map.get("all", [])
|
||||||
|
|> Enum.map(&String.downcase(&1))
|
||||||
|
|
||||||
|
tag_reject =
|
||||||
|
params
|
||||||
|
|> Map.get("none", [])
|
||||||
|
|> Enum.map(&String.downcase(&1))
|
||||||
|
|
||||||
|
_activities =
|
||||||
|
params
|
||||||
|
|> Map.put(:type, "Create")
|
||||||
|
|> Map.put(:local_only, local_only)
|
||||||
|
|> Map.put(:blocking_user, user)
|
||||||
|
|> Map.put(:muting_user, user)
|
||||||
|
|> Map.put(:user, user)
|
||||||
|
|> Map.put(:tag, tags)
|
||||||
|
|> Map.put(:tag_all, tag_all)
|
||||||
|
|> Map.put(:tag_reject, tag_reject)
|
||||||
|
|> Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities()
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -71,7 +71,8 @@
|
||||||
follow_redirect: true,
|
follow_redirect: true,
|
||||||
pool: :upload
|
pool: :upload
|
||||||
]
|
]
|
||||||
]
|
],
|
||||||
|
filename_display_max_length: 30
|
||||||
|
|
||||||
config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
|
config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
|
||||||
|
|
||||||
|
@ -170,7 +171,8 @@
|
||||||
"application/ld+json" => ["activity+json"]
|
"application/ld+json" => ["activity+json"]
|
||||||
}
|
}
|
||||||
|
|
||||||
config :tesla, adapter: Tesla.Adapter.Gun
|
config :tesla, adapter: Tesla.Adapter.Hackney
|
||||||
|
|
||||||
# Configures http settings, upstream proxy etc.
|
# Configures http settings, upstream proxy etc.
|
||||||
config :pleroma, :http,
|
config :pleroma, :http,
|
||||||
proxy_url: nil,
|
proxy_url: nil,
|
||||||
|
@ -182,7 +184,9 @@
|
||||||
name: "Pleroma",
|
name: "Pleroma",
|
||||||
email: "example@example.com",
|
email: "example@example.com",
|
||||||
notify_email: "noreply@example.com",
|
notify_email: "noreply@example.com",
|
||||||
description: "A Pleroma instance, an alternative fediverse server",
|
description: "Pleroma: An efficient and flexible fediverse server",
|
||||||
|
background_image: "/images/city.jpg",
|
||||||
|
instance_thumbnail: "/instance/thumbnail.jpeg",
|
||||||
limit: 5_000,
|
limit: 5_000,
|
||||||
chat_limit: 5_000,
|
chat_limit: 5_000,
|
||||||
remote_limit: 100_000,
|
remote_limit: 100_000,
|
||||||
|
@ -206,7 +210,6 @@
|
||||||
Pleroma.Web.ActivityPub.Publisher
|
Pleroma.Web.ActivityPub.Publisher
|
||||||
],
|
],
|
||||||
allow_relay: true,
|
allow_relay: true,
|
||||||
rewrite_policy: Pleroma.Web.ActivityPub.MRF.NoOpPolicy,
|
|
||||||
public: true,
|
public: true,
|
||||||
quarantined_instances: [],
|
quarantined_instances: [],
|
||||||
managed_config: true,
|
managed_config: true,
|
||||||
|
@ -217,8 +220,6 @@
|
||||||
"text/markdown",
|
"text/markdown",
|
||||||
"text/bbcode"
|
"text/bbcode"
|
||||||
],
|
],
|
||||||
mrf_transparency: true,
|
|
||||||
mrf_transparency_exclusions: [],
|
|
||||||
autofollowed_nicknames: [],
|
autofollowed_nicknames: [],
|
||||||
max_pinned_statuses: 1,
|
max_pinned_statuses: 1,
|
||||||
attachment_links: false,
|
attachment_links: false,
|
||||||
|
@ -251,8 +252,6 @@
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
|
|
||||||
config :pleroma, :extensions, output_relationships_in_statuses_by_default: true
|
|
||||||
|
|
||||||
config :pleroma, :feed,
|
config :pleroma, :feed,
|
||||||
post_title: %{
|
post_title: %{
|
||||||
max_length: 100,
|
max_length: 100,
|
||||||
|
@ -273,20 +272,33 @@
|
||||||
|
|
||||||
config :pleroma, :frontend_configurations,
|
config :pleroma, :frontend_configurations,
|
||||||
pleroma_fe: %{
|
pleroma_fe: %{
|
||||||
theme: "pleroma-dark",
|
alwaysShowSubjectInput: true,
|
||||||
logo: "/static/logo.png",
|
|
||||||
background: "/images/city.jpg",
|
background: "/images/city.jpg",
|
||||||
redirectRootNoLogin: "/main/all",
|
|
||||||
redirectRootLogin: "/main/friends",
|
|
||||||
showInstanceSpecificPanel: true,
|
|
||||||
scopeOptionsEnabled: false,
|
|
||||||
formattingOptionsEnabled: false,
|
|
||||||
collapseMessageWithSubject: false,
|
collapseMessageWithSubject: false,
|
||||||
|
disableChat: false,
|
||||||
|
greentext: false,
|
||||||
|
hideFilteredStatuses: false,
|
||||||
|
hideMutedPosts: false,
|
||||||
hidePostStats: false,
|
hidePostStats: false,
|
||||||
|
hideSitename: false,
|
||||||
hideUserStats: false,
|
hideUserStats: false,
|
||||||
|
loginMethod: "password",
|
||||||
|
logo: "/static/logo.png",
|
||||||
|
logoMargin: ".1em",
|
||||||
|
logoMask: true,
|
||||||
|
minimalScopesMode: false,
|
||||||
|
noAttachmentLinks: false,
|
||||||
|
nsfwCensorImage: "",
|
||||||
|
postContentType: "text/plain",
|
||||||
|
redirectRootLogin: "/main/friends",
|
||||||
|
redirectRootNoLogin: "/main/all",
|
||||||
scopeCopy: true,
|
scopeCopy: true,
|
||||||
|
sidebarRight: false,
|
||||||
|
showFeaturesPanel: true,
|
||||||
|
showInstanceSpecificPanel: false,
|
||||||
subjectLineBehavior: "email",
|
subjectLineBehavior: "email",
|
||||||
alwaysShowSubjectInput: true
|
theme: "pleroma-dark",
|
||||||
|
webPushNotifications: false
|
||||||
},
|
},
|
||||||
masto_fe: %{
|
masto_fe: %{
|
||||||
showInstanceSpecificPanel: true
|
showInstanceSpecificPanel: true
|
||||||
|
@ -357,6 +369,8 @@
|
||||||
|
|
||||||
config :pleroma, :mrf_subchain, match_actor: %{}
|
config :pleroma, :mrf_subchain, match_actor: %{}
|
||||||
|
|
||||||
|
config :pleroma, :mrf_activity_expiration, days: 365
|
||||||
|
|
||||||
config :pleroma, :mrf_vocabulary,
|
config :pleroma, :mrf_vocabulary,
|
||||||
accept: [],
|
accept: [],
|
||||||
reject: []
|
reject: []
|
||||||
|
@ -371,13 +385,16 @@
|
||||||
ignore_tld: ["local", "localdomain", "lan"],
|
ignore_tld: ["local", "localdomain", "lan"],
|
||||||
parsers: [
|
parsers: [
|
||||||
Pleroma.Web.RichMedia.Parsers.TwitterCard,
|
Pleroma.Web.RichMedia.Parsers.TwitterCard,
|
||||||
Pleroma.Web.RichMedia.Parsers.OGP,
|
|
||||||
Pleroma.Web.RichMedia.Parsers.OEmbed
|
Pleroma.Web.RichMedia.Parsers.OEmbed
|
||||||
],
|
],
|
||||||
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
|
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
|
||||||
|
|
||||||
config :pleroma, :media_proxy,
|
config :pleroma, :media_proxy,
|
||||||
enabled: false,
|
enabled: false,
|
||||||
|
invalidation: [
|
||||||
|
enabled: false,
|
||||||
|
provider: Pleroma.Web.MediaProxy.Invalidation.Script
|
||||||
|
],
|
||||||
proxy_opts: [
|
proxy_opts: [
|
||||||
redirect_on_failure: false,
|
redirect_on_failure: false,
|
||||||
max_body_length: 25 * 1_048_576,
|
max_body_length: 25 * 1_048_576,
|
||||||
|
@ -388,6 +405,13 @@
|
||||||
],
|
],
|
||||||
whitelist: []
|
whitelist: []
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
|
||||||
|
method: :purge,
|
||||||
|
headers: [],
|
||||||
|
options: []
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil
|
||||||
|
|
||||||
config :pleroma, :chat, enabled: true
|
config :pleroma, :chat, enabled: true
|
||||||
|
|
||||||
config :phoenix, :format_encoders, json: Jason
|
config :phoenix, :format_encoders, json: Jason
|
||||||
|
@ -410,6 +434,12 @@
|
||||||
],
|
],
|
||||||
unfurl_nsfw: false
|
unfurl_nsfw: false
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Web.Preload,
|
||||||
|
providers: [
|
||||||
|
Pleroma.Web.Preload.Providers.Instance,
|
||||||
|
Pleroma.Web.Preload.Providers.StatusNet
|
||||||
|
]
|
||||||
|
|
||||||
config :pleroma, :http_security,
|
config :pleroma, :http_security,
|
||||||
enabled: true,
|
enabled: true,
|
||||||
sts: false,
|
sts: false,
|
||||||
|
@ -666,6 +696,15 @@
|
||||||
|
|
||||||
config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: false
|
config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: false
|
||||||
|
|
||||||
|
config :pleroma, :mrf,
|
||||||
|
policies: Pleroma.Web.ActivityPub.MRF.NoOpPolicy,
|
||||||
|
transparency: true,
|
||||||
|
transparency_exclusions: []
|
||||||
|
|
||||||
|
config :tzdata, :http_client, Pleroma.HTTP.Tzdata
|
||||||
|
|
||||||
|
config :ex_aws, http_client: Pleroma.HTTP.ExAws
|
||||||
|
|
||||||
# Import environment specific config. This must remain at the bottom
|
# Import environment specific config. This must remain at the bottom
|
||||||
# of this file so it overrides the configuration defined above.
|
# of this file so it overrides the configuration defined above.
|
||||||
import_config "#{Mix.env()}.exs"
|
import_config "#{Mix.env()}.exs"
|
||||||
|
|
|
@ -119,6 +119,11 @@
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :filename_display_max_length,
|
||||||
|
type: :integer,
|
||||||
|
description: "Set max length of a filename to display. 0 = no limit. Default: 30"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -679,31 +684,11 @@
|
||||||
7
|
7
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
%{
|
|
||||||
key: :federation_publisher_modules,
|
|
||||||
type: {:list, :module},
|
|
||||||
description:
|
|
||||||
"List of modules for federation publishing. Module names are shortened (removed leading `Pleroma.Web.` part), but on adding custom module you need to use full name.",
|
|
||||||
suggestions: [
|
|
||||||
Pleroma.Web.ActivityPub.Publisher
|
|
||||||
]
|
|
||||||
},
|
|
||||||
%{
|
%{
|
||||||
key: :allow_relay,
|
key: :allow_relay,
|
||||||
type: :boolean,
|
type: :boolean,
|
||||||
description: "Enable Pleroma's Relay, which makes it possible to follow a whole instance"
|
description: "Enable Pleroma's Relay, which makes it possible to follow a whole instance"
|
||||||
},
|
},
|
||||||
%{
|
|
||||||
key: :rewrite_policy,
|
|
||||||
type: [:module, {:list, :module}],
|
|
||||||
description:
|
|
||||||
"A list of enabled MRF policies. Module names are shortened (removed leading `Pleroma.Web.ActivityPub.MRF.` part), but on adding custom module you need to use full name.",
|
|
||||||
suggestions:
|
|
||||||
Generator.list_modules_in_dir(
|
|
||||||
"lib/pleroma/web/activity_pub/mrf",
|
|
||||||
"Elixir.Pleroma.Web.ActivityPub.MRF."
|
|
||||||
)
|
|
||||||
},
|
|
||||||
%{
|
%{
|
||||||
key: :public,
|
key: :public,
|
||||||
type: :boolean,
|
type: :boolean,
|
||||||
|
@ -746,23 +731,6 @@
|
||||||
"text/bbcode"
|
"text/bbcode"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
%{
|
|
||||||
key: :mrf_transparency,
|
|
||||||
label: "MRF transparency",
|
|
||||||
type: :boolean,
|
|
||||||
description:
|
|
||||||
"Make the content of your Message Rewrite Facility settings public (via nodeinfo)"
|
|
||||||
},
|
|
||||||
%{
|
|
||||||
key: :mrf_transparency_exclusions,
|
|
||||||
label: "MRF transparency exclusions",
|
|
||||||
type: {:list, :string},
|
|
||||||
description:
|
|
||||||
"Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.",
|
|
||||||
suggestions: [
|
|
||||||
"exclusion.com"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
%{
|
%{
|
||||||
key: :extended_nickname_format,
|
key: :extended_nickname_format,
|
||||||
type: :boolean,
|
type: :boolean,
|
||||||
|
@ -978,6 +946,13 @@
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :instance_thumbnail,
|
||||||
|
type: :string,
|
||||||
|
description:
|
||||||
|
"The instance thumbnail can be any image that represents your instance and is used by some apps or services when they display information about your instance.",
|
||||||
|
suggestions: ["/instance/thumbnail.jpeg"]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -1121,11 +1096,12 @@
|
||||||
logoMask: true,
|
logoMask: true,
|
||||||
minimalScopesMode: false,
|
minimalScopesMode: false,
|
||||||
noAttachmentLinks: false,
|
noAttachmentLinks: false,
|
||||||
nsfwCensorImage: "",
|
nsfwCensorImage: "/static/img/nsfw.74818f9.png",
|
||||||
postContentType: "text/plain",
|
postContentType: "text/plain",
|
||||||
redirectRootLogin: "/main/friends",
|
redirectRootLogin: "/main/friends",
|
||||||
redirectRootNoLogin: "/main/all",
|
redirectRootNoLogin: "/main/all",
|
||||||
scopeCopy: true,
|
scopeCopy: true,
|
||||||
|
sidebarRight: false,
|
||||||
showFeaturesPanel: true,
|
showFeaturesPanel: true,
|
||||||
showInstanceSpecificPanel: false,
|
showInstanceSpecificPanel: false,
|
||||||
subjectLineBehavior: "email",
|
subjectLineBehavior: "email",
|
||||||
|
@ -1234,7 +1210,7 @@
|
||||||
type: :string,
|
type: :string,
|
||||||
description:
|
description:
|
||||||
"URL of the image to use for hiding NSFW media attachments in the timeline.",
|
"URL of the image to use for hiding NSFW media attachments in the timeline.",
|
||||||
suggestions: ["/static/img/nsfw.png"]
|
suggestions: ["/static/img/nsfw.74818f9.png"]
|
||||||
},
|
},
|
||||||
%{
|
%{
|
||||||
key: :postContentType,
|
key: :postContentType,
|
||||||
|
@ -1265,6 +1241,12 @@
|
||||||
type: :boolean,
|
type: :boolean,
|
||||||
description: "Copy the scope (private/unlisted/public) in replies to posts by default"
|
description: "Copy the scope (private/unlisted/public) in replies to posts by default"
|
||||||
},
|
},
|
||||||
|
%{
|
||||||
|
key: :sidebarRight,
|
||||||
|
label: "Sidebar on Right",
|
||||||
|
type: :boolean,
|
||||||
|
description: "Change alignment of sidebar and panels to the right."
|
||||||
|
},
|
||||||
%{
|
%{
|
||||||
key: :showFeaturesPanel,
|
key: :showFeaturesPanel,
|
||||||
label: "Show instance features panel",
|
label: "Show instance features panel",
|
||||||
|
@ -1348,6 +1330,12 @@
|
||||||
suggestions: [
|
suggestions: [
|
||||||
:pleroma_fox_tan
|
:pleroma_fox_tan
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :default_user_avatar,
|
||||||
|
type: :string,
|
||||||
|
description: "URL of the default user avatar.",
|
||||||
|
suggestions: ["/images/avi.png"]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -1455,6 +1443,21 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: :mrf_activity_expiration,
|
||||||
|
label: "MRF Activity Expiration Policy",
|
||||||
|
type: :group,
|
||||||
|
description: "Adds expiration to all local Create Note activities",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :days,
|
||||||
|
type: :integer,
|
||||||
|
description: "Default global expiration time for all local Create activities (in days)",
|
||||||
|
suggestions: [90, 365]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
%{
|
%{
|
||||||
group: :pleroma,
|
group: :pleroma,
|
||||||
key: :mrf_subchain,
|
key: :mrf_subchain,
|
||||||
|
@ -1592,14 +1595,12 @@
|
||||||
# %{
|
# %{
|
||||||
# group: :pleroma,
|
# group: :pleroma,
|
||||||
# key: :mrf_user_allowlist,
|
# key: :mrf_user_allowlist,
|
||||||
# type: :group,
|
# type: :map,
|
||||||
# description:
|
# description:
|
||||||
# "The keys in this section are the domain names that the policy should apply to." <>
|
# "The keys in this section are the domain names that the policy should apply to." <>
|
||||||
# " Each key should be assigned a list of users that should be allowed through by their ActivityPub ID",
|
# " Each key should be assigned a list of users that should be allowed through by their ActivityPub ID",
|
||||||
# children: [
|
|
||||||
# ["example.org": ["https://example.org/users/admin"]],
|
|
||||||
# suggestions: [
|
# suggestions: [
|
||||||
# ["example.org": ["https://example.org/users/admin"]]
|
# %{"example.org" => ["https://example.org/users/admin"]}
|
||||||
# ]
|
# ]
|
||||||
# ]
|
# ]
|
||||||
# },
|
# },
|
||||||
|
@ -1621,6 +1622,31 @@
|
||||||
"The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.",
|
"The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.",
|
||||||
suggestions: ["https://example.com"]
|
suggestions: ["https://example.com"]
|
||||||
},
|
},
|
||||||
|
%{
|
||||||
|
key: :invalidation,
|
||||||
|
type: :keyword,
|
||||||
|
descpiption: "",
|
||||||
|
suggestions: [
|
||||||
|
enabled: true,
|
||||||
|
provider: Pleroma.Web.MediaProxy.Invalidation.Script
|
||||||
|
],
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :enabled,
|
||||||
|
type: :boolean,
|
||||||
|
description: "Enables invalidate media cache"
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :provider,
|
||||||
|
type: :module,
|
||||||
|
description: "Module which will be used to cache purge.",
|
||||||
|
suggestions: [
|
||||||
|
Pleroma.Web.MediaProxy.Invalidation.Script,
|
||||||
|
Pleroma.Web.MediaProxy.Invalidation.Http
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
%{
|
%{
|
||||||
key: :proxy_opts,
|
key: :proxy_opts,
|
||||||
type: :keyword,
|
type: :keyword,
|
||||||
|
@ -1693,6 +1719,45 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: Pleroma.Web.MediaProxy.Invalidation.Http,
|
||||||
|
type: :group,
|
||||||
|
description: "HTTP invalidate settings",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :method,
|
||||||
|
type: :atom,
|
||||||
|
description: "HTTP method of request. Default: :purge"
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :headers,
|
||||||
|
type: {:list, :tuple},
|
||||||
|
description: "HTTP headers of request.",
|
||||||
|
suggestions: [{"x-refresh", 1}]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :options,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Request options.",
|
||||||
|
suggestions: [params: %{ts: "xxx"}]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: Pleroma.Web.MediaProxy.Invalidation.Script,
|
||||||
|
type: :group,
|
||||||
|
description: "Script invalidate settings",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :script_path,
|
||||||
|
type: :string,
|
||||||
|
description: "Path to shell script. Which will run purge cache.",
|
||||||
|
suggestions: ["./installation/nginx-cache-purge.sh.example"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
%{
|
%{
|
||||||
group: :pleroma,
|
group: :pleroma,
|
||||||
key: :gopher,
|
key: :gopher,
|
||||||
|
@ -1902,12 +1967,6 @@
|
||||||
(see https://github.com/sorentwo/oban/issues/52).
|
(see https://github.com/sorentwo/oban/issues/52).
|
||||||
""",
|
""",
|
||||||
children: [
|
children: [
|
||||||
%{
|
|
||||||
key: :repo,
|
|
||||||
type: :module,
|
|
||||||
description: "Application's Ecto repo",
|
|
||||||
suggestions: [Pleroma.Repo]
|
|
||||||
},
|
|
||||||
%{
|
%{
|
||||||
key: :verbose,
|
key: :verbose,
|
||||||
type: {:dropdown, :atom},
|
type: {:dropdown, :atom},
|
||||||
|
@ -2081,9 +2140,7 @@
|
||||||
description:
|
description:
|
||||||
"List of Rich Media parsers. Module names are shortened (removed leading `Pleroma.Web.RichMedia.Parsers.` part), but on adding custom module you need to use full name.",
|
"List of Rich Media parsers. Module names are shortened (removed leading `Pleroma.Web.RichMedia.Parsers.` part), but on adding custom module you need to use full name.",
|
||||||
suggestions: [
|
suggestions: [
|
||||||
Pleroma.Web.RichMedia.Parsers.MetaTagsParser,
|
|
||||||
Pleroma.Web.RichMedia.Parsers.OEmbed,
|
Pleroma.Web.RichMedia.Parsers.OEmbed,
|
||||||
Pleroma.Web.RichMedia.Parsers.OGP,
|
|
||||||
Pleroma.Web.RichMedia.Parsers.TwitterCard
|
Pleroma.Web.RichMedia.Parsers.TwitterCard
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -2682,18 +2739,6 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
%{
|
|
||||||
group: :http_signatures,
|
|
||||||
type: :group,
|
|
||||||
description: "HTTP Signatures settings",
|
|
||||||
children: [
|
|
||||||
%{
|
|
||||||
key: :adapter,
|
|
||||||
type: :module,
|
|
||||||
suggestions: [Pleroma.Signature]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
%{
|
%{
|
||||||
group: :pleroma,
|
group: :pleroma,
|
||||||
key: :http,
|
key: :http,
|
||||||
|
@ -3316,5 +3361,41 @@
|
||||||
suggestions: [false]
|
suggestions: [false]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: :mrf,
|
||||||
|
type: :group,
|
||||||
|
description: "General MRF settings",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :policies,
|
||||||
|
type: [:module, {:list, :module}],
|
||||||
|
description:
|
||||||
|
"A list of MRF policies enabled. Module names are shortened (removed leading `Pleroma.Web.ActivityPub.MRF.` part), but on adding custom module you need to use full name.",
|
||||||
|
suggestions:
|
||||||
|
Generator.list_modules_in_dir(
|
||||||
|
"lib/pleroma/web/activity_pub/mrf",
|
||||||
|
"Elixir.Pleroma.Web.ActivityPub.MRF."
|
||||||
|
)
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :transparency,
|
||||||
|
label: "MRF transparency",
|
||||||
|
type: :boolean,
|
||||||
|
description:
|
||||||
|
"Make the content of your Message Rewrite Facility settings public (via nodeinfo)"
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :transparency_exclusions,
|
||||||
|
label: "MRF transparency exclusions",
|
||||||
|
type: {:list, :string},
|
||||||
|
description:
|
||||||
|
"Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.",
|
||||||
|
suggestions: [
|
||||||
|
"exclusion.com"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
|
@ -488,30 +488,52 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
|
||||||
|
|
||||||
### Change the user's email, password, display and settings-related fields
|
### Change the user's email, password, display and settings-related fields
|
||||||
|
|
||||||
- Params:
|
* Params:
|
||||||
- `email`
|
* `email`
|
||||||
- `password`
|
* `password`
|
||||||
- `name`
|
* `name`
|
||||||
- `bio`
|
* `bio`
|
||||||
- `avatar`
|
* `avatar`
|
||||||
- `locked`
|
* `locked`
|
||||||
- `no_rich_text`
|
* `no_rich_text`
|
||||||
- `default_scope`
|
* `default_scope`
|
||||||
- `banner`
|
* `banner`
|
||||||
- `hide_follows`
|
* `hide_follows`
|
||||||
- `hide_followers`
|
* `hide_followers`
|
||||||
- `hide_followers_count`
|
* `hide_followers_count`
|
||||||
- `hide_follows_count`
|
* `hide_follows_count`
|
||||||
- `hide_favorites`
|
* `hide_favorites`
|
||||||
- `allow_following_move`
|
* `allow_following_move`
|
||||||
- `background`
|
* `background`
|
||||||
- `show_role`
|
* `show_role`
|
||||||
- `skip_thread_containment`
|
* `skip_thread_containment`
|
||||||
- `fields`
|
* `fields`
|
||||||
- `discoverable`
|
* `discoverable`
|
||||||
- `actor_type`
|
* `actor_type`
|
||||||
|
|
||||||
- Response: none (code `200`)
|
* Responses:
|
||||||
|
|
||||||
|
Status: 200
|
||||||
|
|
||||||
|
```json
|
||||||
|
{"status": "success"}
|
||||||
|
```
|
||||||
|
|
||||||
|
Status: 400
|
||||||
|
|
||||||
|
```json
|
||||||
|
{"errors":
|
||||||
|
{"actor_type": "is invalid"},
|
||||||
|
{"email": "has invalid format"},
|
||||||
|
...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Status: 404
|
||||||
|
|
||||||
|
```json
|
||||||
|
{"error": "Not found"}
|
||||||
|
```
|
||||||
|
|
||||||
## `GET /api/pleroma/admin/reports`
|
## `GET /api/pleroma/admin/reports`
|
||||||
|
|
||||||
|
@ -531,7 +553,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"totalReports" : 1,
|
"total" : 1,
|
||||||
"reports": [
|
"reports": [
|
||||||
{
|
{
|
||||||
"account": {
|
"account": {
|
||||||
|
@ -752,7 +774,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
|
||||||
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
||||||
- On success: `204`, empty response
|
- On success: `204`, empty response
|
||||||
|
|
||||||
## `POST /api/pleroma/admin/reports/:report_id/notes/:id`
|
## `DELETE /api/pleroma/admin/reports/:report_id/notes/:id`
|
||||||
|
|
||||||
### Delete report note
|
### Delete report note
|
||||||
|
|
||||||
|
@ -1096,6 +1118,10 @@ Loads json generated from `config/descriptions.exs`.
|
||||||
|
|
||||||
### Stats
|
### Stats
|
||||||
|
|
||||||
|
- Query Params:
|
||||||
|
- *optional* `instance`: **string** instance hostname (without protocol) to get stats for
|
||||||
|
- Example: `https://mypleroma.org/api/pleroma/admin/stats?instance=lain.com`
|
||||||
|
|
||||||
- Response:
|
- Response:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
|
@ -1209,3 +1235,65 @@ Loads json generated from `config/descriptions.exs`.
|
||||||
- On success: `204`, empty response
|
- On success: `204`, empty response
|
||||||
- On failure:
|
- On failure:
|
||||||
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
||||||
|
|
||||||
|
## `GET /api/pleroma/admin/media_proxy_caches`
|
||||||
|
|
||||||
|
### Get a list of all banned MediaProxy URLs in Cachex
|
||||||
|
|
||||||
|
- Authentication: required
|
||||||
|
- Params:
|
||||||
|
- *optional* `page`: **integer** page number
|
||||||
|
- *optional* `page_size`: **integer** number of log entries per page (default is `50`)
|
||||||
|
|
||||||
|
- Response:
|
||||||
|
|
||||||
|
``` json
|
||||||
|
{
|
||||||
|
"urls": [
|
||||||
|
"http://example.com/media/a688346.jpg",
|
||||||
|
"http://example.com/media/fb1f4d.jpg"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
## `POST /api/pleroma/admin/media_proxy_caches/delete`
|
||||||
|
|
||||||
|
### Remove a banned MediaProxy URL from Cachex
|
||||||
|
|
||||||
|
- Authentication: required
|
||||||
|
- Params:
|
||||||
|
- `urls` (array)
|
||||||
|
|
||||||
|
- Response:
|
||||||
|
|
||||||
|
``` json
|
||||||
|
{
|
||||||
|
"urls": [
|
||||||
|
"http://example.com/media/a688346.jpg",
|
||||||
|
"http://example.com/media/fb1f4d.jpg"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
## `POST /api/pleroma/admin/media_proxy_caches/purge`
|
||||||
|
|
||||||
|
### Purge a MediaProxy URL
|
||||||
|
|
||||||
|
- Authentication: required
|
||||||
|
- Params:
|
||||||
|
- `urls` (array)
|
||||||
|
- `ban` (boolean)
|
||||||
|
|
||||||
|
- Response:
|
||||||
|
|
||||||
|
``` json
|
||||||
|
{
|
||||||
|
"urls": [
|
||||||
|
"http://example.com/media/a688346.jpg",
|
||||||
|
"http://example.com/media/fb1f4d.jpg"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
|
@ -0,0 +1,248 @@
|
||||||
|
# Chats
|
||||||
|
|
||||||
|
Chats are a way to represent an IM-style conversation between two actors. They are not the same as direct messages and they are not `Status`es, even though they have a lot in common.
|
||||||
|
|
||||||
|
## Why Chats?
|
||||||
|
|
||||||
|
There are no 'visibility levels' in ActivityPub, their definition is purely a Mastodon convention. Direct Messaging between users on the fediverse has mostly been modeled by using ActivityPub addressing following Mastodon conventions on normal `Note` objects. In this case, a 'direct message' would be a message that has no followers addressed and also does not address the special public actor, but just the recipients in the `to` field. It would still be a `Note` and is presented with other `Note`s as a `Status` in the API.
|
||||||
|
|
||||||
|
This is an awkward setup for a few reasons:
|
||||||
|
|
||||||
|
- As DMs generally still follow the usual `Status` conventions, it is easy to accidentally pull somebody into a DM thread by mentioning them. (e.g. "I hate @badguy so much")
|
||||||
|
- It is possible to go from a publicly addressed `Status` to a DM reply, back to public, then to a 'followers only' reply, and so on. This can be become very confusing, as it is unclear which user can see which part of the conversation.
|
||||||
|
- The standard `Status` format of implicit addressing also leads to rather ugly results if you try to display the messages as a chat, because all the recipients are always mentioned by name in the message.
|
||||||
|
- As direct messages are posted with the same api call (and usually same frontend component) as public messages, accidentally making a public message private or vice versa can happen easily. Client bugs can also lead to this, accidentally making private messages public.
|
||||||
|
|
||||||
|
As a measure to improve this situation, the `Conversation` concept and related Pleroma extensions were introduced. While it made it possible to work around a few of the issues, many of the problems remained and it didn't see much adoption because it was too complicated to use correctly.
|
||||||
|
|
||||||
|
## Chats explained
|
||||||
|
For this reasons, Chats are a new and different entity, both in the API as well as in ActivityPub. A quick overview:
|
||||||
|
|
||||||
|
- Chats are meant to represent an instant message conversation between two actors. For now these are only 1-on-1 conversations, but the other actor can be a group in the future.
|
||||||
|
- Chat messages have the ActivityPub type `ChatMessage`. They are not `Note`s. Servers that don't understand them will just drop them.
|
||||||
|
- The only addressing allowed in `ChatMessage`s is one single ActivityPub actor in the `to` field.
|
||||||
|
- There's always only one Chat between two actors. If you start chatting with someone and later start a 'new' Chat, the old Chat will be continued.
|
||||||
|
- `ChatMessage`s are posted with a different api, making it very hard to accidentally send a message to the wrong person.
|
||||||
|
- `ChatMessage`s don't show up in the existing timelines.
|
||||||
|
- Chats can never go from private to public. They are always private between the two actors.
|
||||||
|
|
||||||
|
## Caveats
|
||||||
|
|
||||||
|
- Chats are NOT E2E encrypted (yet). Security is still the same as email.
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
In general, the way to send a `ChatMessage` is to first create a `Chat`, then post a message to that `Chat`. `Group`s will later be supported by making them a sub-type of `Account`.
|
||||||
|
|
||||||
|
This is the overview of using the API. The API is also documented via OpenAPI, so you can view it and play with it by pointing SwaggerUI or a similar OpenAPI tool to `https://yourinstance.tld/api/openapi`.
|
||||||
|
|
||||||
|
### Creating or getting a chat.
|
||||||
|
|
||||||
|
To create or get an existing Chat for a certain recipient (identified by Account ID)
|
||||||
|
you can call:
|
||||||
|
|
||||||
|
`POST /api/v1/pleroma/chats/by-account-id/:account_id`
|
||||||
|
|
||||||
|
The account id is the normal FlakeId of the user
|
||||||
|
```
|
||||||
|
POST /api/v1/pleroma/chats/by-account-id/someflakeid
|
||||||
|
```
|
||||||
|
|
||||||
|
If you already have the id of a chat, you can also use
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/v1/pleroma/chats/:id
|
||||||
|
```
|
||||||
|
|
||||||
|
There will only ever be ONE Chat for you and a given recipient, so this call
|
||||||
|
will return the same Chat if you already have one with that user.
|
||||||
|
|
||||||
|
Returned data:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"account": {
|
||||||
|
"id": "someflakeid",
|
||||||
|
"username": "somenick",
|
||||||
|
...
|
||||||
|
},
|
||||||
|
"id" : "1",
|
||||||
|
"unread" : 2,
|
||||||
|
"last_message" : {...}, // The last message in that chat
|
||||||
|
"updated_at": "2020-04-21T15:11:46.000Z"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Marking a chat as read
|
||||||
|
|
||||||
|
To mark a number of messages in a chat up to a certain message as read, you can use
|
||||||
|
|
||||||
|
`POST /api/v1/pleroma/chats/:id/read`
|
||||||
|
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
- last_read_id: Given this id, all chat messages until this one will be marked as read. Required.
|
||||||
|
|
||||||
|
|
||||||
|
Returned data:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"account": {
|
||||||
|
"id": "someflakeid",
|
||||||
|
"username": "somenick",
|
||||||
|
...
|
||||||
|
},
|
||||||
|
"id" : "1",
|
||||||
|
"unread" : 0,
|
||||||
|
"updated_at": "2020-04-21T15:11:46.000Z"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Marking a single chat message as read
|
||||||
|
|
||||||
|
To set the `unread` property of a message to `false`
|
||||||
|
|
||||||
|
`POST /api/v1/pleroma/chats/:id/messages/:message_id/read`
|
||||||
|
|
||||||
|
Returned data:
|
||||||
|
|
||||||
|
The modified chat message
|
||||||
|
|
||||||
|
### Getting a list of Chats
|
||||||
|
|
||||||
|
`GET /api/v1/pleroma/chats`
|
||||||
|
|
||||||
|
This will return a list of chats that you have been involved in, sorted by their
|
||||||
|
last update (so new chats will be at the top).
|
||||||
|
|
||||||
|
Returned data:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"account": {
|
||||||
|
"id": "someflakeid",
|
||||||
|
"username": "somenick",
|
||||||
|
...
|
||||||
|
},
|
||||||
|
"id" : "1",
|
||||||
|
"unread" : 2,
|
||||||
|
"last_message" : {...}, // The last message in that chat
|
||||||
|
"updated_at": "2020-04-21T15:11:46.000Z"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
The recipient of messages that are sent to this chat is given by their AP ID.
|
||||||
|
No pagination is implemented for now.
|
||||||
|
|
||||||
|
### Getting the messages for a Chat
|
||||||
|
|
||||||
|
For a given Chat id, you can get the associated messages with
|
||||||
|
|
||||||
|
`GET /api/v1/pleroma/chats/:id/messages`
|
||||||
|
|
||||||
|
This will return all messages, sorted by most recent to least recent. The usual
|
||||||
|
pagination options are implemented.
|
||||||
|
|
||||||
|
Returned data:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"account_id": "someflakeid",
|
||||||
|
"chat_id": "1",
|
||||||
|
"content": "Check this out :firefox:",
|
||||||
|
"created_at": "2020-04-21T15:11:46.000Z",
|
||||||
|
"emojis": [
|
||||||
|
{
|
||||||
|
"shortcode": "firefox",
|
||||||
|
"static_url": "https://dontbulling.me/emoji/Firefox.gif",
|
||||||
|
"url": "https://dontbulling.me/emoji/Firefox.gif",
|
||||||
|
"visible_in_picker": false
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"id": "13",
|
||||||
|
"unread": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"account_id": "someflakeid",
|
||||||
|
"chat_id": "1",
|
||||||
|
"content": "Whats' up?",
|
||||||
|
"created_at": "2020-04-21T15:06:45.000Z",
|
||||||
|
"emojis": [],
|
||||||
|
"id": "12",
|
||||||
|
"unread": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Posting a chat message
|
||||||
|
|
||||||
|
Posting a chat message for given Chat id works like this:
|
||||||
|
|
||||||
|
`POST /api/v1/pleroma/chats/:id/messages`
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
- content: The text content of the message. Optional if media is attached.
|
||||||
|
- media_id: The id of an upload that will be attached to the message.
|
||||||
|
|
||||||
|
Currently, no formatting beyond basic escaping and emoji is implemented.
|
||||||
|
|
||||||
|
Returned data:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"account_id": "someflakeid",
|
||||||
|
"chat_id": "1",
|
||||||
|
"content": "Check this out :firefox:",
|
||||||
|
"created_at": "2020-04-21T15:11:46.000Z",
|
||||||
|
"emojis": [
|
||||||
|
{
|
||||||
|
"shortcode": "firefox",
|
||||||
|
"static_url": "https://dontbulling.me/emoji/Firefox.gif",
|
||||||
|
"url": "https://dontbulling.me/emoji/Firefox.gif",
|
||||||
|
"visible_in_picker": false
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"id": "13",
|
||||||
|
"unread": false
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Deleting a chat message
|
||||||
|
|
||||||
|
Deleting a chat message for given Chat id works like this:
|
||||||
|
|
||||||
|
`DELETE /api/v1/pleroma/chats/:chat_id/messages/:message_id`
|
||||||
|
|
||||||
|
Returned data is the deleted message.
|
||||||
|
|
||||||
|
### Notifications
|
||||||
|
|
||||||
|
There's a new `pleroma:chat_mention` notification, which has this form. It is not given out in the notifications endpoint by default, you need to explicitly request it with `include_types[]=pleroma:chat_mention`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "someid",
|
||||||
|
"type": "pleroma:chat_mention",
|
||||||
|
"account": { ... } // User account of the sender,
|
||||||
|
"chat_message": {
|
||||||
|
"chat_id": "1",
|
||||||
|
"id": "10",
|
||||||
|
"content": "Hello",
|
||||||
|
"account_id": "someflakeid",
|
||||||
|
"unread": false
|
||||||
|
},
|
||||||
|
"created_at": "somedate"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Streaming
|
||||||
|
|
||||||
|
There is an additional `user:pleroma_chat` stream. Incoming chat messages will make the current chat be sent to this `user` stream. The `event` of an incoming chat message is `pleroma:chat_update`. The payload is the updated chat with the incoming chat message in the `last_message` field.
|
||||||
|
|
||||||
|
### Web Push
|
||||||
|
|
||||||
|
If you want to receive push messages for this type, you'll need to add the `pleroma:chat_mention` type to your alerts in the push subscription.
|
|
@ -6,10 +6,6 @@ A Pleroma instance can be identified by "<Mastodon version> (compatible; Pleroma
|
||||||
|
|
||||||
Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However just like Mastodon's ids they are lexically sortable strings
|
Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However just like Mastodon's ids they are lexically sortable strings
|
||||||
|
|
||||||
## Attachment cap
|
|
||||||
|
|
||||||
Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting.
|
|
||||||
|
|
||||||
## Timelines
|
## Timelines
|
||||||
|
|
||||||
Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users.
|
Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users.
|
||||||
|
@ -31,13 +27,22 @@ Has these additional fields under the `pleroma` object:
|
||||||
- `expires_at`: a datetime (iso8601) that states when the post will expire (be deleted automatically), or empty if the post won't expire
|
- `expires_at`: a datetime (iso8601) that states when the post will expire (be deleted automatically), or empty if the post won't expire
|
||||||
- `thread_muted`: true if the thread the post belongs to is muted
|
- `thread_muted`: true if the thread the post belongs to is muted
|
||||||
- `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
|
- `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
|
||||||
|
- `parent_visible`: If the parent of this post is visible to the user or not.
|
||||||
|
|
||||||
## Attachments
|
## Media Attachments
|
||||||
|
|
||||||
Has these additional fields under the `pleroma` object:
|
Has these additional fields under the `pleroma` object:
|
||||||
|
|
||||||
- `mime_type`: mime type of the attachment.
|
- `mime_type`: mime type of the attachment.
|
||||||
|
|
||||||
|
### Attachment cap
|
||||||
|
|
||||||
|
Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting.
|
||||||
|
|
||||||
|
### Limitations
|
||||||
|
|
||||||
|
Pleroma does not process remote images and therefore cannot include fields such as `meta` and `blurhash`. It does not support focal points or aspect ratios. The frontend is expected to handle it.
|
||||||
|
|
||||||
## Accounts
|
## Accounts
|
||||||
|
|
||||||
The `id` parameter can also be the `nickname` of the user. This only works in these endpoints, not the deeper nested ones for following etc.
|
The `id` parameter can also be the `nickname` of the user. This only works in these endpoints, not the deeper nested ones for following etc.
|
||||||
|
@ -47,11 +52,14 @@ The `id` parameter can also be the `nickname` of the user. This only works in th
|
||||||
|
|
||||||
Has these additional fields under the `pleroma` object:
|
Has these additional fields under the `pleroma` object:
|
||||||
|
|
||||||
|
- `ap_id`: nullable URL string, ActivityPub id of the user
|
||||||
|
- `background_image`: nullable URL string, background image of the user
|
||||||
- `tags`: Lists an array of tags for the user
|
- `tags`: Lists an array of tags for the user
|
||||||
- `relationship{}`: Includes fields as documented for Mastodon API https://docs.joinmastodon.org/entities/relationship/
|
- `relationship` (object): Includes fields as documented for Mastodon API https://docs.joinmastodon.org/entities/relationship/
|
||||||
- `is_moderator`: boolean, nullable, true if user is a moderator
|
- `is_moderator`: boolean, nullable, true if user is a moderator
|
||||||
- `is_admin`: boolean, nullable, true if user is an admin
|
- `is_admin`: boolean, nullable, true if user is an admin
|
||||||
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
|
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
|
||||||
|
- `hide_favorites`: boolean, true when the user has hiding favorites enabled
|
||||||
- `hide_followers`: boolean, true when the user has follower hiding enabled
|
- `hide_followers`: boolean, true when the user has follower hiding enabled
|
||||||
- `hide_follows`: boolean, true when the user has follow hiding enabled
|
- `hide_follows`: boolean, true when the user has follow hiding enabled
|
||||||
- `hide_followers_count`: boolean, true when the user has follower stat hiding enabled
|
- `hide_followers_count`: boolean, true when the user has follower stat hiding enabled
|
||||||
|
@ -62,6 +70,7 @@ Has these additional fields under the `pleroma` object:
|
||||||
- `allow_following_move`: boolean, true when the user allows automatically follow moved following accounts
|
- `allow_following_move`: boolean, true when the user allows automatically follow moved following accounts
|
||||||
- `unread_conversation_count`: The count of unread conversations. Only returned to the account owner.
|
- `unread_conversation_count`: The count of unread conversations. Only returned to the account owner.
|
||||||
- `unread_notifications_count`: The count of unread notifications. Only returned to the account owner.
|
- `unread_notifications_count`: The count of unread notifications. Only returned to the account owner.
|
||||||
|
- `notification_settings`: object, can be absent. See `/api/pleroma/notification_settings` for the parameters/keys returned.
|
||||||
|
|
||||||
### Source
|
### Source
|
||||||
|
|
||||||
|
@ -216,6 +225,7 @@ Has theses additional parameters (which are the same as in Pleroma-API):
|
||||||
- `avatar_upload_limit`: The same for avatars
|
- `avatar_upload_limit`: The same for avatars
|
||||||
- `background_upload_limit`: The same for backgrounds
|
- `background_upload_limit`: The same for backgrounds
|
||||||
- `banner_upload_limit`: The same for banners
|
- `banner_upload_limit`: The same for banners
|
||||||
|
- `background_image`: A background image that frontends can use
|
||||||
- `pleroma.metadata.features`: A list of supported features
|
- `pleroma.metadata.features`: A list of supported features
|
||||||
- `pleroma.metadata.federation`: The federation restrictions of this instance
|
- `pleroma.metadata.federation`: The federation restrictions of this instance
|
||||||
- `vapid_public_key`: The public key needed for push messages
|
- `vapid_public_key`: The public key needed for push messages
|
||||||
|
@ -225,3 +235,47 @@ Has theses additional parameters (which are the same as in Pleroma-API):
|
||||||
Has these additional fields under the `pleroma` object:
|
Has these additional fields under the `pleroma` object:
|
||||||
|
|
||||||
- `unread_count`: contains the number of unread notifications
|
- `unread_count`: contains the number of unread notifications
|
||||||
|
|
||||||
|
## Streaming
|
||||||
|
|
||||||
|
There is an additional `user:pleroma_chat` stream. Incoming chat messages will make the current chat be sent to this `user` stream. The `event` of an incoming chat message is `pleroma:chat_update`. The payload is the updated chat with the incoming chat message in the `last_message` field.
|
||||||
|
|
||||||
|
## Not implemented
|
||||||
|
|
||||||
|
Pleroma is generally compatible with the Mastodon 2.7.2 API, but some newer features and non-essential features are omitted. These features usually return an HTTP 200 status code, but with an empty response. While they may be added in the future, they are considered low priority.
|
||||||
|
|
||||||
|
### Suggestions
|
||||||
|
|
||||||
|
*Added in Mastodon 2.4.3*
|
||||||
|
|
||||||
|
- `GET /api/v1/suggestions`: Returns an empty array, `[]`
|
||||||
|
|
||||||
|
### Trends
|
||||||
|
|
||||||
|
*Added in Mastodon 3.0.0*
|
||||||
|
|
||||||
|
- `GET /api/v1/trends`: Returns an empty array, `[]`
|
||||||
|
|
||||||
|
### Identity proofs
|
||||||
|
|
||||||
|
*Added in Mastodon 2.8.0*
|
||||||
|
|
||||||
|
- `GET /api/v1/identity_proofs`: Returns an empty array, `[]`
|
||||||
|
|
||||||
|
### Endorsements
|
||||||
|
|
||||||
|
*Added in Mastodon 2.5.0*
|
||||||
|
|
||||||
|
- `GET /api/v1/endorsements`: Returns an empty array, `[]`
|
||||||
|
|
||||||
|
### Profile directory
|
||||||
|
|
||||||
|
*Added in Mastodon 3.0.0*
|
||||||
|
|
||||||
|
- `GET /api/v1/directory`: Returns HTTP 404
|
||||||
|
|
||||||
|
### Featured tags
|
||||||
|
|
||||||
|
*Added in Mastodon 3.0.0*
|
||||||
|
|
||||||
|
- `GET /api/v1/featured_tags`: Returns HTTP 404
|
||||||
|
|
|
@ -265,7 +265,7 @@ See [Admin-API](admin_api.md)
|
||||||
* Method `PUT`
|
* Method `PUT`
|
||||||
* Authentication: required
|
* Authentication: required
|
||||||
* Params:
|
* Params:
|
||||||
* `image`: Multipart image
|
* `file`: Multipart image
|
||||||
* Response: JSON. Returns a mastodon media attachment entity
|
* Response: JSON. Returns a mastodon media attachment entity
|
||||||
when successful, otherwise returns HTTP 415 `{"error": "error_msg"}`
|
when successful, otherwise returns HTTP 415 `{"error": "error_msg"}`
|
||||||
* Example response:
|
* Example response:
|
||||||
|
@ -358,7 +358,7 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
|
||||||
* `recipients`: A list of ids of users that should receive posts to this conversation. This will replace the current list of recipients, so submit the full list. The owner of the conversation will always be part of the set of recipients, though.
|
* `recipients`: A list of ids of users that should receive posts to this conversation. This will replace the current list of recipients, so submit the full list. The owner of the conversation will always be part of the set of recipients, though.
|
||||||
* Response: JSON, statuses (200 - healthy, 503 unhealthy)
|
* Response: JSON, statuses (200 - healthy, 503 unhealthy)
|
||||||
|
|
||||||
## `GET /api/v1/pleroma/conversations/read`
|
## `POST /api/v1/pleroma/conversations/read`
|
||||||
### Marks all user's conversations as read.
|
### Marks all user's conversations as read.
|
||||||
* Method `POST`
|
* Method `POST`
|
||||||
* Authentication: required
|
* Authentication: required
|
||||||
|
@ -426,7 +426,7 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
|
||||||
* Authentication: required
|
* Authentication: required
|
||||||
* Params:
|
* Params:
|
||||||
* `file`: file needs to be uploaded with the multipart request or link to remote file.
|
* `file`: file needs to be uploaded with the multipart request or link to remote file.
|
||||||
* `shortcode`: (*optional*) shortcode for new emoji, must be unique for all emoji. If not sent, the shortcode will be taken from the original filename.
|
* `shortcode`: (*optional*) shortcode for new emoji, must be unique for all emoji. If not sent, the shortcode will be taken from the original filename.
|
||||||
* `filename`: (*optional*) new emoji file name. If not specified will be taken from original filename.
|
* `filename`: (*optional*) new emoji file name. If not specified will be taken from original filename.
|
||||||
* Response: JSON, list of files for updated pack (hashmap -> shortcode => filename) with status 200, or an error status with an error message.
|
* Response: JSON, list of files for updated pack (hashmap -> shortcode => filename) with status 200, or an error status with an error message.
|
||||||
|
|
||||||
|
@ -450,18 +450,44 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
|
||||||
* Response: JSON, list with updated files for updated pack (hashmap -> shortcode => filename) with status 200, or an error status with an error message.
|
* Response: JSON, list with updated files for updated pack (hashmap -> shortcode => filename) with status 200, or an error status with an error message.
|
||||||
|
|
||||||
## `GET /api/pleroma/emoji/packs`
|
## `GET /api/pleroma/emoji/packs`
|
||||||
|
|
||||||
### Lists local custom emoji packs
|
### Lists local custom emoji packs
|
||||||
|
|
||||||
* Method `GET`
|
* Method `GET`
|
||||||
* Authentication: not required
|
* Authentication: not required
|
||||||
* Params: None
|
* Params:
|
||||||
* Response: JSON, "ok" and 200 status and the JSON hashmap of pack name to pack contents
|
* `page`: page number for packs (default 1)
|
||||||
|
* `page_size`: page size for packs (default 50)
|
||||||
|
* Response: `packs` key with JSON hashmap of pack name to pack contents and `count` key for count of packs.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"packs": {
|
||||||
|
"pack_name": {...}, // pack contents
|
||||||
|
...
|
||||||
|
},
|
||||||
|
"count": 0 // packs count
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## `GET /api/pleroma/emoji/packs/:name`
|
## `GET /api/pleroma/emoji/packs/:name`
|
||||||
|
|
||||||
### Get pack.json for the pack
|
### Get pack.json for the pack
|
||||||
|
|
||||||
* Method `GET`
|
* Method `GET`
|
||||||
* Authentication: not required
|
* Authentication: not required
|
||||||
* Params: None
|
* Params:
|
||||||
* Response: JSON, pack json with `files` and `pack` keys with 200 status or 404 if the pack does not exist
|
* `page`: page number for files (default 1)
|
||||||
|
* `page_size`: page size for files (default 30)
|
||||||
|
* Response: JSON, pack json with `files`, `files_count` and `pack` keys with 200 status or 404 if the pack does not exist.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"files": {...},
|
||||||
|
"files_count": 0, // emoji count in pack
|
||||||
|
"pack": {...}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## `GET /api/pleroma/emoji/packs/:name/archive`
|
## `GET /api/pleroma/emoji/packs/:name/archive`
|
||||||
### Requests a local pack archive from the instance
|
### Requests a local pack archive from the instance
|
||||||
|
@ -536,7 +562,7 @@ Emoji reactions work a lot like favourites do. They make it possible to react to
|
||||||
```
|
```
|
||||||
|
|
||||||
## `GET /api/v1/pleroma/statuses/:id/reactions/:emoji`
|
## `GET /api/v1/pleroma/statuses/:id/reactions/:emoji`
|
||||||
### Get an object of emoji to account mappings with accounts that reacted to the post for a specific emoji`
|
### Get an object of emoji to account mappings with accounts that reacted to the post for a specific emoji
|
||||||
* Method: `GET`
|
* Method: `GET`
|
||||||
* Authentication: optional
|
* Authentication: optional
|
||||||
* Params: None
|
* Params: None
|
||||||
|
|
|
@ -69,3 +69,32 @@ mix pleroma.database update_users_following_followers_counts
|
||||||
```sh tab="From Source"
|
```sh tab="From Source"
|
||||||
mix pleroma.database fix_likes_collections
|
mix pleroma.database fix_likes_collections
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Vacuum the database
|
||||||
|
|
||||||
|
### Analyze
|
||||||
|
|
||||||
|
Running an `analyze` vacuum job can improve performance by updating statistics used by the query planner. **It is safe to cancel this.**
|
||||||
|
|
||||||
|
```sh tab="OTP"
|
||||||
|
./bin/pleroma_ctl database vacuum analyze
|
||||||
|
```
|
||||||
|
|
||||||
|
```sh tab="From Source"
|
||||||
|
mix pleroma.database vacuum analyze
|
||||||
|
```
|
||||||
|
|
||||||
|
### Full
|
||||||
|
|
||||||
|
Running a `full` vacuum job rebuilds your entire database by reading all of the data and rewriting it into smaller
|
||||||
|
and more compact files with an optimized layout. This process will take a long time and use additional disk space as
|
||||||
|
it builds the files side-by-side the existing database files. It can make your database faster and use less disk space,
|
||||||
|
but should only be run if necessary. **It is safe to cancel this.**
|
||||||
|
|
||||||
|
```sh tab="OTP"
|
||||||
|
./bin/pleroma_ctl database vacuum full
|
||||||
|
```
|
||||||
|
|
||||||
|
```sh tab="From Source"
|
||||||
|
mix pleroma.database vacuum full
|
||||||
|
```
|
|
@ -44,3 +44,11 @@ Currently, only .zip archives are recognized as remote pack files and packs are
|
||||||
The manifest entry will either be written to a newly created `pack_name.json` file (pack name is asked in questions) or appended to the existing one, *replacing* the old pack with the same name if it was in the file previously.
|
The manifest entry will either be written to a newly created `pack_name.json` file (pack name is asked in questions) or appended to the existing one, *replacing* the old pack with the same name if it was in the file previously.
|
||||||
|
|
||||||
The file list will be written to the file specified previously, *replacing* that file. You _should_ check that the file list doesn't contain anything you don't need in the pack, that is, anything that is not an emoji (the whole pack is downloaded, but only emoji files are extracted).
|
The file list will be written to the file specified previously, *replacing* that file. You _should_ check that the file list doesn't contain anything you don't need in the pack, that is, anything that is not an emoji (the whole pack is downloaded, but only emoji files are extracted).
|
||||||
|
|
||||||
|
## Reload emoji packs
|
||||||
|
|
||||||
|
```sh tab="OTP"
|
||||||
|
./bin/pleroma_ctl emoji reload
|
||||||
|
```
|
||||||
|
|
||||||
|
This command only works with OTP releases.
|
||||||
|
|
|
@ -105,23 +105,23 @@ mix pleroma.user toggle_activated <nickname>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## Unsubscribe local users from a user and deactivate the user
|
## Deactivate a user and unsubscribe local users from the user
|
||||||
```sh tab="OTP"
|
```sh tab="OTP"
|
||||||
./bin/pleroma_ctl user unsubscribe NICKNAME
|
./bin/pleroma_ctl user deactivate NICKNAME
|
||||||
```
|
```
|
||||||
|
|
||||||
```sh tab="From Source"
|
```sh tab="From Source"
|
||||||
mix pleroma.user unsubscribe NICKNAME
|
mix pleroma.user deactivate NICKNAME
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
## Unsubscribe local users from an instance and deactivate all accounts on it
|
## Deactivate all accounts from an instance and unsubscribe local users from it
|
||||||
```sh tab="OTP"
|
```sh tab="OTP"
|
||||||
./bin/pleroma_ctl user unsubscribe_all_from_instance <instance>
|
./bin/pleroma_ctl user deactivate_all_from_instance <instance>
|
||||||
```
|
```
|
||||||
|
|
||||||
```sh tab="From Source"
|
```sh tab="From Source"
|
||||||
mix pleroma.user unsubscribe_all_from_instance <instance>
|
mix pleroma.user deactivate_all_from_instance <instance>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -135,6 +135,16 @@ mix pleroma.user reset_password <nickname>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Disable Multi Factor Authentication (MFA/2FA) for a user
|
||||||
|
```sh tab="OTP"
|
||||||
|
./bin/pleroma_ctl user reset_mfa <nickname>
|
||||||
|
```
|
||||||
|
|
||||||
|
```sh tab="From Source"
|
||||||
|
mix pleroma.user reset_mfa <nickname>
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
## Set the value of the given user's settings
|
## Set the value of the given user's settings
|
||||||
```sh tab="OTP"
|
```sh tab="OTP"
|
||||||
./bin/pleroma_ctl user set <nickname> [option ...]
|
./bin/pleroma_ctl user set <nickname> [option ...]
|
||||||
|
@ -177,4 +187,3 @@ mix pleroma.user untag <nickname> <tags>
|
||||||
```sh tab="From Source"
|
```sh tab="From Source"
|
||||||
mix pleroma.user toggle_confirmed <nickname>
|
mix pleroma.user toggle_confirmed <nickname>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,35 @@
|
||||||
|
# ChatMessages
|
||||||
|
|
||||||
|
ChatMessages are the messages sent in 1-on-1 chats. They are similar to
|
||||||
|
`Note`s, but the addressing is done by having a single AP actor in the `to`
|
||||||
|
field. Addressing multiple actors is not allowed. These messages are always
|
||||||
|
private, there is no public version of them. They are created with a `Create`
|
||||||
|
activity.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"actor": "http://2hu.gensokyo/users/raymoo",
|
||||||
|
"id": "http://2hu.gensokyo/objects/1",
|
||||||
|
"object": {
|
||||||
|
"attributedTo": "http://2hu.gensokyo/users/raymoo",
|
||||||
|
"content": "You expected a cute girl? Too bad.",
|
||||||
|
"id": "http://2hu.gensokyo/objects/2",
|
||||||
|
"published": "2020-02-12T14:08:20Z",
|
||||||
|
"to": [
|
||||||
|
"http://2hu.gensokyo/users/marisa"
|
||||||
|
],
|
||||||
|
"type": "ChatMessage"
|
||||||
|
},
|
||||||
|
"published": "2018-02-12T14:08:20Z",
|
||||||
|
"to": [
|
||||||
|
"http://2hu.gensokyo/users/marisa"
|
||||||
|
],
|
||||||
|
"type": "Create"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This setup does not prevent multi-user chats, but these will have to go through
|
||||||
|
a `Group`, which will be the recipient of the messages and then `Announce` them
|
||||||
|
to the users in the `Group`.
|
|
@ -42,6 +42,12 @@ Feel free to contact us to be added to this list!
|
||||||
- Platforms: SailfishOS
|
- Platforms: SailfishOS
|
||||||
- Features: No Streaming
|
- Features: No Streaming
|
||||||
|
|
||||||
|
### Husky
|
||||||
|
- Source code: <https://git.mentality.rip/FWGS/Husky>
|
||||||
|
- Contact: [@Husky@enigmatic.observer](https://enigmatic.observer/users/Husky)
|
||||||
|
- Platforms: Android
|
||||||
|
- Features: No Streaming, Emoji Reactions, Text Formatting, FE Stickers
|
||||||
|
|
||||||
### Nekonium
|
### Nekonium
|
||||||
- Homepage: [F-Droid Repository](https://repo.gdgd.jp.net/), [Google Play](https://play.google.com/store/apps/details?id=com.apps.nekonium), [Amazon](https://www.amazon.co.jp/dp/B076FXPRBC/)
|
- Homepage: [F-Droid Repository](https://repo.gdgd.jp.net/), [Google Play](https://play.google.com/store/apps/details?id=com.apps.nekonium), [Amazon](https://www.amazon.co.jp/dp/B076FXPRBC/)
|
||||||
- Source: <https://gogs.gdgd.jp.net/lin/nekonium>
|
- Source: <https://gogs.gdgd.jp.net/lin/nekonium>
|
||||||
|
|
|
@ -36,30 +36,15 @@ To add configuration to your config file, you can copy it from the base config.
|
||||||
* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
|
* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
|
||||||
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
|
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
|
||||||
* `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance.
|
* `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance.
|
||||||
* `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
|
* `public`: Makes the client API in authenticated mode-only except for user-profiles. Useful for disabling the Local Timeline and The Whole Known Network.
|
||||||
* `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn’t modify activities (default).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn’t make sense to use in production.
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See [`:mrf_simple`](#mrf_simple)).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (See [`:mrf_subchain`](#mrf_subchain)).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See [`:mrf_rejectnonpublic`](#mrf_rejectnonpublic)).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`: Crawls attachments using their MediaProxy URLs so that the MediaProxy cache is primed.
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.MentionPolicy`: Drops posts mentioning configurable users. (See [`:mrf_mention`](#mrf_mention)).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.VocabularyPolicy`: Restricts activities to a configured set of vocabulary. (See [`:mrf_vocabulary`](#mrf_vocabulary)).
|
|
||||||
* `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)).
|
|
||||||
* `public`: Makes the client API in authenticated mode-only except for user-profiles. Useful for disabling the Local Timeline and The Whole Known Network.
|
|
||||||
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
|
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
|
||||||
* `managed_config`: Whether the config for pleroma-fe is configured in [:frontend_configurations](#frontend_configurations) or in ``static/config.json``.
|
* `managed_config`: Whether the config for pleroma-fe is configured in [:frontend_configurations](#frontend_configurations) or in ``static/config.json``.
|
||||||
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML).
|
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML).
|
||||||
* `mrf_transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
|
|
||||||
* `mrf_transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
|
|
||||||
* `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with
|
* `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with
|
||||||
older software for these nicknames.
|
older software for these nicknames.
|
||||||
* `max_pinned_statuses`: The maximum number of pinned statuses. `0` will disable the feature.
|
* `max_pinned_statuses`: The maximum number of pinned statuses. `0` will disable the feature.
|
||||||
* `autofollowed_nicknames`: Set to nicknames of (local) users that every new user should automatically follow.
|
* `autofollowed_nicknames`: Set to nicknames of (local) users that every new user should automatically follow.
|
||||||
* `no_attachment_links`: Set to true to disable automatically adding attachment link text to statuses.
|
* `attachment_links`: Set to true to enable automatically adding attachment link text to statuses.
|
||||||
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
|
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
|
||||||
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
|
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
|
||||||
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`).
|
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`).
|
||||||
|
@ -77,11 +62,30 @@ To add configuration to your config file, you can copy it from the base config.
|
||||||
* `external_user_synchronization`: Enabling following/followers counters synchronization for external users.
|
* `external_user_synchronization`: Enabling following/followers counters synchronization for external users.
|
||||||
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
|
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
|
||||||
|
|
||||||
|
## Message rewrite facility
|
||||||
|
|
||||||
|
### :mrf
|
||||||
|
* `policies`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn’t modify activities (default).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn’t make sense to use in production.
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See [`:mrf_simple`](#mrf_simple)).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (See [`:mrf_subchain`](#mrf_subchain)).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See [`:mrf_rejectnonpublic`](#mrf_rejectnonpublic)).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`: Crawls attachments using their MediaProxy URLs so that the MediaProxy cache is primed.
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.MentionPolicy`: Drops posts mentioning configurable users. (See [`:mrf_mention`](#mrf_mention)).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.VocabularyPolicy`: Restricts activities to a configured set of vocabulary. (See [`:mrf_vocabulary`](#mrf_vocabulary)).
|
||||||
|
* `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)).
|
||||||
|
* `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
|
||||||
|
* `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
|
||||||
|
|
||||||
## Federation
|
## Federation
|
||||||
### MRF policies
|
### MRF policies
|
||||||
|
|
||||||
!!! note
|
!!! note
|
||||||
Configuring MRF policies is not enough for them to take effect. You have to enable them by specifying their module in `rewrite_policy` under [:instance](#instance) section.
|
Configuring MRF policies is not enough for them to take effect. You have to enable them by specifying their module in `policies` under [:mrf](#mrf) section.
|
||||||
|
|
||||||
#### :mrf_simple
|
#### :mrf_simple
|
||||||
* `media_removal`: List of instances to remove media from.
|
* `media_removal`: List of instances to remove media from.
|
||||||
|
@ -137,8 +141,9 @@ their ActivityPub ID.
|
||||||
An example:
|
An example:
|
||||||
|
|
||||||
```elixir
|
```elixir
|
||||||
config :pleroma, :mrf_user_allowlist,
|
config :pleroma, :mrf_user_allowlist, %{
|
||||||
"example.org": ["https://example.org/users/admin"]
|
"example.org" => ["https://example.org/users/admin"]
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
#### :mrf_object_age
|
#### :mrf_object_age
|
||||||
|
@ -149,6 +154,15 @@ config :pleroma, :mrf_user_allowlist,
|
||||||
* `:strip_followers` removes followers from the ActivityPub recipient list, ensuring they won't be delivered to home timelines
|
* `:strip_followers` removes followers from the ActivityPub recipient list, ensuring they won't be delivered to home timelines
|
||||||
* `:reject` rejects the message entirely
|
* `:reject` rejects the message entirely
|
||||||
|
|
||||||
|
#### mrf_steal_emoji
|
||||||
|
* `hosts`: List of hosts to steal emojis from
|
||||||
|
* `rejected_shortcodes`: Regex-list of shortcodes to reject
|
||||||
|
* `size_limit`: File size limit (in bytes), checked before an emoji is saved to the disk
|
||||||
|
|
||||||
|
#### :mrf_activity_expiration
|
||||||
|
|
||||||
|
* `days`: Default global expiration time for all local Create activities (in days)
|
||||||
|
|
||||||
### :activitypub
|
### :activitypub
|
||||||
* `unfollow_blocked`: Whether blocks result in people getting unfollowed
|
* `unfollow_blocked`: Whether blocks result in people getting unfollowed
|
||||||
* `outgoing_blocks`: Whether to federate blocks to other instances
|
* `outgoing_blocks`: Whether to federate blocks to other instances
|
||||||
|
@ -249,6 +263,40 @@ This section describe PWA manifest instance-specific values. Currently this opti
|
||||||
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.
|
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.
|
||||||
* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
|
* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
|
||||||
* `whitelist`: List of domains to bypass the mediaproxy
|
* `whitelist`: List of domains to bypass the mediaproxy
|
||||||
|
* `invalidation`: options for remove media from cache after delete object:
|
||||||
|
* `enabled`: Enables purge cache
|
||||||
|
* `provider`: Which one of the [purge cache strategy](#purge-cache-strategy) to use.
|
||||||
|
|
||||||
|
### Purge cache strategy
|
||||||
|
|
||||||
|
#### Pleroma.Web.MediaProxy.Invalidation.Script
|
||||||
|
|
||||||
|
This strategy allows running an external shell script to purge the cache.
|
||||||
|
URLs of attachments are passed to the script as arguments.
|
||||||
|
|
||||||
|
* `script_path`: path to external script.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```elixir
|
||||||
|
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
|
||||||
|
script_path: "./installation/nginx-cache-purge.example"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Pleroma.Web.MediaProxy.Invalidation.Http
|
||||||
|
|
||||||
|
This strategy allows performing a custom HTTP request to purge the cache.
|
||||||
|
|
||||||
|
* `method`: http method. default is `purge`
|
||||||
|
* `headers`: http headers.
|
||||||
|
* `options`: request options.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```elixir
|
||||||
|
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
|
||||||
|
method: :purge,
|
||||||
|
headers: [],
|
||||||
|
options: []
|
||||||
|
```
|
||||||
|
|
||||||
## Link previews
|
## Link previews
|
||||||
|
|
||||||
|
@ -459,6 +507,7 @@ the source code is here: https://github.com/koto-bank/kocaptcha. The default end
|
||||||
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
|
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
|
||||||
* `proxy_remote`: If you're using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
|
* `proxy_remote`: If you're using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
|
||||||
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
|
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
|
||||||
|
* `filename_display_max_length`: Set max length of a filename to display. 0 = no limit. Default: 30.
|
||||||
|
|
||||||
!!! warning
|
!!! warning
|
||||||
`strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
|
`strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
|
||||||
|
@ -619,24 +668,6 @@ config :pleroma, :workers,
|
||||||
* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
|
* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
|
||||||
* deprecated options: `max_jobs`, `initial_timeout`
|
* deprecated options: `max_jobs`, `initial_timeout`
|
||||||
|
|
||||||
### Pleroma.Scheduler
|
|
||||||
|
|
||||||
Configuration for [Quantum](https://github.com/quantum-elixir/quantum-core) jobs scheduler.
|
|
||||||
|
|
||||||
See [Quantum readme](https://github.com/quantum-elixir/quantum-core#usage) for the list of supported options.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```elixir
|
|
||||||
config :pleroma, Pleroma.Scheduler,
|
|
||||||
global: true,
|
|
||||||
overlap: true,
|
|
||||||
timezone: :utc,
|
|
||||||
jobs: [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
|
|
||||||
```
|
|
||||||
|
|
||||||
The above example defines a single job which invokes `Pleroma.Web.Websub.refresh_subscriptions()` every 6 hours ("0 */6 * * * *", [crontab format](https://en.wikipedia.org/wiki/Cron)).
|
|
||||||
|
|
||||||
## :web_push_encryption, :vapid_details
|
## :web_push_encryption, :vapid_details
|
||||||
|
|
||||||
Web Push Notifications configuration. You can use the mix task `mix web_push.gen.keypair` to generate it.
|
Web Push Notifications configuration. You can use the mix task `mix web_push.gen.keypair` to generate it.
|
||||||
|
@ -941,13 +972,13 @@ config :pleroma, :database_config_whitelist, [
|
||||||
|
|
||||||
Restrict access for unauthenticated users to timelines (public and federate), user profiles and statuses.
|
Restrict access for unauthenticated users to timelines (public and federate), user profiles and statuses.
|
||||||
|
|
||||||
* `timelines` - public and federated timelines
|
* `timelines`: public and federated timelines
|
||||||
* `local` - public timeline
|
* `local`: public timeline
|
||||||
* `federated`
|
* `federated`
|
||||||
* `profiles` - user profiles
|
* `profiles`: user profiles
|
||||||
* `local`
|
* `local`
|
||||||
* `remote`
|
* `remote`
|
||||||
* `activities` - statuses
|
* `activities`: statuses
|
||||||
* `local`
|
* `local`
|
||||||
* `remote`
|
* `remote`
|
||||||
|
|
||||||
|
|
|
@ -60,7 +60,7 @@ Example of `my-awesome-theme.json` where we add the name "My Awesome Theme"
|
||||||
|
|
||||||
### Set as default theme
|
### Set as default theme
|
||||||
|
|
||||||
Now we can set the new theme as default in the [Pleroma FE configuration](General-tips-for-customizing-Pleroma-FE.md).
|
Now we can set the new theme as default in the [Pleroma FE configuration](../../../frontend/CONFIGURATION).
|
||||||
|
|
||||||
Example of adding the new theme in the back-end config files
|
Example of adding the new theme in the back-end config files
|
||||||
```elixir
|
```elixir
|
||||||
|
|
|
@ -34,9 +34,9 @@ config :pleroma, :instance,
|
||||||
To use `SimplePolicy`, you must enable it. Do so by adding the following to your `:instance` config object, so that it looks like this:
|
To use `SimplePolicy`, you must enable it. Do so by adding the following to your `:instance` config object, so that it looks like this:
|
||||||
|
|
||||||
```elixir
|
```elixir
|
||||||
config :pleroma, :instance,
|
config :pleroma, :mrf,
|
||||||
[...]
|
[...]
|
||||||
rewrite_policy: Pleroma.Web.ActivityPub.MRF.SimplePolicy
|
policies: Pleroma.Web.ActivityPub.MRF.SimplePolicy
|
||||||
```
|
```
|
||||||
|
|
||||||
Once `SimplePolicy` is enabled, you can configure various groups in the `:mrf_simple` config object. These groups are:
|
Once `SimplePolicy` is enabled, you can configure various groups in the `:mrf_simple` config object. These groups are:
|
||||||
|
@ -58,8 +58,8 @@ Servers should be configured as lists.
|
||||||
This example will enable `SimplePolicy`, block media from `illegalporn.biz`, mark media as NSFW from `porn.biz` and `porn.business`, reject messages from `spam.com`, remove messages from `spam.university` from the federated timeline and block reports (flags) from `whiny.whiner`:
|
This example will enable `SimplePolicy`, block media from `illegalporn.biz`, mark media as NSFW from `porn.biz` and `porn.business`, reject messages from `spam.com`, remove messages from `spam.university` from the federated timeline and block reports (flags) from `whiny.whiner`:
|
||||||
|
|
||||||
```elixir
|
```elixir
|
||||||
config :pleroma, :instance,
|
config :pleroma, :mrf,
|
||||||
rewrite_policy: [Pleroma.Web.ActivityPub.MRF.SimplePolicy]
|
policies: [Pleroma.Web.ActivityPub.MRF.SimplePolicy]
|
||||||
|
|
||||||
config :pleroma, :mrf_simple,
|
config :pleroma, :mrf_simple,
|
||||||
media_removal: ["illegalporn.biz"],
|
media_removal: ["illegalporn.biz"],
|
||||||
|
@ -75,7 +75,7 @@ The effects of MRF policies can be very drastic. It is important to use this fun
|
||||||
|
|
||||||
## Writing your own MRF Policy
|
## Writing your own MRF Policy
|
||||||
|
|
||||||
As discussed above, the MRF system is a modular system that supports pluggable policies. This means that an admin may write a custom MRF policy in Elixir or any other language that runs on the Erlang VM, by specifying the module name in the `rewrite_policy` config setting.
|
As discussed above, the MRF system is a modular system that supports pluggable policies. This means that an admin may write a custom MRF policy in Elixir or any other language that runs on the Erlang VM, by specifying the module name in the `policies` config setting.
|
||||||
|
|
||||||
For example, here is a sample policy module which rewrites all messages to "new message content":
|
For example, here is a sample policy module which rewrites all messages to "new message content":
|
||||||
|
|
||||||
|
@ -125,8 +125,8 @@ end
|
||||||
If you save this file as `lib/pleroma/web/activity_pub/mrf/rewrite_policy.ex`, it will be included when you next rebuild Pleroma. You can enable it in the configuration like so:
|
If you save this file as `lib/pleroma/web/activity_pub/mrf/rewrite_policy.ex`, it will be included when you next rebuild Pleroma. You can enable it in the configuration like so:
|
||||||
|
|
||||||
```elixir
|
```elixir
|
||||||
config :pleroma, :instance,
|
config :pleroma, :mrf,
|
||||||
rewrite_policy: [
|
policies: [
|
||||||
Pleroma.Web.ActivityPub.MRF.SimplePolicy,
|
Pleroma.Web.ActivityPub.MRF.SimplePolicy,
|
||||||
Pleroma.Web.ActivityPub.MRF.RewritePolicy
|
Pleroma.Web.ActivityPub.MRF.RewritePolicy
|
||||||
]
|
]
|
||||||
|
|
|
@ -0,0 +1,31 @@
|
||||||
|
# Optimizing your PostgreSQL performance
|
||||||
|
|
||||||
|
Pleroma performance depends to a large extent on good database performance. The default PostgreSQL settings are mostly fine, but often you can get better performance by changing a few settings.
|
||||||
|
|
||||||
|
You can use [PGTune](https://pgtune.leopard.in.ua) to get recommendations for your setup. If you do, set the "Number of Connections" field to 20, as Pleroma will only use 10 concurrent connections anyway. If you don't, it will give you advice that might even hurt your performance.
|
||||||
|
|
||||||
|
We also recommend not using the "Network Storage" option.
|
||||||
|
|
||||||
|
## Example configurations
|
||||||
|
|
||||||
|
Here are some configuration suggestions for PostgreSQL 10+.
|
||||||
|
|
||||||
|
### 1GB RAM, 1 CPU
|
||||||
|
```
|
||||||
|
shared_buffers = 256MB
|
||||||
|
effective_cache_size = 768MB
|
||||||
|
maintenance_work_mem = 64MB
|
||||||
|
work_mem = 13107kB
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2GB RAM, 2 CPU
|
||||||
|
```
|
||||||
|
shared_buffers = 512MB
|
||||||
|
effective_cache_size = 1536MB
|
||||||
|
maintenance_work_mem = 128MB
|
||||||
|
work_mem = 26214kB
|
||||||
|
max_worker_processes = 2
|
||||||
|
max_parallel_workers_per_gather = 1
|
||||||
|
max_parallel_workers = 2
|
||||||
|
```
|
||||||
|
|
|
@ -0,0 +1,38 @@
|
||||||
|
# Storing Remote Media
|
||||||
|
|
||||||
|
Pleroma does not store remote/federated media by default. The best way to achieve this is to change Nginx to keep its reverse proxy cache
|
||||||
|
for a year and to activate the `MediaProxyWarmingPolicy` MRF policy in Pleroma which will automatically fetch all media through the proxy
|
||||||
|
as soon as the post is received by your instance.
|
||||||
|
|
||||||
|
## Nginx
|
||||||
|
|
||||||
|
```
|
||||||
|
proxy_cache_path /long/term/storage/path/pleroma-media-cache levels=1:2
|
||||||
|
keys_zone=pleroma_media_cache:10m inactive=1y use_temp_path=off;
|
||||||
|
|
||||||
|
location ~ ^/(media|proxy) {
|
||||||
|
proxy_cache pleroma_media_cache;
|
||||||
|
slice 1m;
|
||||||
|
proxy_cache_key $host$uri$is_args$args$slice_range;
|
||||||
|
proxy_set_header Range $slice_range;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_cache_valid 206 301 302 304 1h;
|
||||||
|
proxy_cache_valid 200 1y;
|
||||||
|
proxy_cache_use_stale error timeout invalid_header updating;
|
||||||
|
proxy_ignore_client_abort on;
|
||||||
|
proxy_buffering on;
|
||||||
|
chunked_transfer_encoding on;
|
||||||
|
proxy_ignore_headers Cache-Control Expires;
|
||||||
|
proxy_hide_header Cache-Control Expires;
|
||||||
|
proxy_pass http://127.0.0.1:4000;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Pleroma
|
||||||
|
|
||||||
|
Add to your `prod.secret.exs`:
|
||||||
|
|
||||||
|
```
|
||||||
|
config :pleroma, :mrf,
|
||||||
|
policies: [Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy]
|
||||||
|
```
|
|
@ -20,4 +20,4 @@ This document contains notes and guidelines for Pleroma developers.
|
||||||
|
|
||||||
## Auth-related configuration, OAuth consumer mode etc.
|
## Auth-related configuration, OAuth consumer mode etc.
|
||||||
|
|
||||||
See `Authentication` section of [`docs/configuration/cheatsheet.md`](docs/configuration/cheatsheet.md#authentication).
|
See `Authentication` section of [the configuration cheatsheet](configuration/cheatsheet.md#authentication).
|
||||||
|
|
|
@ -0,0 +1,26 @@
|
||||||
|
# Introduction to Pleroma
|
||||||
|
## What is Pleroma?
|
||||||
|
Pleroma is a federated social networking platform, compatible with Mastodon and other ActivityPub implementations. It is free software licensed under the AGPLv3.
|
||||||
|
It actually consists of two components: a backend, named simply Pleroma, and a user-facing frontend, named Pleroma-FE. It also includes the Mastodon frontend, if that's your thing.
|
||||||
|
It's part of what we call the fediverse, a federated network of instances which speak common protocols and can communicate with each other.
|
||||||
|
One account on an instance is enough to talk to the entire fediverse!
|
||||||
|
|
||||||
|
## How can I use it?
|
||||||
|
|
||||||
|
Pleroma instances are already widely deployed, a list can be found at <https://the-federation.info/pleroma> and <https://fediverse.network/pleroma>.
|
||||||
|
|
||||||
|
If you don't feel like joining an existing instance, but instead prefer to deploy your own instance, that's easy too!
|
||||||
|
Installation instructions can be found in the installation section of these docs.
|
||||||
|
|
||||||
|
## I got an account, now what?
|
||||||
|
Great! Now you can explore the fediverse! Open the login page for your Pleroma instance (e.g. <https://pleroma.soykaf.com>) and login with your username and password. (If you don't have an account yet, click on Register)
|
||||||
|
|
||||||
|
### Pleroma-FE
|
||||||
|
The default front-end used by Pleroma is Pleroma-FE. You can find more information on what it is and how to use it in the [Introduction to Pleroma-FE](../frontend).
|
||||||
|
|
||||||
|
### Mastodon interface
|
||||||
|
If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too!
|
||||||
|
Just add a "/web" after your instance url (e.g. <https://pleroma.soycaf.com/web>) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC!
|
||||||
|
The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation.
|
||||||
|
|
||||||
|
Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma.
|
|
@ -225,10 +225,7 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
|
||||||
|
|
||||||
#### Further reading
|
#### Further reading
|
||||||
|
|
||||||
* [Backup your instance](../administration/backup.md)
|
{! backend/installation/further_reading.include !}
|
||||||
* [Hardening your instance](../configuration/hardening.md)
|
|
||||||
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
||||||
* [Updating your instance](../administration/updating.md)
|
|
||||||
|
|
||||||
## Questions
|
## Questions
|
||||||
|
|
||||||
|
|
|
@ -200,10 +200,7 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
|
||||||
|
|
||||||
#### Further reading
|
#### Further reading
|
||||||
|
|
||||||
* [Backup your instance](../administration/backup.md)
|
{! backend/installation/further_reading.include !}
|
||||||
* [Hardening your instance](../configuration/hardening.md)
|
|
||||||
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
||||||
* [Updating your instance](../administration/updating.md)
|
|
||||||
|
|
||||||
## Questions
|
## Questions
|
||||||
|
|
||||||
|
|
|
@ -38,8 +38,8 @@ sudo apt install git build-essential postgresql postgresql-contrib
|
||||||
* Download and add the Erlang repository:
|
* Download and add the Erlang repository:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb
|
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_2.0_all.deb
|
||||||
sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb
|
sudo dpkg -i /tmp/erlang-solutions_2.0_all.deb
|
||||||
```
|
```
|
||||||
|
|
||||||
* Install Elixir and Erlang:
|
* Install Elixir and Erlang:
|
||||||
|
@ -186,10 +186,7 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
|
||||||
|
|
||||||
#### Further reading
|
#### Further reading
|
||||||
|
|
||||||
* [Backup your instance](../administration/backup.md)
|
{! backend/installation/further_reading.include !}
|
||||||
* [Hardening your instance](../configuration/hardening.md)
|
|
||||||
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
||||||
* [Updating your instance](../administration/updating.md)
|
|
||||||
|
|
||||||
## Questions
|
## Questions
|
||||||
|
|
||||||
|
|
|
@ -40,8 +40,8 @@ sudo apt install git build-essential postgresql postgresql-contrib
|
||||||
|
|
||||||
* Erlangのリポジトリをダウンロードおよびインストールします。
|
* Erlangのリポジトリをダウンロードおよびインストールします。
|
||||||
```
|
```
|
||||||
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb
|
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_2.0_all.deb
|
||||||
sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb
|
sudo dpkg -i /tmp/erlang-solutions_2.0_all.deb
|
||||||
```
|
```
|
||||||
|
|
||||||
* ElixirとErlangをインストールします、
|
* ElixirとErlangをインストールします、
|
||||||
|
@ -175,10 +175,7 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
|
||||||
|
|
||||||
#### その他の設定とカスタマイズ
|
#### その他の設定とカスタマイズ
|
||||||
|
|
||||||
* [Backup your instance](../administration/backup.md)
|
{! backend/installation/further_reading.include !}
|
||||||
* [Hardening your instance](../configuration/hardening.md)
|
|
||||||
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
||||||
* [Updating your instance](../administration/updating.md)
|
|
||||||
|
|
||||||
## 質問ある?
|
## 質問ある?
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
* [How Federation Works/Why is my Federated Timeline empty?](https://blog.soykaf.com/post/how-federation-works/)
|
||||||
|
* [Backup your instance](../administration/backup.md)
|
||||||
|
* [Updating your instance](../administration/updating.md)
|
||||||
|
* [Hardening your instance](../configuration/hardening.md)
|
||||||
|
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
@ -283,10 +283,7 @@ If you opted to allow sudo for the `pleroma` user but would like to remove the a
|
||||||
|
|
||||||
#### Further reading
|
#### Further reading
|
||||||
|
|
||||||
* [Backup your instance](../administration/backup.md)
|
{! backend/installation/further_reading.include !}
|
||||||
* [Hardening your instance](../configuration/hardening.md)
|
|
||||||
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
||||||
* [Updating your instance](../administration/updating.md)
|
|
||||||
|
|
||||||
## Questions
|
## Questions
|
||||||
|
|
||||||
|
|
|
@ -196,3 +196,11 @@ incorrect timestamps. You should have ntpd running.
|
||||||
## Instances running NetBSD
|
## Instances running NetBSD
|
||||||
|
|
||||||
* <https://catgirl.science>
|
* <https://catgirl.science>
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
{! backend/installation/further_reading.include !}
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
If you have questions about the installation, or it didn't work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or the IRC channel **#pleroma** on **Freenode**.
|
||||||
|
|
|
@ -242,3 +242,11 @@ If your instance is up and running, you can create your first user with administ
|
||||||
```
|
```
|
||||||
LC_ALL=en_US.UTF-8 MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
LC_ALL=en_US.UTF-8 MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
{! backend/installation/further_reading.include !}
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
If you have questions about the installation, or it didn't work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or the IRC channel **#pleroma** on **Freenode**.
|
||||||
|
|
|
@ -63,7 +63,7 @@ apt install postgresql-11-rum
|
||||||
```
|
```
|
||||||
|
|
||||||
#### (Optional) Performance configuration
|
#### (Optional) Performance configuration
|
||||||
For optimal performance, you may use [PGTune](https://pgtune.leopard.in.ua), don't forget to restart postgresql after editing the configuration
|
It is encouraged to check [Optimizing your PostgreSQL performance](../configuration/postgresql.md) document, for tips on PostgreSQL tuning.
|
||||||
|
|
||||||
```sh tab="Alpine"
|
```sh tab="Alpine"
|
||||||
rc-service postgresql restart
|
rc-service postgresql restart
|
||||||
|
@ -270,10 +270,7 @@ This will create an account withe the username of 'joeuser' with the email addre
|
||||||
|
|
||||||
## Further reading
|
## Further reading
|
||||||
|
|
||||||
* [Backup your instance](../administration/backup.md)
|
{! backend/installation/further_reading.include !}
|
||||||
* [Hardening your instance](../configuration/hardening.md)
|
|
||||||
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
|
|
||||||
* [Updating your instance](../administration/updating.md)
|
|
||||||
|
|
||||||
## Questions
|
## Questions
|
||||||
|
|
||||||
|
|
|
@ -1,65 +0,0 @@
|
||||||
# Introduction to Pleroma
|
|
||||||
## What is Pleroma?
|
|
||||||
Pleroma is a federated social networking platform, compatible with GNU social, Mastodon and other OStatus and ActivityPub implementations. It is free software licensed under the AGPLv3.
|
|
||||||
It actually consists of two components: a backend, named simply Pleroma, and a user-facing frontend, named Pleroma-FE. It also includes the Mastodon frontend, if that's your thing.
|
|
||||||
It's part of what we call the fediverse, a federated network of instances which speak common protocols and can communicate with each other.
|
|
||||||
One account on an instance is enough to talk to the entire fediverse!
|
|
||||||
|
|
||||||
## How can I use it?
|
|
||||||
|
|
||||||
Pleroma instances are already widely deployed, a list can be found at <http://distsn.org/pleroma-instances.html>. Information on all existing fediverse instances can be found at <https://fediverse.network/>.
|
|
||||||
|
|
||||||
If you don't feel like joining an existing instance, but instead prefer to deploy your own instance, that's easy too!
|
|
||||||
Installation instructions can be found in the installation section of these docs.
|
|
||||||
|
|
||||||
## I got an account, now what?
|
|
||||||
Great! Now you can explore the fediverse! Open the login page for your Pleroma instance (e.g. <https://pleroma.soykaf.com>) and login with your username and password. (If you don't have an account yet, click on Register)
|
|
||||||
|
|
||||||
At this point you will have two columns in front of you.
|
|
||||||
|
|
||||||
### Left column
|
|
||||||
|
|
||||||
- first block: here you can see your avatar, your nickname and statistics (Statuses, Following, Followers). Clicking your profile pic will open your profile.
|
|
||||||
Under that you have a text form which allows you to post new statuses. The number on the bottom of the text form is a character counter, every instance can have a different character limit (the default is 5000).
|
|
||||||
If you want to mention someone, type @ + name of the person. A drop-down menu will help you in finding the right person.
|
|
||||||
Under the text form there are also several visibility options and there is the option to use rich text.
|
|
||||||
Under that the icon on the left is for uploading media files and attach them to your post. There is also an emoji-picker and an option to post a poll.
|
|
||||||
To post your status, simply press Submit.
|
|
||||||
On the top right you will also see a wrench icon. This opens your personal settings.
|
|
||||||
|
|
||||||
- second block: Here you can switch between the different timelines:
|
|
||||||
- Timeline: all the people that you follow
|
|
||||||
- Interactions: here you can switch between different timelines where there was interaction with your account. There is Mentions, Repeats and Favorites, and New follows
|
|
||||||
- Direct Messages: these are the Direct Messages sent to you
|
|
||||||
- Public Timeline: all the statuses from the local instance
|
|
||||||
- The Whole Known Network: all public posts the instance knows about, both local and remote!
|
|
||||||
- About: This isn't a Timeline but shows relevant info about the instance. You can find a list of the moderators and admins, Terms of Service, MRF policies and enabled features.
|
|
||||||
- Optional third block: This is the Instance panel that can be activated, but is deactivated by default. It's fully customisable and by default has links to the pleroma-fe and Mastodon-fe.
|
|
||||||
- fourth block: This is the Notifications block, here you will get notified whenever somebody mentions you, follows you, repeats or favorites one of your statuses.
|
|
||||||
|
|
||||||
### Right column
|
|
||||||
This is where the interesting stuff happens!
|
|
||||||
Depending on the timeline you will see different statuses, but each status has a standard structure:
|
|
||||||
|
|
||||||
- Profile pic, name and link to profile. An optional left-arrow if it's a reply to another status (hovering will reveal the reply-to status). Clicking on the profile pic will uncollapse the user's profile.
|
|
||||||
- A `+` button on the right allows you to Expand/Collapse an entire discussion thread. It also updates in realtime!
|
|
||||||
- An arrow icon allows you to open the status on the instance where it's originating from.
|
|
||||||
- The text of the status, including mentions and attachments. If you click on a mention, it will automatically open the profile page of that person.
|
|
||||||
- Three buttons (left to right): Reply, Repeat, Favorite. There is also a forth button, this is a dropdown menu for simple moderation like muting the conversation or, if you have moderation rights, delete the status from the server.
|
|
||||||
|
|
||||||
### Top right
|
|
||||||
|
|
||||||
- The magnifier icon opens the search screen where you can search for statuses, people and hashtags. It's also possible to import statuses from remote servers by pasting the url to the post in the search field.
|
|
||||||
- The gear icon gives you general settings
|
|
||||||
- If you have admin rights, you'll see an icon that opens the admin interface
|
|
||||||
- The last icon is to log out
|
|
||||||
|
|
||||||
### Bottom right
|
|
||||||
On the bottom right you have a chatbox. Here you can communicate with people on the same instance in realtime. It is local-only, for now, but there are plans to make it extendable to the entire fediverse!
|
|
||||||
|
|
||||||
### Mastodon interface
|
|
||||||
If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too!
|
|
||||||
Just add a "/web" after your instance url (e.g. <https://pleroma.soycaf.com/web>) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC!
|
|
||||||
The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation.
|
|
||||||
|
|
||||||
Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma.
|
|
|
@ -1,2 +1,2 @@
|
||||||
elixir_version=1.8.2
|
elixir_version=1.9.4
|
||||||
erlang_version=21.3.7
|
erlang_version=22.3.4.1
|
||||||
|
|
|
@ -0,0 +1,40 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
# A simple shell script to delete a media from the Nginx cache.
|
||||||
|
|
||||||
|
SCRIPTNAME=${0##*/}
|
||||||
|
|
||||||
|
# NGINX cache directory
|
||||||
|
CACHE_DIRECTORY="/tmp/pleroma-media-cache"
|
||||||
|
|
||||||
|
## Return the files where the items are cached.
|
||||||
|
## $1 - the filename, can be a pattern .
|
||||||
|
## $2 - the cache directory.
|
||||||
|
## $3 - (optional) the number of parallel processes to run for grep.
|
||||||
|
get_cache_files() {
|
||||||
|
local max_parallel=${3-16}
|
||||||
|
find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E -Rl "^KEY:.*$1" | sort -u
|
||||||
|
}
|
||||||
|
|
||||||
|
## Removes an item from the given cache zone.
|
||||||
|
## $1 - the filename, can be a pattern .
|
||||||
|
## $2 - the cache directory.
|
||||||
|
purge_item() {
|
||||||
|
for f in $(get_cache_files $1 $2); do
|
||||||
|
echo "found file: $f"
|
||||||
|
[ -f $f ] || continue
|
||||||
|
echo "Deleting $f from $2."
|
||||||
|
rm $f
|
||||||
|
done
|
||||||
|
} # purge_item
|
||||||
|
|
||||||
|
purge() {
|
||||||
|
for url in "$@"
|
||||||
|
do
|
||||||
|
echo "$SCRIPTNAME delete \`$url\` from cache ($CACHE_DIRECTORY)"
|
||||||
|
purge_item $url $CACHE_DIRECTORY
|
||||||
|
done
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
purge $@
|
|
@ -37,18 +37,17 @@ server {
|
||||||
|
|
||||||
listen 443 ssl http2;
|
listen 443 ssl http2;
|
||||||
listen [::]:443 ssl http2;
|
listen [::]:443 ssl http2;
|
||||||
ssl_session_timeout 5m;
|
ssl_session_timeout 1d;
|
||||||
|
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
|
||||||
|
ssl_session_tickets off;
|
||||||
|
|
||||||
ssl_trusted_certificate /etc/letsencrypt/live/example.tld/chain.pem;
|
ssl_trusted_certificate /etc/letsencrypt/live/example.tld/chain.pem;
|
||||||
ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
|
ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
|
||||||
ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
|
ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
|
||||||
|
|
||||||
# Add TLSv1.0 to support older devices
|
ssl_protocols TLSv1.2 TLSv1.3;
|
||||||
ssl_protocols TLSv1.2;
|
|
||||||
# Uncomment line below if you want to support older devices (Before Android 4.4.2, IE 8, etc.)
|
|
||||||
# ssl_ciphers "HIGH:!aNULL:!MD5 or HIGH:!aNULL:!MD5:!3DES";
|
|
||||||
ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
|
ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
|
||||||
ssl_prefer_server_ciphers on;
|
ssl_prefer_server_ciphers off;
|
||||||
# In case of an old server with an OpenSSL version of 1.0.2 or below,
|
# In case of an old server with an OpenSSL version of 1.0.2 or below,
|
||||||
# leave only prime256v1 or comment out the following line.
|
# leave only prime256v1 or comment out the following line.
|
||||||
ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
|
ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
|
||||||
|
|
|
@ -67,8 +67,7 @@ def run(["render_timeline", nickname | _] = args) do
|
||||||
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
|
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
|
||||||
activities: activities,
|
activities: activities,
|
||||||
for: user,
|
for: user,
|
||||||
as: :activity,
|
as: :activity
|
||||||
skip_relationships: true
|
|
||||||
})
|
})
|
||||||
end
|
end
|
||||||
},
|
},
|
||||||
|
|
|
@ -52,6 +52,7 @@ def migrate_to_db(file_path \\ nil) do
|
||||||
|
|
||||||
defp do_migrate_to_db(config_file) do
|
defp do_migrate_to_db(config_file) do
|
||||||
if File.exists?(config_file) do
|
if File.exists?(config_file) do
|
||||||
|
shell_info("Migrating settings from file: #{Path.expand(config_file)}")
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE config;")
|
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE config;")
|
||||||
Ecto.Adapters.SQL.query!(Repo, "ALTER SEQUENCE config_id_seq RESTART;")
|
Ecto.Adapters.SQL.query!(Repo, "ALTER SEQUENCE config_id_seq RESTART;")
|
||||||
|
|
||||||
|
@ -72,8 +73,7 @@ defp create(group, settings) do
|
||||||
group
|
group
|
||||||
|> Pleroma.Config.Loader.filter_group(settings)
|
|> Pleroma.Config.Loader.filter_group(settings)
|
||||||
|> Enum.each(fn {key, value} ->
|
|> Enum.each(fn {key, value} ->
|
||||||
key = inspect(key)
|
{:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value})
|
||||||
{:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
|
|
||||||
|
|
||||||
shell_info("Settings for key #{key} migrated.")
|
shell_info("Settings for key #{key} migrated.")
|
||||||
end)
|
end)
|
||||||
|
@ -131,12 +131,9 @@ defp write_and_delete(config, file, delete?) do
|
||||||
end
|
end
|
||||||
|
|
||||||
defp write(config, file) do
|
defp write(config, file) do
|
||||||
value =
|
value = inspect(config.value, limit: :infinity)
|
||||||
config.value
|
|
||||||
|> ConfigDB.from_binary()
|
|
||||||
|> inspect(limit: :infinity)
|
|
||||||
|
|
||||||
IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
|
IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n")
|
||||||
|
|
||||||
config
|
config
|
||||||
end
|
end
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Database do
|
defmodule Mix.Tasks.Pleroma.Database do
|
||||||
alias Pleroma.Conversation
|
alias Pleroma.Conversation
|
||||||
|
alias Pleroma.Maintenance
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
|
@ -34,13 +35,7 @@ def run(["remove_embedded_objects" | args]) do
|
||||||
)
|
)
|
||||||
|
|
||||||
if Keyword.get(options, :vacuum) do
|
if Keyword.get(options, :vacuum) do
|
||||||
Logger.info("Runnning VACUUM FULL")
|
Maintenance.vacuum("full")
|
||||||
|
|
||||||
Repo.query!(
|
|
||||||
"vacuum full;",
|
|
||||||
[],
|
|
||||||
timeout: :infinity
|
|
||||||
)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -94,13 +89,7 @@ def run(["prune_objects" | args]) do
|
||||||
|> Repo.delete_all(timeout: :infinity)
|
|> Repo.delete_all(timeout: :infinity)
|
||||||
|
|
||||||
if Keyword.get(options, :vacuum) do
|
if Keyword.get(options, :vacuum) do
|
||||||
Logger.info("Runnning VACUUM FULL")
|
Maintenance.vacuum("full")
|
||||||
|
|
||||||
Repo.query!(
|
|
||||||
"vacuum full;",
|
|
||||||
[],
|
|
||||||
timeout: :infinity
|
|
||||||
)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -135,4 +124,10 @@ def run(["fix_likes_collections"]) do
|
||||||
end)
|
end)
|
||||||
|> Stream.run()
|
|> Stream.run()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def run(["vacuum", args]) do
|
||||||
|
start_pleroma()
|
||||||
|
|
||||||
|
Maintenance.vacuum(args)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -15,7 +15,7 @@ def run(["ls-packs" | args]) do
|
||||||
{options, [], []} = parse_global_opts(args)
|
{options, [], []} = parse_global_opts(args)
|
||||||
|
|
||||||
url_or_path = options[:manifest] || default_manifest()
|
url_or_path = options[:manifest] || default_manifest()
|
||||||
manifest = fetch_manifest(url_or_path)
|
manifest = fetch_and_decode(url_or_path)
|
||||||
|
|
||||||
Enum.each(manifest, fn {name, info} ->
|
Enum.each(manifest, fn {name, info} ->
|
||||||
to_print = [
|
to_print = [
|
||||||
|
@ -42,12 +42,12 @@ def run(["get-packs" | args]) do
|
||||||
|
|
||||||
url_or_path = options[:manifest] || default_manifest()
|
url_or_path = options[:manifest] || default_manifest()
|
||||||
|
|
||||||
manifest = fetch_manifest(url_or_path)
|
manifest = fetch_and_decode(url_or_path)
|
||||||
|
|
||||||
for pack_name <- pack_names do
|
for pack_name <- pack_names do
|
||||||
if Map.has_key?(manifest, pack_name) do
|
if Map.has_key?(manifest, pack_name) do
|
||||||
pack = manifest[pack_name]
|
pack = manifest[pack_name]
|
||||||
src_url = pack["src"]
|
src = pack["src"]
|
||||||
|
|
||||||
IO.puts(
|
IO.puts(
|
||||||
IO.ANSI.format([
|
IO.ANSI.format([
|
||||||
|
@ -57,11 +57,11 @@ def run(["get-packs" | args]) do
|
||||||
:normal,
|
:normal,
|
||||||
" from ",
|
" from ",
|
||||||
:underline,
|
:underline,
|
||||||
src_url
|
src
|
||||||
])
|
])
|
||||||
)
|
)
|
||||||
|
|
||||||
binary_archive = Tesla.get!(client(), src_url).body
|
{:ok, binary_archive} = fetch(src)
|
||||||
archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
|
archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
|
||||||
|
|
||||||
sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]
|
sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]
|
||||||
|
@ -74,8 +74,8 @@ def run(["get-packs" | args]) do
|
||||||
raise "Bad SHA256 for #{pack_name}"
|
raise "Bad SHA256 for #{pack_name}"
|
||||||
end
|
end
|
||||||
|
|
||||||
# The url specified in files should be in the same directory
|
# The location specified in files should be in the same directory
|
||||||
files_url =
|
files_loc =
|
||||||
url_or_path
|
url_or_path
|
||||||
|> Path.dirname()
|
|> Path.dirname()
|
||||||
|> Path.join(pack["files"])
|
|> Path.join(pack["files"])
|
||||||
|
@ -88,11 +88,11 @@ def run(["get-packs" | args]) do
|
||||||
:normal,
|
:normal,
|
||||||
" from ",
|
" from ",
|
||||||
:underline,
|
:underline,
|
||||||
files_url
|
files_loc
|
||||||
])
|
])
|
||||||
)
|
)
|
||||||
|
|
||||||
files = Tesla.get!(client(), files_url).body |> Jason.decode!()
|
files = fetch_and_decode(files_loc)
|
||||||
|
|
||||||
IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
|
IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
|
||||||
|
|
||||||
|
@ -237,15 +237,25 @@ def run(["gen-pack" | args]) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp fetch_manifest(from) do
|
def run(["reload"]) do
|
||||||
Jason.decode!(
|
start_pleroma()
|
||||||
if String.starts_with?(from, "http") do
|
Pleroma.Emoji.reload()
|
||||||
Tesla.get!(client(), from).body
|
IO.puts("Emoji packs have been reloaded.")
|
||||||
else
|
|
||||||
File.read!(from)
|
|
||||||
end
|
end
|
||||||
)
|
|
||||||
|
defp fetch_and_decode(from) do
|
||||||
|
with {:ok, json} <- fetch(from) do
|
||||||
|
Jason.decode!(json)
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fetch("http" <> _ = from) do
|
||||||
|
with {:ok, %{body: body}} <- Tesla.get(client(), from) do
|
||||||
|
{:ok, body}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fetch(path), do: File.read(path)
|
||||||
|
|
||||||
defp parse_global_opts(args) do
|
defp parse_global_opts(args) do
|
||||||
OptionParser.parse(
|
OptionParser.parse(
|
||||||
|
|
|
@ -147,6 +147,7 @@ def run(["gen" | rest]) do
|
||||||
"What directory should media uploads go in (when using the local uploader)?",
|
"What directory should media uploads go in (when using the local uploader)?",
|
||||||
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
|
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
|
||||||
)
|
)
|
||||||
|
|> Path.expand()
|
||||||
|
|
||||||
static_dir =
|
static_dir =
|
||||||
get_option(
|
get_option(
|
||||||
|
@ -155,6 +156,7 @@ def run(["gen" | rest]) do
|
||||||
"What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
|
"What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
|
||||||
Pleroma.Config.get([:instance, :static_dir])
|
Pleroma.Config.get([:instance, :static_dir])
|
||||||
)
|
)
|
||||||
|
|> Path.expand()
|
||||||
|
|
||||||
Config.put([:instance, :static_dir], static_dir)
|
Config.put([:instance, :static_dir], static_dir)
|
||||||
|
|
||||||
|
@ -204,7 +206,7 @@ def run(["gen" | rest]) do
|
||||||
shell_info("Writing the postgres script to #{psql_path}.")
|
shell_info("Writing the postgres script to #{psql_path}.")
|
||||||
File.write(psql_path, result_psql)
|
File.write(psql_path, result_psql)
|
||||||
|
|
||||||
write_robots_txt(indexable, template_dir)
|
write_robots_txt(static_dir, indexable, template_dir)
|
||||||
|
|
||||||
shell_info(
|
shell_info(
|
||||||
"\n All files successfully written! Refer to the installation instructions for your platform for next steps."
|
"\n All files successfully written! Refer to the installation instructions for your platform for next steps."
|
||||||
|
@ -224,15 +226,13 @@ def run(["gen" | rest]) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp write_robots_txt(indexable, template_dir) do
|
defp write_robots_txt(static_dir, indexable, template_dir) do
|
||||||
robots_txt =
|
robots_txt =
|
||||||
EEx.eval_file(
|
EEx.eval_file(
|
||||||
template_dir <> "/robots_txt.eex",
|
template_dir <> "/robots_txt.eex",
|
||||||
indexable: indexable
|
indexable: indexable
|
||||||
)
|
)
|
||||||
|
|
||||||
static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")
|
|
||||||
|
|
||||||
unless File.exists?(static_dir) do
|
unless File.exists?(static_dir) do
|
||||||
File.mkdir_p!(static_dir)
|
File.mkdir_p!(static_dir)
|
||||||
end
|
end
|
||||||
|
|
|
@ -17,30 +17,53 @@ defmodule Mix.Tasks.Pleroma.RefreshCounterCache do
|
||||||
def run([]) do
|
def run([]) do
|
||||||
Mix.Pleroma.start_pleroma()
|
Mix.Pleroma.start_pleroma()
|
||||||
|
|
||||||
["public", "unlisted", "private", "direct"]
|
instances =
|
||||||
|> Enum.each(fn visibility ->
|
Activity
|
||||||
count = status_visibility_count_query(visibility)
|
|> distinct([a], true)
|
||||||
name = "status_visibility_#{visibility}"
|
|> select([a], fragment("split_part(?, '/', 3)", a.actor))
|
||||||
CounterCache.set(name, count)
|
|> Repo.all()
|
||||||
Mix.Pleroma.shell_info("Set #{name} to #{count}")
|
|
||||||
|
instances
|
||||||
|
|> Enum.with_index(1)
|
||||||
|
|> Enum.each(fn {instance, i} ->
|
||||||
|
counters = instance_counters(instance)
|
||||||
|
CounterCache.set(instance, counters)
|
||||||
|
|
||||||
|
Mix.Pleroma.shell_info(
|
||||||
|
"[#{i}/#{length(instances)}] Setting #{instance} counters: #{inspect(counters)}"
|
||||||
|
)
|
||||||
end)
|
end)
|
||||||
|
|
||||||
Mix.Pleroma.shell_info("Done")
|
Mix.Pleroma.shell_info("Done")
|
||||||
end
|
end
|
||||||
|
|
||||||
defp status_visibility_count_query(visibility) do
|
defp instance_counters(instance) do
|
||||||
|
counters = %{"public" => 0, "unlisted" => 0, "private" => 0, "direct" => 0}
|
||||||
|
|
||||||
Activity
|
Activity
|
||||||
|> where(
|
|> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
|
||||||
|
|> where([a], fragment("split_part(?, '/', 3) = ?", a.actor, ^instance))
|
||||||
|
|> select(
|
||||||
[a],
|
[a],
|
||||||
fragment(
|
{fragment(
|
||||||
"activity_visibility(?, ?, ?) = ?",
|
"activity_visibility(?, ?, ?)",
|
||||||
a.actor,
|
a.actor,
|
||||||
a.recipients,
|
a.recipients,
|
||||||
a.data,
|
a.data
|
||||||
^visibility
|
), count(a.id)}
|
||||||
|
)
|
||||||
|
|> group_by(
|
||||||
|
[a],
|
||||||
|
fragment(
|
||||||
|
"activity_visibility(?, ?, ?)",
|
||||||
|
a.actor,
|
||||||
|
a.recipients,
|
||||||
|
a.data
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
|
|> Repo.all(timeout: :timer.minutes(30))
|
||||||
|> Repo.aggregate(:count, :id, timeout: :timer.minutes(30))
|
|> Enum.reduce(counters, fn {visibility, count}, acc ->
|
||||||
|
Map.put(acc, visibility, count)
|
||||||
|
end)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -144,28 +144,30 @@ def run(["reset_password", nickname]) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["unsubscribe", nickname]) do
|
def run(["reset_mfa", nickname]) do
|
||||||
|
start_pleroma()
|
||||||
|
|
||||||
|
with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
|
||||||
|
{:ok, _token} <- Pleroma.MFA.disable(user) do
|
||||||
|
shell_info("Multi-Factor Authentication disabled for #{user.nickname}")
|
||||||
|
else
|
||||||
|
_ ->
|
||||||
|
shell_error("No local user #{nickname}")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def run(["deactivate", nickname]) do
|
||||||
start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
shell_info("Deactivating #{user.nickname}")
|
shell_info("Deactivating #{user.nickname}")
|
||||||
User.deactivate(user)
|
User.deactivate(user)
|
||||||
|
|
||||||
user
|
|
||||||
|> User.get_friends()
|
|
||||||
|> Enum.each(fn friend ->
|
|
||||||
user = User.get_cached_by_id(user.id)
|
|
||||||
|
|
||||||
shell_info("Unsubscribing #{friend.nickname} from #{user.nickname}")
|
|
||||||
User.unfollow(user, friend)
|
|
||||||
end)
|
|
||||||
|
|
||||||
:timer.sleep(500)
|
:timer.sleep(500)
|
||||||
|
|
||||||
user = User.get_cached_by_id(user.id)
|
user = User.get_cached_by_id(user.id)
|
||||||
|
|
||||||
if Enum.empty?(User.get_friends(user)) do
|
if Enum.empty?(Enum.filter(User.get_friends(user), & &1.local)) do
|
||||||
shell_info("Successfully unsubscribed all followers from #{user.nickname}")
|
shell_info("Successfully unsubscribed all local followers from #{user.nickname}")
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
|
@ -173,7 +175,7 @@ def run(["unsubscribe", nickname]) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["unsubscribe_all_from_instance", instance]) do
|
def run(["deactivate_all_from_instance", instance]) do
|
||||||
start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
Pleroma.User.Query.build(%{nickname: "@#{instance}"})
|
Pleroma.User.Query.build(%{nickname: "@#{instance}"})
|
||||||
|
@ -181,7 +183,7 @@ def run(["unsubscribe_all_from_instance", instance]) do
|
||||||
|> Stream.each(fn users ->
|
|> Stream.each(fn users ->
|
||||||
users
|
users
|
||||||
|> Enum.each(fn user ->
|
|> Enum.each(fn user ->
|
||||||
run(["unsubscribe", user.nickname])
|
run(["deactivate", user.nickname])
|
||||||
end)
|
end)
|
||||||
end)
|
end)
|
||||||
|> Stream.run()
|
|> Stream.run()
|
||||||
|
|
|
@ -24,16 +24,6 @@ defmodule Pleroma.Activity do
|
||||||
|
|
||||||
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
|
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
|
||||||
|
|
||||||
# https://github.com/tootsuite/mastodon/blob/master/app/models/notification.rb#L19
|
|
||||||
@mastodon_notification_types %{
|
|
||||||
"Create" => "mention",
|
|
||||||
"Follow" => ["follow", "follow_request"],
|
|
||||||
"Announce" => "reblog",
|
|
||||||
"Like" => "favourite",
|
|
||||||
"Move" => "move",
|
|
||||||
"EmojiReact" => "pleroma:emoji_reaction"
|
|
||||||
}
|
|
||||||
|
|
||||||
schema "activities" do
|
schema "activities" do
|
||||||
field(:data, :map)
|
field(:data, :map)
|
||||||
field(:local, :boolean, default: true)
|
field(:local, :boolean, default: true)
|
||||||
|
@ -41,6 +31,10 @@ defmodule Pleroma.Activity do
|
||||||
field(:recipients, {:array, :string}, default: [])
|
field(:recipients, {:array, :string}, default: [])
|
||||||
field(:thread_muted?, :boolean, virtual: true)
|
field(:thread_muted?, :boolean, virtual: true)
|
||||||
|
|
||||||
|
# A field that can be used if you need to join some kind of other
|
||||||
|
# id to order / paginate this field by
|
||||||
|
field(:pagination_id, :string, virtual: true)
|
||||||
|
|
||||||
# This is a fake relation,
|
# This is a fake relation,
|
||||||
# do not use outside of with_preloaded_user_actor/with_joined_user_actor
|
# do not use outside of with_preloaded_user_actor/with_joined_user_actor
|
||||||
has_one(:user_actor, User, on_delete: :nothing, foreign_key: :id)
|
has_one(:user_actor, User, on_delete: :nothing, foreign_key: :id)
|
||||||
|
@ -300,32 +294,6 @@ def follow_accepted?(
|
||||||
|
|
||||||
def follow_accepted?(_), do: false
|
def follow_accepted?(_), do: false
|
||||||
|
|
||||||
@spec mastodon_notification_type(Activity.t()) :: String.t() | nil
|
|
||||||
|
|
||||||
for {ap_type, type} <- @mastodon_notification_types, not is_list(type) do
|
|
||||||
def mastodon_notification_type(%Activity{data: %{"type" => unquote(ap_type)}}),
|
|
||||||
do: unquote(type)
|
|
||||||
end
|
|
||||||
|
|
||||||
def mastodon_notification_type(%Activity{data: %{"type" => "Follow"}} = activity) do
|
|
||||||
if follow_accepted?(activity) do
|
|
||||||
"follow"
|
|
||||||
else
|
|
||||||
"follow_request"
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def mastodon_notification_type(%Activity{}), do: nil
|
|
||||||
|
|
||||||
@spec from_mastodon_notification_type(String.t()) :: String.t() | nil
|
|
||||||
@doc "Converts Mastodon notification type to AR activity type"
|
|
||||||
def from_mastodon_notification_type(type) do
|
|
||||||
with {k, _v} <-
|
|
||||||
Enum.find(@mastodon_notification_types, fn {_k, v} -> type in List.wrap(v) end) do
|
|
||||||
k
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def all_by_actor_and_id(actor, status_ids \\ [])
|
def all_by_actor_and_id(actor, status_ids \\ [])
|
||||||
def all_by_actor_and_id(_actor, []), do: []
|
def all_by_actor_and_id(_actor, []), do: []
|
||||||
|
|
||||||
|
|
|
@ -24,10 +24,7 @@ def by_ap_id(query \\ Activity, ap_id) do
|
||||||
|
|
||||||
@spec by_actor(query, String.t()) :: query
|
@spec by_actor(query, String.t()) :: query
|
||||||
def by_actor(query \\ Activity, actor) do
|
def by_actor(query \\ Activity, actor) do
|
||||||
from(
|
from(a in query, where: a.actor == ^actor)
|
||||||
activity in query,
|
|
||||||
where: fragment("(?)->>'actor' = ?", activity.data, ^actor)
|
|
||||||
)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec by_author(query, User.t()) :: query
|
@spec by_author(query, User.t()) :: query
|
||||||
|
|
|
@ -39,7 +39,7 @@ def start(_type, _args) do
|
||||||
Pleroma.HTML.compile_scrubbers()
|
Pleroma.HTML.compile_scrubbers()
|
||||||
Config.DeprecationWarnings.warn()
|
Config.DeprecationWarnings.warn()
|
||||||
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
|
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
|
||||||
Pleroma.Repo.check_migrations_applied!()
|
Pleroma.ApplicationRequirements.verify!()
|
||||||
setup_instrumenters()
|
setup_instrumenters()
|
||||||
load_custom_modules()
|
load_custom_modules()
|
||||||
|
|
||||||
|
@ -148,7 +148,8 @@ defp cachex_children do
|
||||||
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
|
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
|
||||||
build_cachex("web_resp", limit: 2500),
|
build_cachex("web_resp", limit: 2500),
|
||||||
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
|
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
|
||||||
build_cachex("failed_proxy_url", limit: 2500)
|
build_cachex("failed_proxy_url", limit: 2500),
|
||||||
|
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,107 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.ApplicationRequirements do
|
||||||
|
@moduledoc """
|
||||||
|
The module represents the collection of validations to runs before start server.
|
||||||
|
"""
|
||||||
|
|
||||||
|
defmodule VerifyError, do: defexception([:message])
|
||||||
|
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
@spec verify!() :: :ok | VerifyError.t()
|
||||||
|
def verify! do
|
||||||
|
:ok
|
||||||
|
|> check_migrations_applied!()
|
||||||
|
|> check_rum!()
|
||||||
|
|> handle_result()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp handle_result(:ok), do: :ok
|
||||||
|
defp handle_result({:error, message}), do: raise(VerifyError, message: message)
|
||||||
|
|
||||||
|
# Checks for pending migrations.
|
||||||
|
#
|
||||||
|
def check_migrations_applied!(:ok) do
|
||||||
|
unless Pleroma.Config.get(
|
||||||
|
[:i_am_aware_this_may_cause_data_loss, :disable_migration_check],
|
||||||
|
false
|
||||||
|
) do
|
||||||
|
{_, res, _} =
|
||||||
|
Ecto.Migrator.with_repo(Pleroma.Repo, fn repo ->
|
||||||
|
down_migrations =
|
||||||
|
Ecto.Migrator.migrations(repo)
|
||||||
|
|> Enum.reject(fn
|
||||||
|
{:up, _, _} -> true
|
||||||
|
{:down, _, _} -> false
|
||||||
|
end)
|
||||||
|
|
||||||
|
if length(down_migrations) > 0 do
|
||||||
|
down_migrations_text =
|
||||||
|
Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)
|
||||||
|
|
||||||
|
Logger.error(
|
||||||
|
"The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
|
||||||
|
)
|
||||||
|
|
||||||
|
{:error, "Unapplied Migrations detected"}
|
||||||
|
else
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|
||||||
|
res
|
||||||
|
else
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def check_migrations_applied!(result), do: result
|
||||||
|
|
||||||
|
# Checks for settings of RUM indexes.
|
||||||
|
#
|
||||||
|
defp check_rum!(:ok) do
|
||||||
|
{_, res, _} =
|
||||||
|
Ecto.Migrator.with_repo(Pleroma.Repo, fn repo ->
|
||||||
|
migrate =
|
||||||
|
from(o in "columns",
|
||||||
|
where: o.table_name == "objects",
|
||||||
|
where: o.column_name == "fts_content"
|
||||||
|
)
|
||||||
|
|> repo.exists?(prefix: "information_schema")
|
||||||
|
|
||||||
|
setting = Pleroma.Config.get([:database, :rum_enabled], false)
|
||||||
|
|
||||||
|
do_check_rum!(setting, migrate)
|
||||||
|
end)
|
||||||
|
|
||||||
|
res
|
||||||
|
end
|
||||||
|
|
||||||
|
defp check_rum!(result), do: result
|
||||||
|
|
||||||
|
defp do_check_rum!(setting, migrate) do
|
||||||
|
case {setting, migrate} do
|
||||||
|
{true, false} ->
|
||||||
|
Logger.error(
|
||||||
|
"Use `RUM` index is enabled, but were not applied migrations for it.\nIf you want to start Pleroma anyway, set\nconfig :pleroma, :database, rum_enabled: false\nOtherwise apply the following migrations:\n`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`"
|
||||||
|
)
|
||||||
|
|
||||||
|
{:error, "Unapplied RUM Migrations detected"}
|
||||||
|
|
||||||
|
{false, true} ->
|
||||||
|
Logger.error(
|
||||||
|
"Detected applied migrations to use `RUM` index, but `RUM` isn't enable in settings.\nIf you want to use `RUM`, set\nconfig :pleroma, :database, rum_enabled: true\nOtherwise roll `RUM` migrations back.\n`mix ecto.rollback --migrations-path priv/repo/optional_migrations/rum_indexing/`"
|
||||||
|
)
|
||||||
|
|
||||||
|
{:error, "RUM Migrations detected"}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -92,10 +92,10 @@ def handle_command(state, "home") do
|
||||||
|
|
||||||
params =
|
params =
|
||||||
%{}
|
%{}
|
||||||
|> Map.put("type", ["Create"])
|
|> Map.put(:type, ["Create"])
|
||||||
|> Map.put("blocking_user", user)
|
|> Map.put(:blocking_user, user)
|
||||||
|> Map.put("muting_user", user)
|
|> Map.put(:muting_user, user)
|
||||||
|> Map.put("user", user)
|
|> Map.put(:user, user)
|
||||||
|
|
||||||
activities =
|
activities =
|
||||||
[user.ap_id | Pleroma.User.following(user)]
|
[user.ap_id | Pleroma.User.following(user)]
|
||||||
|
|
|
@ -0,0 +1,72 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Chat do
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.User
|
||||||
|
|
||||||
|
@moduledoc """
|
||||||
|
Chat keeps a reference to ChatMessage conversations between a user and an recipient. The recipient can be a user (for now) or a group (not implemented yet).
|
||||||
|
|
||||||
|
It is a helper only, to make it easy to display a list of chats with other people, ordered by last bump. The actual messages are retrieved by querying the recipients of the ChatMessages.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
|
||||||
|
|
||||||
|
schema "chats" do
|
||||||
|
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||||
|
field(:recipient, :string)
|
||||||
|
|
||||||
|
timestamps()
|
||||||
|
end
|
||||||
|
|
||||||
|
def changeset(struct, params) do
|
||||||
|
struct
|
||||||
|
|> cast(params, [:user_id, :recipient])
|
||||||
|
|> validate_change(:recipient, fn
|
||||||
|
:recipient, recipient ->
|
||||||
|
case User.get_cached_by_ap_id(recipient) do
|
||||||
|
nil -> [recipient: "must be an existing user"]
|
||||||
|
_ -> []
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|> validate_required([:user_id, :recipient])
|
||||||
|
|> unique_constraint(:user_id, name: :chats_user_id_recipient_index)
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_by_id(id) do
|
||||||
|
__MODULE__
|
||||||
|
|> Repo.get(id)
|
||||||
|
end
|
||||||
|
|
||||||
|
def get(user_id, recipient) do
|
||||||
|
__MODULE__
|
||||||
|
|> Repo.get_by(user_id: user_id, recipient: recipient)
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_or_create(user_id, recipient) do
|
||||||
|
%__MODULE__{}
|
||||||
|
|> changeset(%{user_id: user_id, recipient: recipient})
|
||||||
|
|> Repo.insert(
|
||||||
|
# Need to set something, otherwise we get nothing back at all
|
||||||
|
on_conflict: [set: [recipient: recipient]],
|
||||||
|
returning: true,
|
||||||
|
conflict_target: [:user_id, :recipient]
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def bump_or_create(user_id, recipient) do
|
||||||
|
%__MODULE__{}
|
||||||
|
|> changeset(%{user_id: user_id, recipient: recipient})
|
||||||
|
|> Repo.insert(
|
||||||
|
on_conflict: [set: [updated_at: NaiveDateTime.utc_now()]],
|
||||||
|
returning: true,
|
||||||
|
conflict_target: [:user_id, :recipient]
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,117 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Chat.MessageReference do
|
||||||
|
@moduledoc """
|
||||||
|
A reference that builds a relation between an AP chat message that a user can see and whether it has been seen
|
||||||
|
by them, or should be displayed to them. Used to build the chat view that is presented to the user.
|
||||||
|
"""
|
||||||
|
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
alias Pleroma.Chat
|
||||||
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
|
@primary_key {:id, FlakeId.Ecto.Type, autogenerate: true}
|
||||||
|
|
||||||
|
schema "chat_message_references" do
|
||||||
|
belongs_to(:object, Object)
|
||||||
|
belongs_to(:chat, Chat, type: FlakeId.Ecto.CompatType)
|
||||||
|
|
||||||
|
field(:unread, :boolean, default: true)
|
||||||
|
|
||||||
|
timestamps()
|
||||||
|
end
|
||||||
|
|
||||||
|
def changeset(struct, params) do
|
||||||
|
struct
|
||||||
|
|> cast(params, [:object_id, :chat_id, :unread])
|
||||||
|
|> validate_required([:object_id, :chat_id, :unread])
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_by_id(id) do
|
||||||
|
__MODULE__
|
||||||
|
|> Repo.get(id)
|
||||||
|
|> Repo.preload(:object)
|
||||||
|
end
|
||||||
|
|
||||||
|
def delete(cm_ref) do
|
||||||
|
cm_ref
|
||||||
|
|> Repo.delete()
|
||||||
|
end
|
||||||
|
|
||||||
|
def delete_for_object(%{id: object_id}) do
|
||||||
|
from(cr in __MODULE__,
|
||||||
|
where: cr.object_id == ^object_id
|
||||||
|
)
|
||||||
|
|> Repo.delete_all()
|
||||||
|
end
|
||||||
|
|
||||||
|
def for_chat_and_object(%{id: chat_id}, %{id: object_id}) do
|
||||||
|
__MODULE__
|
||||||
|
|> Repo.get_by(chat_id: chat_id, object_id: object_id)
|
||||||
|
|> Repo.preload(:object)
|
||||||
|
end
|
||||||
|
|
||||||
|
def for_chat_query(chat) do
|
||||||
|
from(cr in __MODULE__,
|
||||||
|
where: cr.chat_id == ^chat.id,
|
||||||
|
order_by: [desc: :id],
|
||||||
|
preload: [:object]
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def last_message_for_chat(chat) do
|
||||||
|
chat
|
||||||
|
|> for_chat_query()
|
||||||
|
|> limit(1)
|
||||||
|
|> Repo.one()
|
||||||
|
end
|
||||||
|
|
||||||
|
def create(chat, object, unread) do
|
||||||
|
params = %{
|
||||||
|
chat_id: chat.id,
|
||||||
|
object_id: object.id,
|
||||||
|
unread: unread
|
||||||
|
}
|
||||||
|
|
||||||
|
%__MODULE__{}
|
||||||
|
|> changeset(params)
|
||||||
|
|> Repo.insert()
|
||||||
|
end
|
||||||
|
|
||||||
|
def unread_count_for_chat(chat) do
|
||||||
|
chat
|
||||||
|
|> for_chat_query()
|
||||||
|
|> where([cmr], cmr.unread == true)
|
||||||
|
|> Repo.aggregate(:count)
|
||||||
|
end
|
||||||
|
|
||||||
|
def mark_as_read(cm_ref) do
|
||||||
|
cm_ref
|
||||||
|
|> changeset(%{unread: false})
|
||||||
|
|> Repo.update()
|
||||||
|
end
|
||||||
|
|
||||||
|
def set_all_seen_for_chat(chat, last_read_id \\ nil) do
|
||||||
|
query =
|
||||||
|
chat
|
||||||
|
|> for_chat_query()
|
||||||
|
|> exclude(:order_by)
|
||||||
|
|> exclude(:preload)
|
||||||
|
|> where([cmr], cmr.unread == true)
|
||||||
|
|
||||||
|
if last_read_id do
|
||||||
|
query
|
||||||
|
|> where([cmr], cmr.id <= ^last_read_id)
|
||||||
|
else
|
||||||
|
query
|
||||||
|
end
|
||||||
|
|> Repo.update_all(set: [unread: false])
|
||||||
|
end
|
||||||
|
end
|
|
@ -6,7 +6,7 @@ defmodule Pleroma.ConfigDB do
|
||||||
use Ecto.Schema
|
use Ecto.Schema
|
||||||
|
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
import Ecto.Query
|
import Ecto.Query, only: [select: 3]
|
||||||
import Pleroma.Web.Gettext
|
import Pleroma.Web.Gettext
|
||||||
|
|
||||||
alias __MODULE__
|
alias __MODULE__
|
||||||
|
@ -14,16 +14,6 @@ defmodule Pleroma.ConfigDB do
|
||||||
|
|
||||||
@type t :: %__MODULE__{}
|
@type t :: %__MODULE__{}
|
||||||
|
|
||||||
@full_key_update [
|
|
||||||
{:pleroma, :ecto_repos},
|
|
||||||
{:quack, :meta},
|
|
||||||
{:mime, :types},
|
|
||||||
{:cors_plug, [:max_age, :methods, :expose, :headers]},
|
|
||||||
{:auto_linker, :opts},
|
|
||||||
{:swarm, :node_blacklist},
|
|
||||||
{:logger, :backends}
|
|
||||||
]
|
|
||||||
|
|
||||||
@full_subkey_update [
|
@full_subkey_update [
|
||||||
{:pleroma, :assets, :mascots},
|
{:pleroma, :assets, :mascots},
|
||||||
{:pleroma, :emoji, :groups},
|
{:pleroma, :emoji, :groups},
|
||||||
|
@ -32,14 +22,10 @@ defmodule Pleroma.ConfigDB do
|
||||||
{:pleroma, :mrf_keyword, :replace}
|
{:pleroma, :mrf_keyword, :replace}
|
||||||
]
|
]
|
||||||
|
|
||||||
@regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
|
|
||||||
|
|
||||||
@delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
|
|
||||||
|
|
||||||
schema "config" do
|
schema "config" do
|
||||||
field(:key, :string)
|
field(:key, Pleroma.EctoType.Config.Atom)
|
||||||
field(:group, :string)
|
field(:group, Pleroma.EctoType.Config.Atom)
|
||||||
field(:value, :binary)
|
field(:value, Pleroma.EctoType.Config.BinaryValue)
|
||||||
field(:db, {:array, :string}, virtual: true, default: [])
|
field(:db, {:array, :string}, virtual: true, default: [])
|
||||||
|
|
||||||
timestamps()
|
timestamps()
|
||||||
|
@ -51,10 +37,6 @@ def get_all_as_keyword do
|
||||||
|> select([c], {c.group, c.key, c.value})
|
|> select([c], {c.group, c.key, c.value})
|
||||||
|> Repo.all()
|
|> Repo.all()
|
||||||
|> Enum.reduce([], fn {group, key, value}, acc ->
|
|> Enum.reduce([], fn {group, key, value}, acc ->
|
||||||
group = ConfigDB.from_string(group)
|
|
||||||
key = ConfigDB.from_string(key)
|
|
||||||
value = from_binary(value)
|
|
||||||
|
|
||||||
Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
|
Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
@ -64,50 +46,41 @@ def get_by_params(params), do: Repo.get_by(ConfigDB, params)
|
||||||
|
|
||||||
@spec changeset(ConfigDB.t(), map()) :: Changeset.t()
|
@spec changeset(ConfigDB.t(), map()) :: Changeset.t()
|
||||||
def changeset(config, params \\ %{}) do
|
def changeset(config, params \\ %{}) do
|
||||||
params = Map.put(params, :value, transform(params[:value]))
|
|
||||||
|
|
||||||
config
|
config
|
||||||
|> cast(params, [:key, :group, :value])
|
|> cast(params, [:key, :group, :value])
|
||||||
|> validate_required([:key, :group, :value])
|
|> validate_required([:key, :group, :value])
|
||||||
|> unique_constraint(:key, name: :config_group_key_index)
|
|> unique_constraint(:key, name: :config_group_key_index)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
defp create(params) do
|
||||||
def create(params) do
|
|
||||||
%ConfigDB{}
|
%ConfigDB{}
|
||||||
|> changeset(params)
|
|> changeset(params)
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
defp update(%ConfigDB{} = config, %{value: value}) do
|
||||||
def update(%ConfigDB{} = config, %{value: value}) do
|
|
||||||
config
|
config
|
||||||
|> changeset(%{value: value})
|
|> changeset(%{value: value})
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec get_db_keys(ConfigDB.t()) :: [String.t()]
|
|
||||||
def get_db_keys(%ConfigDB{} = config) do
|
|
||||||
config.value
|
|
||||||
|> ConfigDB.from_binary()
|
|
||||||
|> get_db_keys(config.key)
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec get_db_keys(keyword(), any()) :: [String.t()]
|
@spec get_db_keys(keyword(), any()) :: [String.t()]
|
||||||
def get_db_keys(value, key) do
|
def get_db_keys(value, key) do
|
||||||
|
keys =
|
||||||
if Keyword.keyword?(value) do
|
if Keyword.keyword?(value) do
|
||||||
value |> Keyword.keys() |> Enum.map(&convert(&1))
|
Keyword.keys(value)
|
||||||
else
|
else
|
||||||
[convert(key)]
|
[key]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
Enum.map(keys, &to_json_types(&1))
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
|
@spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
|
||||||
def merge_group(group, key, old_value, new_value) do
|
def merge_group(group, key, old_value, new_value) do
|
||||||
new_keys = to_map_set(new_value)
|
new_keys = to_mapset(new_value)
|
||||||
|
|
||||||
intersect_keys =
|
intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list()
|
||||||
old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
|
|
||||||
|
|
||||||
merged_value = ConfigDB.merge(old_value, new_value)
|
merged_value = ConfigDB.merge(old_value, new_value)
|
||||||
|
|
||||||
|
@ -120,12 +93,10 @@ def merge_group(group, key, old_value, new_value) do
|
||||||
[]
|
[]
|
||||||
end)
|
end)
|
||||||
|> List.flatten()
|
|> List.flatten()
|
||||||
|> Enum.reduce(merged_value, fn subkey, acc ->
|
|> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1]))
|
||||||
Keyword.put(acc, subkey, new_value[subkey])
|
|
||||||
end)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp to_map_set(keyword) do
|
defp to_mapset(keyword) do
|
||||||
keyword
|
keyword
|
||||||
|> Keyword.keys()
|
|> Keyword.keys()
|
||||||
|> MapSet.new()
|
|> MapSet.new()
|
||||||
|
@ -159,57 +130,55 @@ defp deep_merge(_key, value1, value2) do
|
||||||
|
|
||||||
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
def update_or_create(params) do
|
def update_or_create(params) do
|
||||||
|
params = Map.put(params, :value, to_elixir_types(params[:value]))
|
||||||
search_opts = Map.take(params, [:group, :key])
|
search_opts = Map.take(params, [:group, :key])
|
||||||
|
|
||||||
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
||||||
{:partial_update, true, config} <-
|
{_, true, config} <- {:partial_update, can_be_partially_updated?(config), config},
|
||||||
{:partial_update, can_be_partially_updated?(config), config},
|
{_, true, config} <-
|
||||||
old_value <- from_binary(config.value),
|
{:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do
|
||||||
transformed_value <- do_transform(params[:value]),
|
new_value = merge_group(config.group, config.key, config.value, params[:value])
|
||||||
{:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
|
update(config, %{value: new_value})
|
||||||
new_value <-
|
|
||||||
merge_group(
|
|
||||||
ConfigDB.from_string(config.group),
|
|
||||||
ConfigDB.from_string(config.key),
|
|
||||||
old_value,
|
|
||||||
transformed_value
|
|
||||||
) do
|
|
||||||
ConfigDB.update(config, %{value: new_value})
|
|
||||||
else
|
else
|
||||||
{reason, false, config} when reason in [:partial_update, :can_be_merged] ->
|
{reason, false, config} when reason in [:partial_update, :can_be_merged] ->
|
||||||
ConfigDB.update(config, params)
|
update(config, params)
|
||||||
|
|
||||||
nil ->
|
nil ->
|
||||||
ConfigDB.create(params)
|
create(params)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
|
defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
|
||||||
|
|
||||||
defp only_full_update?(%ConfigDB{} = config) do
|
defp only_full_update?(%ConfigDB{group: group, key: key}) do
|
||||||
config_group = ConfigDB.from_string(config.group)
|
full_key_update = [
|
||||||
config_key = ConfigDB.from_string(config.key)
|
{:pleroma, :ecto_repos},
|
||||||
|
{:quack, :meta},
|
||||||
|
{:mime, :types},
|
||||||
|
{:cors_plug, [:max_age, :methods, :expose, :headers]},
|
||||||
|
{:auto_linker, :opts},
|
||||||
|
{:swarm, :node_blacklist},
|
||||||
|
{:logger, :backends}
|
||||||
|
]
|
||||||
|
|
||||||
Enum.any?(@full_key_update, fn
|
Enum.any?(full_key_update, fn
|
||||||
{group, key} when is_list(key) ->
|
{s_group, s_key} ->
|
||||||
config_group == group and config_key in key
|
group == s_group and ((is_list(s_key) and key in s_key) or key == s_key)
|
||||||
|
|
||||||
{group, key} ->
|
|
||||||
config_group == group and config_key == key
|
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec delete(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
@spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def delete(%ConfigDB{} = config), do: Repo.delete(config)
|
||||||
|
|
||||||
def delete(params) do
|
def delete(params) do
|
||||||
search_opts = Map.delete(params, :subkeys)
|
search_opts = Map.delete(params, :subkeys)
|
||||||
|
|
||||||
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
||||||
{config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
|
{config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
|
||||||
old_value <- from_binary(config.value),
|
keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)),
|
||||||
keys <- Enum.map(sub_keys, &do_transform_string(&1)),
|
{_, config, new_value} when new_value != [] <-
|
||||||
{:partial_remove, config, new_value} when new_value != [] <-
|
{:partial_remove, config, Keyword.drop(config.value, keys)} do
|
||||||
{:partial_remove, config, Keyword.drop(old_value, keys)} do
|
update(config, %{value: new_value})
|
||||||
ConfigDB.update(config, %{value: new_value})
|
|
||||||
else
|
else
|
||||||
{:partial_remove, config, []} ->
|
{:partial_remove, config, []} ->
|
||||||
Repo.delete(config)
|
Repo.delete(config)
|
||||||
|
@ -225,37 +194,32 @@ def delete(params) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec from_binary(binary()) :: term()
|
@spec to_json_types(term()) :: map() | list() | boolean() | String.t()
|
||||||
def from_binary(binary), do: :erlang.binary_to_term(binary)
|
def to_json_types(entity) when is_list(entity) do
|
||||||
|
Enum.map(entity, &to_json_types/1)
|
||||||
@spec from_binary_with_convert(binary()) :: any()
|
|
||||||
def from_binary_with_convert(binary) do
|
|
||||||
binary
|
|
||||||
|> from_binary()
|
|
||||||
|> do_convert()
|
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec from_string(String.t()) :: atom() | no_return()
|
def to_json_types(%Regex{} = entity), do: inspect(entity)
|
||||||
def from_string(string), do: do_transform_string(string)
|
|
||||||
|
|
||||||
@spec convert(any()) :: any()
|
def to_json_types(entity) when is_map(entity) do
|
||||||
def convert(entity), do: do_convert(entity)
|
Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end)
|
||||||
|
|
||||||
defp do_convert(entity) when is_list(entity) do
|
|
||||||
for v <- entity, into: [], do: do_convert(v)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert(%Regex{} = entity), do: inspect(entity)
|
def to_json_types({:args, args}) when is_list(args) do
|
||||||
|
arguments =
|
||||||
|
Enum.map(args, fn
|
||||||
|
arg when is_tuple(arg) -> inspect(arg)
|
||||||
|
arg -> to_json_types(arg)
|
||||||
|
end)
|
||||||
|
|
||||||
defp do_convert(entity) when is_map(entity) do
|
%{"tuple" => [":args", arguments]}
|
||||||
for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert({:proxy_url, {type, :localhost, port}}) do
|
def to_json_types({:proxy_url, {type, :localhost, port}}) do
|
||||||
%{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]}
|
%{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
|
def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do
|
||||||
ip =
|
ip =
|
||||||
host
|
host
|
||||||
|> :inet_parse.ntoa()
|
|> :inet_parse.ntoa()
|
||||||
|
@ -264,66 +228,64 @@ defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
|
||||||
%{
|
%{
|
||||||
"tuple" => [
|
"tuple" => [
|
||||||
":proxy_url",
|
":proxy_url",
|
||||||
%{"tuple" => [do_convert(type), ip, port]}
|
%{"tuple" => [to_json_types(type), ip, port]}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert({:proxy_url, {type, host, port}}) do
|
def to_json_types({:proxy_url, {type, host, port}}) do
|
||||||
%{
|
%{
|
||||||
"tuple" => [
|
"tuple" => [
|
||||||
":proxy_url",
|
":proxy_url",
|
||||||
%{"tuple" => [do_convert(type), to_string(host), port]}
|
%{"tuple" => [to_json_types(type), to_string(host), port]}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
|
def to_json_types({:partial_chain, entity}),
|
||||||
|
do: %{"tuple" => [":partial_chain", inspect(entity)]}
|
||||||
|
|
||||||
defp do_convert(entity) when is_tuple(entity) do
|
def to_json_types(entity) when is_tuple(entity) do
|
||||||
value =
|
value =
|
||||||
entity
|
entity
|
||||||
|> Tuple.to_list()
|
|> Tuple.to_list()
|
||||||
|> do_convert()
|
|> to_json_types()
|
||||||
|
|
||||||
%{"tuple" => value}
|
%{"tuple" => value}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
|
def to_json_types(entity) when is_binary(entity), do: entity
|
||||||
|
|
||||||
|
def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
|
||||||
entity
|
entity
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert(entity)
|
def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
|
||||||
when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
|
|
||||||
":#{entity}"
|
":#{entity}"
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert(entity) when is_atom(entity), do: inspect(entity)
|
def to_json_types(entity) when is_atom(entity), do: inspect(entity)
|
||||||
|
|
||||||
defp do_convert(entity) when is_binary(entity), do: entity
|
@spec to_elixir_types(boolean() | String.t() | map() | list()) :: term()
|
||||||
|
def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do
|
||||||
|
arguments =
|
||||||
|
Enum.map(args, fn arg ->
|
||||||
|
if String.contains?(arg, ["{", "}"]) do
|
||||||
|
{elem, []} = Code.eval_string(arg)
|
||||||
|
elem
|
||||||
|
else
|
||||||
|
to_elixir_types(arg)
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|
||||||
@spec transform(any()) :: binary() | no_return()
|
{:args, arguments}
|
||||||
def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
|
|
||||||
entity
|
|
||||||
|> do_transform()
|
|
||||||
|> to_binary()
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def transform(entity), do: to_binary(entity)
|
def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
|
||||||
|
{:proxy_url, {string_to_elixir_types(type), parse_host(host), port}}
|
||||||
@spec transform_with_out_binary(any()) :: any()
|
|
||||||
def transform_with_out_binary(entity), do: do_transform(entity)
|
|
||||||
|
|
||||||
@spec to_binary(any()) :: binary()
|
|
||||||
def to_binary(entity), do: :erlang.term_to_binary(entity)
|
|
||||||
|
|
||||||
defp do_transform(%Regex{} = entity), do: entity
|
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
|
|
||||||
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
|
def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do
|
||||||
{partial_chain, []} =
|
{partial_chain, []} =
|
||||||
entity
|
entity
|
||||||
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|
||||||
|
@ -332,25 +294,51 @@ defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
|
||||||
{:partial_chain, partial_chain}
|
{:partial_chain, partial_chain}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => entity}) do
|
def to_elixir_types(%{"tuple" => entity}) do
|
||||||
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
|
Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(entity) when is_map(entity) do
|
def to_elixir_types(entity) when is_map(entity) do
|
||||||
for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
|
Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(entity) when is_list(entity) do
|
def to_elixir_types(entity) when is_list(entity) do
|
||||||
for v <- entity, into: [], do: do_transform(v)
|
Enum.map(entity, &to_elixir_types/1)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(entity) when is_binary(entity) do
|
def to_elixir_types(entity) when is_binary(entity) do
|
||||||
entity
|
entity
|
||||||
|> String.trim()
|
|> String.trim()
|
||||||
|> do_transform_string()
|
|> string_to_elixir_types()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(entity), do: entity
|
def to_elixir_types(entity), do: entity
|
||||||
|
|
||||||
|
@spec string_to_elixir_types(String.t()) ::
|
||||||
|
atom() | Regex.t() | module() | String.t() | no_return()
|
||||||
|
def string_to_elixir_types("~r" <> _pattern = regex) do
|
||||||
|
pattern =
|
||||||
|
~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
|
||||||
|
|
||||||
|
delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
|
||||||
|
|
||||||
|
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
|
||||||
|
Regex.named_captures(pattern, regex),
|
||||||
|
{:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter),
|
||||||
|
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
|
||||||
|
result
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)
|
||||||
|
|
||||||
|
def string_to_elixir_types(value) do
|
||||||
|
if module_name?(value) do
|
||||||
|
String.to_existing_atom("Elixir." <> value)
|
||||||
|
else
|
||||||
|
value
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
defp parse_host("localhost"), do: :localhost
|
defp parse_host("localhost"), do: :localhost
|
||||||
|
|
||||||
|
@ -387,27 +375,8 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform_string("~r" <> _pattern = regex) do
|
@spec module_name?(String.t()) :: boolean()
|
||||||
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
|
def module_name?(string) do
|
||||||
Regex.named_captures(@regex, regex),
|
|
||||||
{:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
|
|
||||||
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
|
|
||||||
result
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform_string(":" <> atom), do: String.to_atom(atom)
|
|
||||||
|
|
||||||
defp do_transform_string(value) do
|
|
||||||
if is_module_name?(value) do
|
|
||||||
String.to_existing_atom("Elixir." <> value)
|
|
||||||
else
|
|
||||||
value
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec is_module_name?(String.t()) :: boolean()
|
|
||||||
def is_module_name?(string) do
|
|
||||||
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
|
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
|
||||||
string in ["Oban", "Ueberauth", "ExSyslogger"]
|
string in ["Oban", "Ueberauth", "ExSyslogger"]
|
||||||
end
|
end
|
||||||
|
|
|
@ -3,10 +3,25 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Config.DeprecationWarnings do
|
defmodule Pleroma.Config.DeprecationWarnings do
|
||||||
|
alias Pleroma.Config
|
||||||
|
|
||||||
require Logger
|
require Logger
|
||||||
|
alias Pleroma.Config
|
||||||
|
|
||||||
|
@type config_namespace() :: [atom()]
|
||||||
|
@type config_map() :: {config_namespace(), config_namespace(), String.t()}
|
||||||
|
|
||||||
|
@mrf_config_map [
|
||||||
|
{[:instance, :rewrite_policy], [:mrf, :policies],
|
||||||
|
"\n* `config :pleroma, :instance, rewrite_policy` is now `config :pleroma, :mrf, policies`"},
|
||||||
|
{[:instance, :mrf_transparency], [:mrf, :transparency],
|
||||||
|
"\n* `config :pleroma, :instance, mrf_transparency` is now `config :pleroma, :mrf, transparency`"},
|
||||||
|
{[:instance, :mrf_transparency_exclusions], [:mrf, :transparency_exclusions],
|
||||||
|
"\n* `config :pleroma, :instance, mrf_transparency_exclusions` is now `config :pleroma, :mrf, transparency_exclusions`"}
|
||||||
|
]
|
||||||
|
|
||||||
def check_hellthread_threshold do
|
def check_hellthread_threshold do
|
||||||
if Pleroma.Config.get([:mrf_hellthread, :threshold]) do
|
if Config.get([:mrf_hellthread, :threshold]) do
|
||||||
Logger.warn("""
|
Logger.warn("""
|
||||||
!!!DEPRECATION WARNING!!!
|
!!!DEPRECATION WARNING!!!
|
||||||
You are using the old configuration mechanism for the hellthread filter. Please check config.md.
|
You are using the old configuration mechanism for the hellthread filter. Please check config.md.
|
||||||
|
@ -14,7 +29,59 @@ def check_hellthread_threshold do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def mrf_user_allowlist do
|
||||||
|
config = Config.get(:mrf_user_allowlist)
|
||||||
|
|
||||||
|
if config && Enum.any?(config, fn {k, _} -> is_atom(k) end) do
|
||||||
|
rewritten =
|
||||||
|
Enum.reduce(Config.get(:mrf_user_allowlist), Map.new(), fn {k, v}, acc ->
|
||||||
|
Map.put(acc, to_string(k), v)
|
||||||
|
end)
|
||||||
|
|
||||||
|
Config.put(:mrf_user_allowlist, rewritten)
|
||||||
|
|
||||||
|
Logger.error("""
|
||||||
|
!!!DEPRECATION WARNING!!!
|
||||||
|
As of Pleroma 2.0.7, the `mrf_user_allowlist` setting changed of format.
|
||||||
|
Pleroma 2.1 will remove support for the old format. Please change your configuration to match this:
|
||||||
|
|
||||||
|
config :pleroma, :mrf_user_allowlist, #{inspect(rewritten, pretty: true)}
|
||||||
|
""")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def warn do
|
def warn do
|
||||||
check_hellthread_threshold()
|
check_hellthread_threshold()
|
||||||
|
mrf_user_allowlist()
|
||||||
|
check_old_mrf_config()
|
||||||
|
end
|
||||||
|
|
||||||
|
def check_old_mrf_config do
|
||||||
|
warning_preface = """
|
||||||
|
!!!DEPRECATION WARNING!!!
|
||||||
|
Your config is using old namespaces for MRF configuration. They should work for now, but you are advised to change to new namespaces to prevent possible issues later:
|
||||||
|
"""
|
||||||
|
|
||||||
|
move_namespace_and_warn(@mrf_config_map, warning_preface)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec move_namespace_and_warn([config_map()], String.t()) :: :ok
|
||||||
|
def move_namespace_and_warn(config_map, warning_preface) do
|
||||||
|
warning =
|
||||||
|
Enum.reduce(config_map, "", fn
|
||||||
|
{old, new, err_msg}, acc ->
|
||||||
|
old_config = Config.get(old)
|
||||||
|
|
||||||
|
if old_config do
|
||||||
|
Config.put(new, old_config)
|
||||||
|
acc <> err_msg
|
||||||
|
else
|
||||||
|
acc
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|
||||||
|
if warning != "" do
|
||||||
|
Logger.warn(warning_preface <> warning)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -28,10 +28,6 @@ defmodule Pleroma.Config.TransferTask do
|
||||||
{:pleroma, Pleroma.Captcha, [:seconds_valid]},
|
{:pleroma, Pleroma.Captcha, [:seconds_valid]},
|
||||||
{:pleroma, Pleroma.Upload, [:proxy_remote]},
|
{:pleroma, Pleroma.Upload, [:proxy_remote]},
|
||||||
{:pleroma, :instance, [:upload_limit]},
|
{:pleroma, :instance, [:upload_limit]},
|
||||||
{:pleroma, :email_notifications, [:digest]},
|
|
||||||
{:pleroma, :oauth2, [:clean_expired_tokens]},
|
|
||||||
{:pleroma, Pleroma.ActivityExpiration, [:enabled]},
|
|
||||||
{:pleroma, Pleroma.ScheduledActivity, [:enabled]},
|
|
||||||
{:pleroma, :gopher, [:enabled]}
|
{:pleroma, :gopher, [:enabled]}
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -48,7 +44,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
|
||||||
|
|
||||||
{logger, other} =
|
{logger, other} =
|
||||||
(Repo.all(ConfigDB) ++ deleted_settings)
|
(Repo.all(ConfigDB) ++ deleted_settings)
|
||||||
|> Enum.map(&transform_and_merge/1)
|
|> Enum.map(&merge_with_default/1)
|
||||||
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
|
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
|
||||||
|
|
||||||
logger
|
logger
|
||||||
|
@ -92,11 +88,7 @@ defp maybe_set_pleroma_last(apps) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp transform_and_merge(%{group: group, key: key, value: value} = setting) do
|
defp merge_with_default(%{group: group, key: key, value: value} = setting) do
|
||||||
group = ConfigDB.from_string(group)
|
|
||||||
key = ConfigDB.from_string(key)
|
|
||||||
value = ConfigDB.from_binary(value)
|
|
||||||
|
|
||||||
default = Config.Holder.default_config(group, key)
|
default = Config.Holder.default_config(group, key)
|
||||||
|
|
||||||
merged =
|
merged =
|
||||||
|
|
|
@ -24,6 +24,6 @@ defmodule Pleroma.Constants do
|
||||||
|
|
||||||
const(static_only_files,
|
const(static_only_files,
|
||||||
do:
|
do:
|
||||||
~w(index.html robots.txt static static-fe finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc)
|
~w(index.html robots.txt static static-fe finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc embed.js embed.css)
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
|
@ -63,7 +63,7 @@ def create_or_bump_for(activity, opts \\ []) do
|
||||||
ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do
|
ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do
|
||||||
{:ok, conversation} = create_for_ap_id(ap_id)
|
{:ok, conversation} = create_for_ap_id(ap_id)
|
||||||
|
|
||||||
users = User.get_users_from_set(activity.recipients, false)
|
users = User.get_users_from_set(activity.recipients, local_only: false)
|
||||||
|
|
||||||
participations =
|
participations =
|
||||||
Enum.map(users, fn user ->
|
Enum.map(users, fn user ->
|
||||||
|
|
|
@ -162,10 +162,13 @@ def for_user_with_last_activity_id(user, params \\ %{}) do
|
||||||
for_user(user, params)
|
for_user(user, params)
|
||||||
|> Enum.map(fn participation ->
|
|> Enum.map(fn participation ->
|
||||||
activity_id =
|
activity_id =
|
||||||
ActivityPub.fetch_latest_activity_id_for_context(participation.conversation.ap_id, %{
|
ActivityPub.fetch_latest_direct_activity_id_for_context(
|
||||||
"user" => user,
|
participation.conversation.ap_id,
|
||||||
"blocking_user" => user
|
%{
|
||||||
})
|
user: user,
|
||||||
|
blocking_user: user
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
%{
|
%{
|
||||||
participation
|
participation
|
||||||
|
|
|
@ -10,32 +10,70 @@ defmodule Pleroma.CounterCache do
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
|
|
||||||
schema "counter_cache" do
|
schema "counter_cache" do
|
||||||
field(:name, :string)
|
field(:instance, :string)
|
||||||
field(:count, :integer)
|
field(:public, :integer)
|
||||||
|
field(:unlisted, :integer)
|
||||||
|
field(:private, :integer)
|
||||||
|
field(:direct, :integer)
|
||||||
end
|
end
|
||||||
|
|
||||||
def changeset(struct, params) do
|
def changeset(struct, params) do
|
||||||
struct
|
struct
|
||||||
|> cast(params, [:name, :count])
|
|> cast(params, [:instance, :public, :unlisted, :private, :direct])
|
||||||
|> validate_required([:name])
|
|> validate_required([:instance])
|
||||||
|> unique_constraint(:name)
|
|> unique_constraint(:instance)
|
||||||
end
|
end
|
||||||
|
|
||||||
def get_as_map(names) when is_list(names) do
|
def get_by_instance(instance) do
|
||||||
CounterCache
|
CounterCache
|
||||||
|> where([cc], cc.name in ^names)
|
|> select([c], %{
|
||||||
|> Repo.all()
|
"public" => c.public,
|
||||||
|> Enum.group_by(& &1.name, & &1.count)
|
"unlisted" => c.unlisted,
|
||||||
|> Map.new(fn {k, v} -> {k, hd(v)} end)
|
"private" => c.private,
|
||||||
|
"direct" => c.direct
|
||||||
|
})
|
||||||
|
|> where([c], c.instance == ^instance)
|
||||||
|
|> Repo.one()
|
||||||
|
|> case do
|
||||||
|
nil -> %{"public" => 0, "unlisted" => 0, "private" => 0, "direct" => 0}
|
||||||
|
val -> val
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def set(name, count) do
|
def get_sum do
|
||||||
|
CounterCache
|
||||||
|
|> select([c], %{
|
||||||
|
"public" => type(sum(c.public), :integer),
|
||||||
|
"unlisted" => type(sum(c.unlisted), :integer),
|
||||||
|
"private" => type(sum(c.private), :integer),
|
||||||
|
"direct" => type(sum(c.direct), :integer)
|
||||||
|
})
|
||||||
|
|> Repo.one()
|
||||||
|
end
|
||||||
|
|
||||||
|
def set(instance, values) do
|
||||||
|
params =
|
||||||
|
Enum.reduce(
|
||||||
|
["public", "private", "unlisted", "direct"],
|
||||||
|
%{"instance" => instance},
|
||||||
|
fn param, acc ->
|
||||||
|
Map.put_new(acc, param, Map.get(values, param, 0))
|
||||||
|
end
|
||||||
|
)
|
||||||
|
|
||||||
%CounterCache{}
|
%CounterCache{}
|
||||||
|> changeset(%{"name" => name, "count" => count})
|
|> changeset(params)
|
||||||
|> Repo.insert(
|
|> Repo.insert(
|
||||||
on_conflict: [set: [count: count]],
|
on_conflict: [
|
||||||
|
set: [
|
||||||
|
public: params["public"],
|
||||||
|
private: params["private"],
|
||||||
|
unlisted: params["unlisted"],
|
||||||
|
direct: params["direct"]
|
||||||
|
]
|
||||||
|
],
|
||||||
returning: true,
|
returning: true,
|
||||||
conflict_target: :name
|
conflict_target: :instance
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,4 +1,8 @@
|
||||||
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime do
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime do
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
The AP standard defines the date fields in AP as xsd:DateTime. Elixir's
|
The AP standard defines the date fields in AP as xsd:DateTime. Elixir's
|
||||||
DateTime can't parse this, but it can parse the related iso8601. This
|
DateTime can't parse this, but it can parse the related iso8601. This
|
|
@ -1,4 +1,8 @@
|
||||||
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID do
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID do
|
||||||
use Ecto.Type
|
use Ecto.Type
|
||||||
|
|
||||||
def type, do: :string
|
def type, do: :string
|
|
@ -0,0 +1,40 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Recipients do
|
||||||
|
use Ecto.Type
|
||||||
|
|
||||||
|
alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID
|
||||||
|
|
||||||
|
def type, do: {:array, ObjectID}
|
||||||
|
|
||||||
|
def cast(object) when is_binary(object) do
|
||||||
|
cast([object])
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(data) when is_list(data) do
|
||||||
|
data
|
||||||
|
|> Enum.reduce_while({:ok, []}, fn element, {:ok, list} ->
|
||||||
|
case ObjectID.cast(element) do
|
||||||
|
{:ok, id} ->
|
||||||
|
{:cont, {:ok, [id | list]}}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
{:halt, :error}
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(_) do
|
||||||
|
:error
|
||||||
|
end
|
||||||
|
|
||||||
|
def dump(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
|
||||||
|
def load(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,25 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.SafeText do
|
||||||
|
use Ecto.Type
|
||||||
|
|
||||||
|
alias Pleroma.HTML
|
||||||
|
|
||||||
|
def type, do: :string
|
||||||
|
|
||||||
|
def cast(str) when is_binary(str) do
|
||||||
|
{:ok, HTML.filter_tags(str)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(_), do: :error
|
||||||
|
|
||||||
|
def dump(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
|
||||||
|
def load(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
end
|
|
@ -1,4 +1,8 @@
|
||||||
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Uri do
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Uri do
|
||||||
use Ecto.Type
|
use Ecto.Type
|
||||||
|
|
||||||
def type, do: :string
|
def type, do: :string
|
|
@ -0,0 +1,26 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.Config.Atom do
|
||||||
|
use Ecto.Type
|
||||||
|
|
||||||
|
def type, do: :atom
|
||||||
|
|
||||||
|
def cast(key) when is_atom(key) do
|
||||||
|
{:ok, key}
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(key) when is_binary(key) do
|
||||||
|
{:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(_), do: :error
|
||||||
|
|
||||||
|
def load(key) do
|
||||||
|
{:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def dump(key) when is_atom(key), do: {:ok, inspect(key)}
|
||||||
|
def dump(_), do: :error
|
||||||
|
end
|
|
@ -0,0 +1,27 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.EctoType.Config.BinaryValue do
|
||||||
|
use Ecto.Type
|
||||||
|
|
||||||
|
def type, do: :term
|
||||||
|
|
||||||
|
def cast(value) when is_binary(value) do
|
||||||
|
if String.valid?(value) do
|
||||||
|
{:ok, value}
|
||||||
|
else
|
||||||
|
{:ok, :erlang.binary_to_term(value)}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(value), do: {:ok, value}
|
||||||
|
|
||||||
|
def load(value) when is_binary(value) do
|
||||||
|
{:ok, :erlang.binary_to_term(value)}
|
||||||
|
end
|
||||||
|
|
||||||
|
def dump(value) do
|
||||||
|
{:ok, :erlang.term_to_binary(value)}
|
||||||
|
end
|
||||||
|
end
|
|
@ -14,8 +14,10 @@ def new_users(to, users_and_statuses) do
|
||||||
styling = Pleroma.Config.get([Pleroma.Emails.UserEmail, :styling])
|
styling = Pleroma.Config.get([Pleroma.Emails.UserEmail, :styling])
|
||||||
|
|
||||||
logo_url =
|
logo_url =
|
||||||
Pleroma.Web.Endpoint.url() <>
|
Pleroma.Helpers.UriHelper.maybe_add_base(
|
||||||
Pleroma.Config.get([:frontend_configurations, :pleroma_fe, :logo])
|
Pleroma.Config.get([:frontend_configurations, :pleroma_fe, :logo]),
|
||||||
|
Pleroma.Web.Endpoint.url()
|
||||||
|
)
|
||||||
|
|
||||||
new()
|
new()
|
||||||
|> to({to.name, to.email})
|
|> to({to.name, to.email})
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
defmodule Pleroma.Emoji.Pack do
|
defmodule Pleroma.Emoji.Pack do
|
||||||
@derive {Jason.Encoder, only: [:files, :pack]}
|
@derive {Jason.Encoder, only: [:files, :pack, :files_count]}
|
||||||
defstruct files: %{},
|
defstruct files: %{},
|
||||||
|
files_count: 0,
|
||||||
pack_file: nil,
|
pack_file: nil,
|
||||||
path: nil,
|
path: nil,
|
||||||
pack: %{},
|
pack: %{},
|
||||||
|
@ -8,6 +9,7 @@ defmodule Pleroma.Emoji.Pack do
|
||||||
|
|
||||||
@type t() :: %__MODULE__{
|
@type t() :: %__MODULE__{
|
||||||
files: %{String.t() => Path.t()},
|
files: %{String.t() => Path.t()},
|
||||||
|
files_count: non_neg_integer(),
|
||||||
pack_file: Path.t(),
|
pack_file: Path.t(),
|
||||||
path: Path.t(),
|
path: Path.t(),
|
||||||
pack: map(),
|
pack: map(),
|
||||||
|
@ -16,162 +18,96 @@ defmodule Pleroma.Emoji.Pack do
|
||||||
|
|
||||||
alias Pleroma.Emoji
|
alias Pleroma.Emoji
|
||||||
|
|
||||||
@spec emoji_path() :: Path.t()
|
@spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
|
||||||
def emoji_path do
|
def create(name) do
|
||||||
static = Pleroma.Config.get!([:instance, :static_dir])
|
with :ok <- validate_not_empty([name]),
|
||||||
Path.join(static, "emoji")
|
dir <- Path.join(emoji_path(), name),
|
||||||
end
|
:ok <- File.mkdir(dir) do
|
||||||
|
%__MODULE__{pack_file: Path.join(dir, "pack.json")}
|
||||||
@spec create(String.t()) :: :ok | {:error, File.posix()} | {:error, :empty_values}
|
|
||||||
def create(name) when byte_size(name) > 0 do
|
|
||||||
dir = Path.join(emoji_path(), name)
|
|
||||||
|
|
||||||
with :ok <- File.mkdir(dir) do
|
|
||||||
%__MODULE__{
|
|
||||||
pack_file: Path.join(dir, "pack.json")
|
|
||||||
}
|
|
||||||
|> save_pack()
|
|> save_pack()
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def create(_), do: {:error, :empty_values}
|
defp paginate(entities, 1, page_size), do: Enum.take(entities, page_size)
|
||||||
|
|
||||||
@spec show(String.t()) :: {:ok, t()} | {:loaded, nil} | {:error, :empty_values}
|
defp paginate(entities, page, page_size) do
|
||||||
def show(name) when byte_size(name) > 0 do
|
entities
|
||||||
with {_, %__MODULE__{} = pack} <- {:loaded, load_pack(name)},
|
|> Enum.chunk_every(page_size)
|
||||||
{_, pack} <- validate_pack(pack) do
|
|> Enum.at(page - 1)
|
||||||
{:ok, pack}
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def show(_), do: {:error, :empty_values}
|
@spec show(keyword()) :: {:ok, t()} | {:error, atom()}
|
||||||
|
def show(opts) do
|
||||||
|
name = opts[:name]
|
||||||
|
|
||||||
|
with :ok <- validate_not_empty([name]),
|
||||||
|
{:ok, pack} <- load_pack(name) do
|
||||||
|
shortcodes =
|
||||||
|
pack.files
|
||||||
|
|> Map.keys()
|
||||||
|
|> Enum.sort()
|
||||||
|
|> paginate(opts[:page], opts[:page_size])
|
||||||
|
|
||||||
|
pack = Map.put(pack, :files, Map.take(pack.files, shortcodes))
|
||||||
|
|
||||||
|
{:ok, validate_pack(pack)}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
@spec delete(String.t()) ::
|
@spec delete(String.t()) ::
|
||||||
{:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
|
{:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
|
||||||
def delete(name) when byte_size(name) > 0 do
|
def delete(name) do
|
||||||
|
with :ok <- validate_not_empty([name]) do
|
||||||
emoji_path()
|
emoji_path()
|
||||||
|> Path.join(name)
|
|> Path.join(name)
|
||||||
|> File.rm_rf()
|
|> File.rm_rf()
|
||||||
end
|
end
|
||||||
|
end
|
||||||
def delete(_), do: {:error, :empty_values}
|
|
||||||
|
|
||||||
@spec add_file(String.t(), String.t(), Path.t(), Plug.Upload.t() | String.t()) ::
|
@spec add_file(String.t(), String.t(), Path.t(), Plug.Upload.t() | String.t()) ::
|
||||||
{:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
|
{:ok, t()} | {:error, File.posix() | atom()}
|
||||||
def add_file(name, shortcode, filename, file)
|
def add_file(name, shortcode, filename, file) do
|
||||||
when byte_size(name) > 0 and byte_size(shortcode) > 0 and byte_size(filename) > 0 do
|
with :ok <- validate_not_empty([name, shortcode, filename]),
|
||||||
with {_, nil} <- {:exists, Emoji.get(shortcode)},
|
:ok <- validate_emoji_not_exists(shortcode),
|
||||||
{_, %__MODULE__{} = pack} <- {:loaded, load_pack(name)} do
|
{:ok, pack} <- load_pack(name),
|
||||||
file_path = Path.join(pack.path, filename)
|
:ok <- save_file(file, pack, filename),
|
||||||
|
{:ok, updated_pack} <- pack |> put_emoji(shortcode, filename) |> save_pack() do
|
||||||
create_subdirs(file_path)
|
|
||||||
|
|
||||||
case file do
|
|
||||||
%Plug.Upload{path: upload_path} ->
|
|
||||||
# Copy the uploaded file from the temporary directory
|
|
||||||
File.copy!(upload_path, file_path)
|
|
||||||
|
|
||||||
url when is_binary(url) ->
|
|
||||||
# Download and write the file
|
|
||||||
file_contents = Tesla.get!(url).body
|
|
||||||
File.write!(file_path, file_contents)
|
|
||||||
end
|
|
||||||
|
|
||||||
files = Map.put(pack.files, shortcode, filename)
|
|
||||||
|
|
||||||
updated_pack = %{pack | files: files}
|
|
||||||
|
|
||||||
case save_pack(updated_pack) do
|
|
||||||
:ok ->
|
|
||||||
Emoji.reload()
|
Emoji.reload()
|
||||||
{:ok, updated_pack}
|
{:ok, updated_pack}
|
||||||
|
|
||||||
e ->
|
|
||||||
e
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def add_file(_, _, _, _), do: {:error, :empty_values}
|
|
||||||
|
|
||||||
defp create_subdirs(file_path) do
|
|
||||||
if String.contains?(file_path, "/") do
|
|
||||||
file_path
|
|
||||||
|> Path.dirname()
|
|
||||||
|> File.mkdir_p!()
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec delete_file(String.t(), String.t()) ::
|
@spec delete_file(String.t(), String.t()) ::
|
||||||
{:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
|
{:ok, t()} | {:error, File.posix() | atom()}
|
||||||
def delete_file(name, shortcode) when byte_size(name) > 0 and byte_size(shortcode) > 0 do
|
def delete_file(name, shortcode) do
|
||||||
with {_, %__MODULE__{} = pack} <- {:loaded, load_pack(name)},
|
with :ok <- validate_not_empty([name, shortcode]),
|
||||||
{_, {filename, files}} when not is_nil(filename) <-
|
{:ok, pack} <- load_pack(name),
|
||||||
{:exists, Map.pop(pack.files, shortcode)},
|
:ok <- remove_file(pack, shortcode),
|
||||||
emoji <- Path.join(pack.path, filename),
|
{:ok, updated_pack} <- pack |> delete_emoji(shortcode) |> save_pack() do
|
||||||
{_, true} <- {:exists, File.exists?(emoji)} do
|
|
||||||
emoji_dir = Path.dirname(emoji)
|
|
||||||
|
|
||||||
File.rm!(emoji)
|
|
||||||
|
|
||||||
if String.contains?(filename, "/") and File.ls!(emoji_dir) == [] do
|
|
||||||
File.rmdir!(emoji_dir)
|
|
||||||
end
|
|
||||||
|
|
||||||
updated_pack = %{pack | files: files}
|
|
||||||
|
|
||||||
case save_pack(updated_pack) do
|
|
||||||
:ok ->
|
|
||||||
Emoji.reload()
|
Emoji.reload()
|
||||||
{:ok, updated_pack}
|
{:ok, updated_pack}
|
||||||
|
|
||||||
e ->
|
|
||||||
e
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
|
||||||
|
|
||||||
def delete_file(_, _), do: {:error, :empty_values}
|
|
||||||
|
|
||||||
@spec update_file(String.t(), String.t(), String.t(), String.t(), boolean()) ::
|
@spec update_file(String.t(), String.t(), String.t(), String.t(), boolean()) ::
|
||||||
{:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
|
{:ok, t()} | {:error, File.posix() | atom()}
|
||||||
def update_file(name, shortcode, new_shortcode, new_filename, force)
|
def update_file(name, shortcode, new_shortcode, new_filename, force) do
|
||||||
when byte_size(name) > 0 and byte_size(shortcode) > 0 and byte_size(new_shortcode) > 0 and
|
with :ok <- validate_not_empty([name, shortcode, new_shortcode, new_filename]),
|
||||||
byte_size(new_filename) > 0 do
|
{:ok, pack} <- load_pack(name),
|
||||||
with {_, %__MODULE__{} = pack} <- {:loaded, load_pack(name)},
|
{:ok, filename} <- get_filename(pack, shortcode),
|
||||||
{_, {filename, files}} when not is_nil(filename) <-
|
:ok <- validate_emoji_not_exists(new_shortcode, force),
|
||||||
{:exists, Map.pop(pack.files, shortcode)},
|
:ok <- rename_file(pack, filename, new_filename),
|
||||||
{_, true} <- {:not_used, force or is_nil(Emoji.get(new_shortcode))} do
|
{:ok, updated_pack} <-
|
||||||
old_path = Path.join(pack.path, filename)
|
pack
|
||||||
old_dir = Path.dirname(old_path)
|
|> delete_emoji(shortcode)
|
||||||
new_path = Path.join(pack.path, new_filename)
|
|> put_emoji(new_shortcode, new_filename)
|
||||||
|
|> save_pack() do
|
||||||
create_subdirs(new_path)
|
|
||||||
|
|
||||||
:ok = File.rename(old_path, new_path)
|
|
||||||
|
|
||||||
if String.contains?(filename, "/") and File.ls!(old_dir) == [] do
|
|
||||||
File.rmdir!(old_dir)
|
|
||||||
end
|
|
||||||
|
|
||||||
files = Map.put(files, new_shortcode, new_filename)
|
|
||||||
|
|
||||||
updated_pack = %{pack | files: files}
|
|
||||||
|
|
||||||
case save_pack(updated_pack) do
|
|
||||||
:ok ->
|
|
||||||
Emoji.reload()
|
Emoji.reload()
|
||||||
{:ok, updated_pack}
|
{:ok, updated_pack}
|
||||||
|
|
||||||
e ->
|
|
||||||
e
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def update_file(_, _, _, _, _), do: {:error, :empty_values}
|
@spec import_from_filesystem() :: {:ok, [String.t()]} | {:error, File.posix() | atom()}
|
||||||
|
|
||||||
@spec import_from_filesystem() :: {:ok, [String.t()]} | {:error, atom()}
|
|
||||||
def import_from_filesystem do
|
def import_from_filesystem do
|
||||||
emoji_path = emoji_path()
|
emoji_path = emoji_path()
|
||||||
|
|
||||||
|
@ -184,7 +120,7 @@ def import_from_filesystem do
|
||||||
File.dir?(path) and File.exists?(Path.join(path, "pack.json"))
|
File.dir?(path) and File.exists?(Path.join(path, "pack.json"))
|
||||||
end)
|
end)
|
||||||
|> Enum.map(&write_pack_contents/1)
|
|> Enum.map(&write_pack_contents/1)
|
||||||
|> Enum.filter(& &1)
|
|> Enum.reject(&is_nil/1)
|
||||||
|
|
||||||
{:ok, names}
|
{:ok, names}
|
||||||
else
|
else
|
||||||
|
@ -193,6 +129,126 @@ def import_from_filesystem do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec list_remote(String.t()) :: {:ok, map()} | {:error, atom()}
|
||||||
|
def list_remote(url) do
|
||||||
|
uri = url |> String.trim() |> URI.parse()
|
||||||
|
|
||||||
|
with :ok <- validate_shareable_packs_available(uri) do
|
||||||
|
uri
|
||||||
|
|> URI.merge("/api/pleroma/emoji/packs")
|
||||||
|
|> http_get()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec list_local(keyword()) :: {:ok, map(), non_neg_integer()}
|
||||||
|
def list_local(opts) do
|
||||||
|
with {:ok, results} <- list_packs_dir() do
|
||||||
|
all_packs =
|
||||||
|
results
|
||||||
|
|> Enum.map(fn name ->
|
||||||
|
case load_pack(name) do
|
||||||
|
{:ok, pack} -> pack
|
||||||
|
_ -> nil
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|> Enum.reject(&is_nil/1)
|
||||||
|
|
||||||
|
packs =
|
||||||
|
all_packs
|
||||||
|
|> paginate(opts[:page], opts[:page_size])
|
||||||
|
|> Map.new(fn pack -> {pack.name, validate_pack(pack)} end)
|
||||||
|
|
||||||
|
{:ok, packs, length(all_packs)}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_archive(String.t()) :: {:ok, binary()} | {:error, atom()}
|
||||||
|
def get_archive(name) do
|
||||||
|
with {:ok, pack} <- load_pack(name),
|
||||||
|
:ok <- validate_downloadable(pack) do
|
||||||
|
{:ok, fetch_archive(pack)}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec download(String.t(), String.t(), String.t()) :: {:ok, t()} | {:error, atom()}
|
||||||
|
def download(name, url, as) do
|
||||||
|
uri = url |> String.trim() |> URI.parse()
|
||||||
|
|
||||||
|
with :ok <- validate_shareable_packs_available(uri),
|
||||||
|
{:ok, remote_pack} <- uri |> URI.merge("/api/pleroma/emoji/packs/#{name}") |> http_get(),
|
||||||
|
{:ok, %{sha: sha, url: url} = pack_info} <- fetch_pack_info(remote_pack, uri, name),
|
||||||
|
{:ok, archive} <- download_archive(url, sha),
|
||||||
|
pack <- copy_as(remote_pack, as || name),
|
||||||
|
{:ok, _} = unzip(archive, pack_info, remote_pack, pack) do
|
||||||
|
# Fallback can't contain a pack.json file, since that would cause the fallback-src-sha256
|
||||||
|
# in it to depend on itself
|
||||||
|
if pack_info[:fallback] do
|
||||||
|
save_pack(pack)
|
||||||
|
else
|
||||||
|
{:ok, pack}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec save_metadata(map(), t()) :: {:ok, t()} | {:error, File.posix()}
|
||||||
|
def save_metadata(metadata, %__MODULE__{} = pack) do
|
||||||
|
pack
|
||||||
|
|> Map.put(:pack, metadata)
|
||||||
|
|> save_pack()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec update_metadata(String.t(), map()) :: {:ok, t()} | {:error, File.posix()}
|
||||||
|
def update_metadata(name, data) do
|
||||||
|
with {:ok, pack} <- load_pack(name) do
|
||||||
|
if fallback_sha_changed?(pack, data) do
|
||||||
|
update_sha_and_save_metadata(pack, data)
|
||||||
|
else
|
||||||
|
save_metadata(data, pack)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec load_pack(String.t()) :: {:ok, t()} | {:error, :not_found}
|
||||||
|
def load_pack(name) do
|
||||||
|
pack_file = Path.join([emoji_path(), name, "pack.json"])
|
||||||
|
|
||||||
|
if File.exists?(pack_file) do
|
||||||
|
pack =
|
||||||
|
pack_file
|
||||||
|
|> File.read!()
|
||||||
|
|> from_json()
|
||||||
|
|> Map.put(:pack_file, pack_file)
|
||||||
|
|> Map.put(:path, Path.dirname(pack_file))
|
||||||
|
|> Map.put(:name, name)
|
||||||
|
|
||||||
|
files_count =
|
||||||
|
pack.files
|
||||||
|
|> Map.keys()
|
||||||
|
|> length()
|
||||||
|
|
||||||
|
{:ok, Map.put(pack, :files_count, files_count)}
|
||||||
|
else
|
||||||
|
{:error, :not_found}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec emoji_path() :: Path.t()
|
||||||
|
defp emoji_path do
|
||||||
|
[:instance, :static_dir]
|
||||||
|
|> Pleroma.Config.get!()
|
||||||
|
|> Path.join("emoji")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp validate_emoji_not_exists(shortcode, force \\ false)
|
||||||
|
defp validate_emoji_not_exists(_shortcode, true), do: :ok
|
||||||
|
|
||||||
|
defp validate_emoji_not_exists(shortcode, _) do
|
||||||
|
case Emoji.get(shortcode) do
|
||||||
|
nil -> :ok
|
||||||
|
_ -> {:error, :already_exists}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
defp write_pack_contents(path) do
|
defp write_pack_contents(path) do
|
||||||
pack = %__MODULE__{
|
pack = %__MODULE__{
|
||||||
files: files_from_path(path),
|
files: files_from_path(path),
|
||||||
|
@ -201,7 +257,7 @@ defp write_pack_contents(path) do
|
||||||
}
|
}
|
||||||
|
|
||||||
case save_pack(pack) do
|
case save_pack(pack) do
|
||||||
:ok -> Path.basename(path)
|
{:ok, _pack} -> Path.basename(path)
|
||||||
_ -> nil
|
_ -> nil
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -216,7 +272,8 @@ defp files_from_path(path) do
|
||||||
# FIXME: Copy-pasted from Pleroma.Emoji/load_from_file_stream/2
|
# FIXME: Copy-pasted from Pleroma.Emoji/load_from_file_stream/2
|
||||||
|
|
||||||
# Create a map of shortcodes to filenames from emoji.txt
|
# Create a map of shortcodes to filenames from emoji.txt
|
||||||
File.read!(txt_path)
|
txt_path
|
||||||
|
|> File.read!()
|
||||||
|> String.split("\n")
|
|> String.split("\n")
|
||||||
|> Enum.map(&String.trim/1)
|
|> Enum.map(&String.trim/1)
|
||||||
|> Enum.map(fn line ->
|
|> Enum.map(fn line ->
|
||||||
|
@ -226,21 +283,18 @@ defp files_from_path(path) do
|
||||||
[name, file | _] ->
|
[name, file | _] ->
|
||||||
file_dir_name = Path.dirname(file)
|
file_dir_name = Path.dirname(file)
|
||||||
|
|
||||||
file =
|
|
||||||
if String.ends_with?(path, file_dir_name) do
|
if String.ends_with?(path, file_dir_name) do
|
||||||
Path.basename(file)
|
{name, Path.basename(file)}
|
||||||
else
|
else
|
||||||
file
|
|
||||||
end
|
|
||||||
|
|
||||||
{name, file}
|
{name, file}
|
||||||
|
end
|
||||||
|
|
||||||
_ ->
|
_ ->
|
||||||
nil
|
nil
|
||||||
end
|
end
|
||||||
end)
|
end)
|
||||||
|> Enum.filter(& &1)
|
|> Enum.reject(&is_nil/1)
|
||||||
|> Enum.into(%{})
|
|> Map.new()
|
||||||
else
|
else
|
||||||
# If there's no emoji.txt, assume all files
|
# If there's no emoji.txt, assume all files
|
||||||
# that are of certain extensions from the config are emojis and import them all
|
# that are of certain extensions from the config are emojis and import them all
|
||||||
|
@ -249,60 +303,20 @@ defp files_from_path(path) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec list_remote(String.t()) :: {:ok, map()}
|
|
||||||
def list_remote(url) do
|
|
||||||
uri =
|
|
||||||
url
|
|
||||||
|> String.trim()
|
|
||||||
|> URI.parse()
|
|
||||||
|
|
||||||
with {_, true} <- {:shareable, shareable_packs_available?(uri)} do
|
|
||||||
packs =
|
|
||||||
uri
|
|
||||||
|> URI.merge("/api/pleroma/emoji/packs")
|
|
||||||
|> to_string()
|
|
||||||
|> Tesla.get!()
|
|
||||||
|> Map.get(:body)
|
|
||||||
|> Jason.decode!()
|
|
||||||
|
|
||||||
{:ok, packs}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec list_local() :: {:ok, map()}
|
|
||||||
def list_local do
|
|
||||||
emoji_path = emoji_path()
|
|
||||||
|
|
||||||
# Create the directory first if it does not exist. This is probably the first request made
|
|
||||||
# with the API so it should be sufficient
|
|
||||||
with {:create_dir, :ok} <- {:create_dir, File.mkdir_p(emoji_path)},
|
|
||||||
{:ls, {:ok, results}} <- {:ls, File.ls(emoji_path)} do
|
|
||||||
packs =
|
|
||||||
results
|
|
||||||
|> Enum.map(&load_pack/1)
|
|
||||||
|> Enum.filter(& &1)
|
|
||||||
|> Enum.map(&validate_pack/1)
|
|
||||||
|> Map.new()
|
|
||||||
|
|
||||||
{:ok, packs}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp validate_pack(pack) do
|
defp validate_pack(pack) do
|
||||||
|
info =
|
||||||
if downloadable?(pack) do
|
if downloadable?(pack) do
|
||||||
archive = fetch_archive(pack)
|
archive = fetch_archive(pack)
|
||||||
archive_sha = :crypto.hash(:sha256, archive) |> Base.encode16()
|
archive_sha = :crypto.hash(:sha256, archive) |> Base.encode16()
|
||||||
|
|
||||||
info =
|
|
||||||
pack.pack
|
pack.pack
|
||||||
|> Map.put("can-download", true)
|
|> Map.put("can-download", true)
|
||||||
|> Map.put("download-sha256", archive_sha)
|
|> Map.put("download-sha256", archive_sha)
|
||||||
|
|
||||||
{pack.name, Map.put(pack, :pack, info)}
|
|
||||||
else
|
else
|
||||||
info = Map.put(pack.pack, "can-download", false)
|
Map.put(pack.pack, "can-download", false)
|
||||||
{pack.name, Map.put(pack, :pack, info)}
|
|
||||||
end
|
end
|
||||||
|
|
||||||
|
Map.put(pack, :pack, info)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp downloadable?(pack) do
|
defp downloadable?(pack) do
|
||||||
|
@ -311,30 +325,12 @@ defp downloadable?(pack) do
|
||||||
# Otherwise, they'd have to download it from external-src
|
# Otherwise, they'd have to download it from external-src
|
||||||
pack.pack["share-files"] &&
|
pack.pack["share-files"] &&
|
||||||
Enum.all?(pack.files, fn {_, file} ->
|
Enum.all?(pack.files, fn {_, file} ->
|
||||||
File.exists?(Path.join(pack.path, file))
|
pack.path
|
||||||
|
|> Path.join(file)
|
||||||
|
|> File.exists?()
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec get_archive(String.t()) :: {:ok, binary()}
|
|
||||||
def get_archive(name) do
|
|
||||||
with {_, %__MODULE__{} = pack} <- {:exists?, load_pack(name)},
|
|
||||||
{_, true} <- {:can_download?, downloadable?(pack)} do
|
|
||||||
{:ok, fetch_archive(pack)}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp fetch_archive(pack) do
|
|
||||||
hash = :crypto.hash(:md5, File.read!(pack.pack_file))
|
|
||||||
|
|
||||||
case Cachex.get!(:emoji_packs_cache, pack.name) do
|
|
||||||
%{hash: ^hash, pack_data: archive} ->
|
|
||||||
archive
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
create_archive_and_cache(pack, hash)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp create_archive_and_cache(pack, hash) do
|
defp create_archive_and_cache(pack, hash) do
|
||||||
files = ['pack.json' | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
|
files = ['pack.json' | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
|
||||||
|
|
||||||
|
@ -356,23 +352,158 @@ defp create_archive_and_cache(pack, hash) do
|
||||||
result
|
result
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec download(String.t(), String.t(), String.t()) :: :ok
|
defp save_pack(pack) do
|
||||||
def download(name, url, as) do
|
with {:ok, json} <- Jason.encode(pack, pretty: true),
|
||||||
uri =
|
:ok <- File.write(pack.pack_file, json) do
|
||||||
url
|
{:ok, pack}
|
||||||
|> String.trim()
|
end
|
||||||
|> URI.parse()
|
end
|
||||||
|
|
||||||
with {_, true} <- {:shareable, shareable_packs_available?(uri)} do
|
defp from_json(json) do
|
||||||
remote_pack =
|
map = Jason.decode!(json)
|
||||||
uri
|
|
||||||
|> URI.merge("/api/pleroma/emoji/packs/#{name}")
|
|
||||||
|> to_string()
|
|
||||||
|> Tesla.get!()
|
|
||||||
|> Map.get(:body)
|
|
||||||
|> Jason.decode!()
|
|
||||||
|
|
||||||
result =
|
struct(__MODULE__, %{files: map["files"], pack: map["pack"]})
|
||||||
|
end
|
||||||
|
|
||||||
|
defp validate_shareable_packs_available(uri) do
|
||||||
|
with {:ok, %{"links" => links}} <- uri |> URI.merge("/.well-known/nodeinfo") |> http_get(),
|
||||||
|
# Get the actual nodeinfo address and fetch it
|
||||||
|
{:ok, %{"metadata" => %{"features" => features}}} <-
|
||||||
|
links |> List.last() |> Map.get("href") |> http_get() do
|
||||||
|
if Enum.member?(features, "shareable_emoji_packs") do
|
||||||
|
:ok
|
||||||
|
else
|
||||||
|
{:error, :not_shareable}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp validate_not_empty(list) do
|
||||||
|
if Enum.all?(list, fn i -> is_binary(i) and i != "" end) do
|
||||||
|
:ok
|
||||||
|
else
|
||||||
|
{:error, :empty_values}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp save_file(file, pack, filename) do
|
||||||
|
file_path = Path.join(pack.path, filename)
|
||||||
|
create_subdirs(file_path)
|
||||||
|
|
||||||
|
case file do
|
||||||
|
%Plug.Upload{path: upload_path} ->
|
||||||
|
# Copy the uploaded file from the temporary directory
|
||||||
|
with {:ok, _} <- File.copy(upload_path, file_path), do: :ok
|
||||||
|
|
||||||
|
url when is_binary(url) ->
|
||||||
|
# Download and write the file
|
||||||
|
file_contents = Tesla.get!(url).body
|
||||||
|
File.write(file_path, file_contents)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp put_emoji(pack, shortcode, filename) do
|
||||||
|
files = Map.put(pack.files, shortcode, filename)
|
||||||
|
%{pack | files: files}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp delete_emoji(pack, shortcode) do
|
||||||
|
files = Map.delete(pack.files, shortcode)
|
||||||
|
%{pack | files: files}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp rename_file(pack, filename, new_filename) do
|
||||||
|
old_path = Path.join(pack.path, filename)
|
||||||
|
new_path = Path.join(pack.path, new_filename)
|
||||||
|
create_subdirs(new_path)
|
||||||
|
|
||||||
|
with :ok <- File.rename(old_path, new_path) do
|
||||||
|
remove_dir_if_empty(old_path, filename)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp create_subdirs(file_path) do
|
||||||
|
if String.contains?(file_path, "/") do
|
||||||
|
file_path
|
||||||
|
|> Path.dirname()
|
||||||
|
|> File.mkdir_p!()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp remove_file(pack, shortcode) do
|
||||||
|
with {:ok, filename} <- get_filename(pack, shortcode),
|
||||||
|
emoji <- Path.join(pack.path, filename),
|
||||||
|
:ok <- File.rm(emoji) do
|
||||||
|
remove_dir_if_empty(emoji, filename)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp remove_dir_if_empty(emoji, filename) do
|
||||||
|
dir = Path.dirname(emoji)
|
||||||
|
|
||||||
|
if String.contains?(filename, "/") and File.ls!(dir) == [] do
|
||||||
|
File.rmdir!(dir)
|
||||||
|
else
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp get_filename(pack, shortcode) do
|
||||||
|
with %{^shortcode => filename} when is_binary(filename) <- pack.files,
|
||||||
|
true <- pack.path |> Path.join(filename) |> File.exists?() do
|
||||||
|
{:ok, filename}
|
||||||
|
else
|
||||||
|
_ -> {:error, :doesnt_exist}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp http_get(%URI{} = url), do: url |> to_string() |> http_get()
|
||||||
|
|
||||||
|
defp http_get(url) do
|
||||||
|
with {:ok, %{body: body}} <- url |> Pleroma.HTTP.get() do
|
||||||
|
Jason.decode(body)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp list_packs_dir do
|
||||||
|
emoji_path = emoji_path()
|
||||||
|
# Create the directory first if it does not exist. This is probably the first request made
|
||||||
|
# with the API so it should be sufficient
|
||||||
|
with {:create_dir, :ok} <- {:create_dir, File.mkdir_p(emoji_path)},
|
||||||
|
{:ls, {:ok, results}} <- {:ls, File.ls(emoji_path)} do
|
||||||
|
{:ok, Enum.sort(results)}
|
||||||
|
else
|
||||||
|
{:create_dir, {:error, e}} -> {:error, :create_dir, e}
|
||||||
|
{:ls, {:error, e}} -> {:error, :ls, e}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp validate_downloadable(pack) do
|
||||||
|
if downloadable?(pack), do: :ok, else: {:error, :cant_download}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp copy_as(remote_pack, local_name) do
|
||||||
|
path = Path.join(emoji_path(), local_name)
|
||||||
|
|
||||||
|
%__MODULE__{
|
||||||
|
name: local_name,
|
||||||
|
path: path,
|
||||||
|
files: remote_pack["files"],
|
||||||
|
pack_file: Path.join(path, "pack.json")
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp unzip(archive, pack_info, remote_pack, local_pack) do
|
||||||
|
with :ok <- File.mkdir_p!(local_pack.path) do
|
||||||
|
files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
|
||||||
|
# Fallback cannot contain a pack.json file
|
||||||
|
files = if pack_info[:fallback], do: files, else: ['pack.json' | files]
|
||||||
|
|
||||||
|
:zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fetch_pack_info(remote_pack, uri, name) do
|
||||||
case remote_pack["pack"] do
|
case remote_pack["pack"] do
|
||||||
%{"share-files" => true, "can-download" => true, "download-sha256" => sha} ->
|
%{"share-files" => true, "can-download" => true, "download-sha256" => sha} ->
|
||||||
{:ok,
|
{:ok,
|
||||||
|
@ -390,118 +521,52 @@ def download(name, url, as) do
|
||||||
}}
|
}}
|
||||||
|
|
||||||
_ ->
|
_ ->
|
||||||
{:error,
|
{:error, "The pack was not set as shared and there is no fallback src to download from"}
|
||||||
"The pack was not set as shared and there is no fallback src to download from"}
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
with {:ok, %{sha: sha, url: url} = pinfo} <- result,
|
defp download_archive(url, sha) do
|
||||||
%{body: archive} <- Tesla.get!(url),
|
with {:ok, %{body: archive}} <- Tesla.get(url) do
|
||||||
{_, true} <- {:checksum, Base.decode16!(sha) == :crypto.hash(:sha256, archive)} do
|
if Base.decode16!(sha) == :crypto.hash(:sha256, archive) do
|
||||||
local_name = as || name
|
{:ok, archive}
|
||||||
|
else
|
||||||
path = Path.join(emoji_path(), local_name)
|
{:error, :invalid_checksum}
|
||||||
|
|
||||||
pack = %__MODULE__{
|
|
||||||
name: local_name,
|
|
||||||
path: path,
|
|
||||||
files: remote_pack["files"],
|
|
||||||
pack_file: Path.join(path, "pack.json")
|
|
||||||
}
|
|
||||||
|
|
||||||
File.mkdir_p!(pack.path)
|
|
||||||
|
|
||||||
files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
|
|
||||||
# Fallback cannot contain a pack.json file
|
|
||||||
files = if pinfo[:fallback], do: files, else: ['pack.json' | files]
|
|
||||||
|
|
||||||
{:ok, _} = :zip.unzip(archive, cwd: to_charlist(pack.path), file_list: files)
|
|
||||||
|
|
||||||
# Fallback can't contain a pack.json file, since that would cause the fallback-src-sha256
|
|
||||||
# in it to depend on itself
|
|
||||||
if pinfo[:fallback] do
|
|
||||||
save_pack(pack)
|
|
||||||
end
|
|
||||||
|
|
||||||
:ok
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp save_pack(pack), do: File.write(pack.pack_file, Jason.encode!(pack, pretty: true))
|
defp fetch_archive(pack) do
|
||||||
|
hash = :crypto.hash(:md5, File.read!(pack.pack_file))
|
||||||
|
|
||||||
@spec save_metadata(map(), t()) :: {:ok, t()} | {:error, File.posix()}
|
case Cachex.get!(:emoji_packs_cache, pack.name) do
|
||||||
def save_metadata(metadata, %__MODULE__{} = pack) do
|
%{hash: ^hash, pack_data: archive} -> archive
|
||||||
pack = Map.put(pack, :pack, metadata)
|
_ -> create_archive_and_cache(pack, hash)
|
||||||
|
|
||||||
with :ok <- save_pack(pack) do
|
|
||||||
{:ok, pack}
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec update_metadata(String.t(), map()) :: {:ok, t()} | {:error, File.posix()}
|
defp fallback_sha_changed?(pack, data) do
|
||||||
def update_metadata(name, data) do
|
is_binary(data[:"fallback-src"]) and data[:"fallback-src"] != pack.pack["fallback-src"]
|
||||||
pack = load_pack(name)
|
end
|
||||||
|
|
||||||
fb_sha_changed? =
|
defp update_sha_and_save_metadata(pack, data) do
|
||||||
not is_nil(data["fallback-src"]) and data["fallback-src"] != pack.pack["fallback-src"]
|
with {:ok, %{body: zip}} <- Tesla.get(data[:"fallback-src"]),
|
||||||
|
:ok <- validate_has_all_files(pack, zip) do
|
||||||
with {_, true} <- {:update?, fb_sha_changed?},
|
fallback_sha = :sha256 |> :crypto.hash(zip) |> Base.encode16()
|
||||||
{:ok, %{body: zip}} <- Tesla.get(data["fallback-src"]),
|
|
||||||
{:ok, f_list} <- :zip.unzip(zip, [:memory]),
|
|
||||||
{_, true} <- {:has_all_files?, has_all_files?(pack.files, f_list)} do
|
|
||||||
fallback_sha = :crypto.hash(:sha256, zip) |> Base.encode16()
|
|
||||||
|
|
||||||
data
|
data
|
||||||
|> Map.put("fallback-src-sha256", fallback_sha)
|
|> Map.put("fallback-src-sha256", fallback_sha)
|
||||||
|> save_metadata(pack)
|
|> save_metadata(pack)
|
||||||
else
|
|
||||||
{:update?, _} -> save_metadata(data, pack)
|
|
||||||
e -> e
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp validate_has_all_files(pack, zip) do
|
||||||
|
with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do
|
||||||
# Check if all files from the pack.json are in the archive
|
# Check if all files from the pack.json are in the archive
|
||||||
defp has_all_files?(files, f_list) do
|
pack.files
|
||||||
Enum.all?(files, fn {_, from_manifest} ->
|
|> Enum.all?(fn {_, from_manifest} ->
|
||||||
List.keyfind(f_list, to_charlist(from_manifest), 0)
|
List.keyfind(f_list, to_charlist(from_manifest), 0)
|
||||||
end)
|
end)
|
||||||
|
|> if(do: :ok, else: {:error, :incomplete})
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec load_pack(String.t()) :: t() | nil
|
|
||||||
def load_pack(name) do
|
|
||||||
pack_file = Path.join([emoji_path(), name, "pack.json"])
|
|
||||||
|
|
||||||
if File.exists?(pack_file) do
|
|
||||||
pack_file
|
|
||||||
|> File.read!()
|
|
||||||
|> from_json()
|
|
||||||
|> Map.put(:pack_file, pack_file)
|
|
||||||
|> Map.put(:path, Path.dirname(pack_file))
|
|
||||||
|> Map.put(:name, name)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp from_json(json) do
|
|
||||||
map = Jason.decode!(json)
|
|
||||||
|
|
||||||
struct(__MODULE__, %{files: map["files"], pack: map["pack"]})
|
|
||||||
end
|
|
||||||
|
|
||||||
defp shareable_packs_available?(uri) do
|
|
||||||
uri
|
|
||||||
|> URI.merge("/.well-known/nodeinfo")
|
|
||||||
|> to_string()
|
|
||||||
|> Tesla.get!()
|
|
||||||
|> Map.get(:body)
|
|
||||||
|> Jason.decode!()
|
|
||||||
|> Map.get("links")
|
|
||||||
|> List.last()
|
|
||||||
|> Map.get("href")
|
|
||||||
# Get the actual nodeinfo address and fetch it
|
|
||||||
|> Tesla.get!()
|
|
||||||
|> Map.get(:body)
|
|
||||||
|> Jason.decode!()
|
|
||||||
|> get_in(["metadata", "features"])
|
|
||||||
|> Enum.member?("shareable_emoji_packs")
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -124,6 +124,7 @@ def get_follow_requests(%User{id: id}) do
|
||||||
|> join(:inner, [r], f in assoc(r, :follower))
|
|> join(:inner, [r], f in assoc(r, :follower))
|
||||||
|> where([r], r.state == ^:follow_pending)
|
|> where([r], r.state == ^:follow_pending)
|
||||||
|> where([r], r.following_id == ^id)
|
|> where([r], r.following_id == ^id)
|
||||||
|
|> where([r, f], f.deactivated != true)
|
||||||
|> select([r, f], f)
|
|> select([r, f], f)
|
||||||
|> Repo.all()
|
|> Repo.all()
|
||||||
end
|
end
|
||||||
|
@ -141,6 +142,12 @@ def following_query(%User{} = user) do
|
||||||
|> where([r], r.state == ^:follow_accept)
|
|> where([r], r.state == ^:follow_accept)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def outgoing_pending_follow_requests_query(%User{} = follower) do
|
||||||
|
__MODULE__
|
||||||
|
|> where([r], r.follower_id == ^follower.id)
|
||||||
|
|> where([r], r.state == ^:follow_pending)
|
||||||
|
end
|
||||||
|
|
||||||
def following(%User{} = user) do
|
def following(%User{} = user) do
|
||||||
following =
|
following =
|
||||||
following_query(user)
|
following_query(user)
|
||||||
|
|
|
@ -17,11 +17,6 @@ def append_uri_params(uri, appended_params) do
|
||||||
|> URI.to_string()
|
|> URI.to_string()
|
||||||
end
|
end
|
||||||
|
|
||||||
def append_param_if_present(%{} = params, param_name, param_value) do
|
def maybe_add_base("/" <> uri, base), do: Path.join([base, uri])
|
||||||
if param_value do
|
def maybe_add_base(uri, _base), do: uri
|
||||||
Map.put(params, param_name, param_value)
|
|
||||||
else
|
|
||||||
params
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -22,22 +22,7 @@ def options(connection_opts \\ [], %URI{} = uri) do
|
||||||
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
|
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp add_scheme_opts(opts, %URI{scheme: "http"}), do: opts
|
defp add_scheme_opts(opts, _), do: opts
|
||||||
|
|
||||||
defp add_scheme_opts(opts, %URI{scheme: "https", host: host}) do
|
|
||||||
ssl_opts = [
|
|
||||||
ssl_options: [
|
|
||||||
# Workaround for remote server certificate chain issues
|
|
||||||
partial_chain: &:hackney_connect.partial_chain/1,
|
|
||||||
|
|
||||||
# We don't support TLS v1.3 yet
|
|
||||||
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"],
|
|
||||||
server_name_indication: to_charlist(host)
|
|
||||||
]
|
|
||||||
]
|
|
||||||
|
|
||||||
Keyword.merge(opts, ssl_opts)
|
|
||||||
end
|
|
||||||
|
|
||||||
def after_request(_), do: :ok
|
def after_request(_), do: :ok
|
||||||
end
|
end
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.HTTP.ExAws do
|
||||||
|
@moduledoc false
|
||||||
|
|
||||||
|
@behaviour ExAws.Request.HttpClient
|
||||||
|
|
||||||
|
alias Pleroma.HTTP
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
|
||||||
|
case HTTP.request(method, url, body, headers, http_opts) do
|
||||||
|
{:ok, env} ->
|
||||||
|
{:ok, %{status_code: env.status, headers: env.headers, body: env.body}}
|
||||||
|
|
||||||
|
{:error, reason} ->
|
||||||
|
{:error, %{reason: reason}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -16,6 +16,7 @@ defmodule Pleroma.HTTP do
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
@type t :: __MODULE__
|
@type t :: __MODULE__
|
||||||
|
@type method() :: :get | :post | :put | :delete | :head
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Performs GET request.
|
Performs GET request.
|
||||||
|
@ -28,6 +29,9 @@ def get(url, headers \\ [], options \\ [])
|
||||||
def get(nil, _, _), do: nil
|
def get(nil, _, _), do: nil
|
||||||
def get(url, headers, options), do: request(:get, url, "", headers, options)
|
def get(url, headers, options), do: request(:get, url, "", headers, options)
|
||||||
|
|
||||||
|
@spec head(Request.url(), Request.headers(), keyword()) :: {:ok, Env.t()} | {:error, any()}
|
||||||
|
def head(url, headers \\ [], options \\ []), do: request(:head, url, "", headers, options)
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Performs POST request.
|
Performs POST request.
|
||||||
|
|
||||||
|
@ -42,7 +46,7 @@ def post(url, body, headers \\ [], options \\ []),
|
||||||
Builds and performs http request.
|
Builds and performs http request.
|
||||||
|
|
||||||
# Arguments:
|
# Arguments:
|
||||||
`method` - :get, :post, :put, :delete
|
`method` - :get, :post, :put, :delete, :head
|
||||||
`url` - full url
|
`url` - full url
|
||||||
`body` - request body
|
`body` - request body
|
||||||
`headers` - a keyworld list of headers, e.g. `[{"content-type", "text/plain"}]`
|
`headers` - a keyworld list of headers, e.g. `[{"content-type", "text/plain"}]`
|
||||||
|
@ -52,7 +56,7 @@ def post(url, body, headers \\ [], options \\ []),
|
||||||
`{:ok, %Tesla.Env{}}` or `{:error, error}`
|
`{:ok, %Tesla.Env{}}` or `{:error, error}`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@spec request(atom(), Request.url(), String.t(), Request.headers(), keyword()) ::
|
@spec request(method(), Request.url(), String.t(), Request.headers(), keyword()) ::
|
||||||
{:ok, Env.t()} | {:error, any()}
|
{:ok, Env.t()} | {:error, any()}
|
||||||
def request(method, url, body, headers, options) when is_binary(url) do
|
def request(method, url, body, headers, options) when is_binary(url) do
|
||||||
uri = URI.parse(url)
|
uri = URI.parse(url)
|
||||||
|
|
|
@ -0,0 +1,25 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.HTTP.Tzdata do
|
||||||
|
@moduledoc false
|
||||||
|
|
||||||
|
@behaviour Tzdata.HTTPClient
|
||||||
|
|
||||||
|
alias Pleroma.HTTP
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def get(url, headers, options) do
|
||||||
|
with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
|
||||||
|
{:ok, {env.status, env.headers, env.body}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def head(url, headers, options) do
|
||||||
|
with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
|
||||||
|
{:ok, {env.status, env.headers}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,37 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Maintenance do
|
||||||
|
alias Pleroma.Repo
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
def vacuum(args) do
|
||||||
|
case args do
|
||||||
|
"analyze" ->
|
||||||
|
Logger.info("Runnning VACUUM ANALYZE.")
|
||||||
|
|
||||||
|
Repo.query!(
|
||||||
|
"vacuum analyze;",
|
||||||
|
[],
|
||||||
|
timeout: :infinity
|
||||||
|
)
|
||||||
|
|
||||||
|
"full" ->
|
||||||
|
Logger.info("Runnning VACUUM FULL.")
|
||||||
|
|
||||||
|
Logger.warn(
|
||||||
|
"Re-packing your entire database may take a while and will consume extra disk space during the process."
|
||||||
|
)
|
||||||
|
|
||||||
|
Repo.query!(
|
||||||
|
"vacuum full;",
|
||||||
|
[],
|
||||||
|
timeout: :infinity
|
||||||
|
)
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
Logger.error("Error: invalid vacuum argument.")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,15 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Maps do
|
||||||
|
def put_if_present(map, key, value, value_function \\ &{:ok, &1}) when is_map(map) do
|
||||||
|
with false <- is_nil(key),
|
||||||
|
false <- is_nil(value),
|
||||||
|
{:ok, new_value} <- value_function.(value) do
|
||||||
|
Map.put(map, key, new_value)
|
||||||
|
else
|
||||||
|
_ -> map
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.MFA do
|
defmodule Pleroma.MFA do
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.MFA.BackupCodes do
|
defmodule Pleroma.MFA.BackupCodes do
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.MFA.Changeset do
|
defmodule Pleroma.MFA.Changeset do
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.MFA.Settings do
|
defmodule Pleroma.MFA.Settings do
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.MFA.Token do
|
defmodule Pleroma.MFA.Token do
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.MFA.TOTP do
|
defmodule Pleroma.MFA.TOTP do
|
||||||
|
|
|
@ -0,0 +1,85 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.MigrationHelper.NotificationBackfill do
|
||||||
|
alias Pleroma.Notification
|
||||||
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.User
|
||||||
|
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
|
def fill_in_notification_types do
|
||||||
|
query =
|
||||||
|
from(n in Pleroma.Notification,
|
||||||
|
where: is_nil(n.type),
|
||||||
|
preload: :activity
|
||||||
|
)
|
||||||
|
|
||||||
|
query
|
||||||
|
|> Repo.chunk_stream(100)
|
||||||
|
|> Enum.each(fn notification ->
|
||||||
|
type =
|
||||||
|
notification.activity
|
||||||
|
|> type_from_activity()
|
||||||
|
|
||||||
|
notification
|
||||||
|
|> Notification.changeset(%{type: type})
|
||||||
|
|> Repo.update()
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
# This is copied over from Notifications to keep this stable.
|
||||||
|
defp type_from_activity(%{data: %{"type" => type}} = activity) do
|
||||||
|
case type do
|
||||||
|
"Follow" ->
|
||||||
|
accepted_function = fn activity ->
|
||||||
|
with %User{} = follower <- User.get_by_ap_id(activity.data["actor"]),
|
||||||
|
%User{} = followed <- User.get_by_ap_id(activity.data["object"]) do
|
||||||
|
Pleroma.FollowingRelationship.following?(follower, followed)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if accepted_function.(activity) do
|
||||||
|
"follow"
|
||||||
|
else
|
||||||
|
"follow_request"
|
||||||
|
end
|
||||||
|
|
||||||
|
"Announce" ->
|
||||||
|
"reblog"
|
||||||
|
|
||||||
|
"Like" ->
|
||||||
|
"favourite"
|
||||||
|
|
||||||
|
"Move" ->
|
||||||
|
"move"
|
||||||
|
|
||||||
|
"EmojiReact" ->
|
||||||
|
"pleroma:emoji_reaction"
|
||||||
|
|
||||||
|
# Compatibility with old reactions
|
||||||
|
"EmojiReaction" ->
|
||||||
|
"pleroma:emoji_reaction"
|
||||||
|
|
||||||
|
"Create" ->
|
||||||
|
activity
|
||||||
|
|> type_from_activity_object()
|
||||||
|
|
||||||
|
t ->
|
||||||
|
raise "No notification type for activity type #{t}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp type_from_activity_object(%{data: %{"type" => "Create", "object" => %{}}}), do: "mention"
|
||||||
|
|
||||||
|
defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do
|
||||||
|
object = Object.get_by_ap_id(activity.data["object"])
|
||||||
|
|
||||||
|
case object && object.data["type"] do
|
||||||
|
"ChatMessage" -> "pleroma:chat_mention"
|
||||||
|
_ -> "mention"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -30,12 +30,29 @@ defmodule Pleroma.Notification do
|
||||||
|
|
||||||
schema "notifications" do
|
schema "notifications" do
|
||||||
field(:seen, :boolean, default: false)
|
field(:seen, :boolean, default: false)
|
||||||
|
# This is an enum type in the database. If you add a new notification type,
|
||||||
|
# remember to add a migration to add it to the `notifications_type` enum
|
||||||
|
# as well.
|
||||||
|
field(:type, :string)
|
||||||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||||
belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)
|
belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)
|
||||||
|
|
||||||
timestamps()
|
timestamps()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def update_notification_type(user, activity) do
|
||||||
|
with %__MODULE__{} = notification <-
|
||||||
|
Repo.get_by(__MODULE__, user_id: user.id, activity_id: activity.id) do
|
||||||
|
type =
|
||||||
|
activity
|
||||||
|
|> type_from_activity()
|
||||||
|
|
||||||
|
notification
|
||||||
|
|> changeset(%{type: type})
|
||||||
|
|> Repo.update()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
@spec unread_notifications_count(User.t()) :: integer()
|
@spec unread_notifications_count(User.t()) :: integer()
|
||||||
def unread_notifications_count(%User{id: user_id}) do
|
def unread_notifications_count(%User{id: user_id}) do
|
||||||
from(q in __MODULE__,
|
from(q in __MODULE__,
|
||||||
|
@ -44,9 +61,21 @@ def unread_notifications_count(%User{id: user_id}) do
|
||||||
|> Repo.aggregate(:count, :id)
|
|> Repo.aggregate(:count, :id)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@notification_types ~w{
|
||||||
|
favourite
|
||||||
|
follow
|
||||||
|
follow_request
|
||||||
|
mention
|
||||||
|
move
|
||||||
|
pleroma:chat_mention
|
||||||
|
pleroma:emoji_reaction
|
||||||
|
reblog
|
||||||
|
}
|
||||||
|
|
||||||
def changeset(%Notification{} = notification, attrs) do
|
def changeset(%Notification{} = notification, attrs) do
|
||||||
notification
|
notification
|
||||||
|> cast(attrs, [:seen])
|
|> cast(attrs, [:seen, :type])
|
||||||
|
|> validate_inclusion(:type, @notification_types)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec last_read_query(User.t()) :: Ecto.Queryable.t()
|
@spec last_read_query(User.t()) :: Ecto.Queryable.t()
|
||||||
|
@ -92,8 +121,9 @@ def for_user_query(user, opts \\ %{}) do
|
||||||
|> join(:left, [n, a], object in Object,
|
|> join(:left, [n, a], object in Object,
|
||||||
on:
|
on:
|
||||||
fragment(
|
fragment(
|
||||||
"(?->>'id') = COALESCE((? -> 'object'::text) ->> 'id'::text)",
|
"(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
|
||||||
object.data,
|
object.data,
|
||||||
|
a.data,
|
||||||
a.data
|
a.data
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -136,8 +166,16 @@ defp exclude_visibility(query, %{exclude_visibilities: visibility})
|
||||||
query
|
query
|
||||||
|> join(:left, [n, a], mutated_activity in Pleroma.Activity,
|
|> join(:left, [n, a], mutated_activity in Pleroma.Activity,
|
||||||
on:
|
on:
|
||||||
fragment("?->>'context'", a.data) ==
|
fragment(
|
||||||
fragment("?->>'context'", mutated_activity.data) and
|
"COALESCE((?->'object')->>'id', ?->>'object')",
|
||||||
|
a.data,
|
||||||
|
a.data
|
||||||
|
) ==
|
||||||
|
fragment(
|
||||||
|
"COALESCE((?->'object')->>'id', ?->>'object')",
|
||||||
|
mutated_activity.data,
|
||||||
|
mutated_activity.data
|
||||||
|
) and
|
||||||
fragment("(?->>'type' = 'Like' or ?->>'type' = 'Announce')", a.data, a.data) and
|
fragment("(?->>'type' = 'Like' or ?->>'type' = 'Announce')", a.data, a.data) and
|
||||||
fragment("?->>'type'", mutated_activity.data) == "Create",
|
fragment("?->>'type'", mutated_activity.data) == "Create",
|
||||||
as: :mutated_activity
|
as: :mutated_activity
|
||||||
|
@ -224,18 +262,8 @@ def set_read_up_to(%{id: user_id} = user, id) do
|
||||||
|> Marker.multi_set_last_read_id(user, "notifications")
|
|> Marker.multi_set_last_read_id(user, "notifications")
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|
|
||||||
Notification
|
for_user_query(user)
|
||||||
|> where([n], n.id in ^notification_ids)
|
|> where([n], n.id in ^notification_ids)
|
||||||
|> join(:inner, [n], activity in assoc(n, :activity))
|
|
||||||
|> join(:left, [n, a], object in Object,
|
|
||||||
on:
|
|
||||||
fragment(
|
|
||||||
"(?->>'id') = COALESCE((? -> 'object'::text) ->> 'id'::text)",
|
|
||||||
object.data,
|
|
||||||
a.data
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|> preload([n, a, o], activity: {a, object: o})
|
|
||||||
|> Repo.all()
|
|> Repo.all()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -309,42 +337,95 @@ def dismiss(%{id: user_id} = _user, id) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
|
def create_notifications(activity, options \\ [])
|
||||||
object = Object.normalize(activity)
|
|
||||||
|
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
|
||||||
|
object = Object.normalize(activity, false)
|
||||||
|
|
||||||
if object && object.data["type"] == "Answer" do
|
if object && object.data["type"] == "Answer" do
|
||||||
{:ok, []}
|
{:ok, []}
|
||||||
else
|
else
|
||||||
do_create_notifications(activity)
|
do_create_notifications(activity, options)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def create_notifications(%Activity{data: %{"type" => type}} = activity)
|
def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
|
||||||
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact"] do
|
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact"] do
|
||||||
do_create_notifications(activity)
|
do_create_notifications(activity, options)
|
||||||
end
|
end
|
||||||
|
|
||||||
def create_notifications(_), do: {:ok, []}
|
def create_notifications(_, _), do: {:ok, []}
|
||||||
|
|
||||||
|
defp do_create_notifications(%Activity{} = activity, options) do
|
||||||
|
do_send = Keyword.get(options, :do_send, true)
|
||||||
|
|
||||||
defp do_create_notifications(%Activity{} = activity) do
|
|
||||||
{enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
|
{enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
|
||||||
potential_receivers = enabled_receivers ++ disabled_receivers
|
potential_receivers = enabled_receivers ++ disabled_receivers
|
||||||
|
|
||||||
notifications =
|
notifications =
|
||||||
Enum.map(potential_receivers, fn user ->
|
Enum.map(potential_receivers, fn user ->
|
||||||
do_send = user in enabled_receivers
|
do_send = do_send && user in enabled_receivers
|
||||||
create_notification(activity, user, do_send)
|
create_notification(activity, user, do_send)
|
||||||
end)
|
end)
|
||||||
|
|
||||||
{:ok, notifications}
|
{:ok, notifications}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp type_from_activity(%{data: %{"type" => type}} = activity) do
|
||||||
|
case type do
|
||||||
|
"Follow" ->
|
||||||
|
if Activity.follow_accepted?(activity) do
|
||||||
|
"follow"
|
||||||
|
else
|
||||||
|
"follow_request"
|
||||||
|
end
|
||||||
|
|
||||||
|
"Announce" ->
|
||||||
|
"reblog"
|
||||||
|
|
||||||
|
"Like" ->
|
||||||
|
"favourite"
|
||||||
|
|
||||||
|
"Move" ->
|
||||||
|
"move"
|
||||||
|
|
||||||
|
"EmojiReact" ->
|
||||||
|
"pleroma:emoji_reaction"
|
||||||
|
|
||||||
|
# Compatibility with old reactions
|
||||||
|
"EmojiReaction" ->
|
||||||
|
"pleroma:emoji_reaction"
|
||||||
|
|
||||||
|
"Create" ->
|
||||||
|
activity
|
||||||
|
|> type_from_activity_object()
|
||||||
|
|
||||||
|
t ->
|
||||||
|
raise "No notification type for activity type #{t}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp type_from_activity_object(%{data: %{"type" => "Create", "object" => %{}}}), do: "mention"
|
||||||
|
|
||||||
|
defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do
|
||||||
|
object = Object.get_by_ap_id(activity.data["object"])
|
||||||
|
|
||||||
|
case object && object.data["type"] do
|
||||||
|
"ChatMessage" -> "pleroma:chat_mention"
|
||||||
|
_ -> "mention"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
# TODO move to sql, too.
|
# TODO move to sql, too.
|
||||||
def create_notification(%Activity{} = activity, %User{} = user, do_send \\ true) do
|
def create_notification(%Activity{} = activity, %User{} = user, do_send \\ true) do
|
||||||
unless skip?(activity, user) do
|
unless skip?(activity, user) do
|
||||||
{:ok, %{notification: notification}} =
|
{:ok, %{notification: notification}} =
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.insert(:notification, %Notification{user_id: user.id, activity: activity})
|
|> Multi.insert(:notification, %Notification{
|
||||||
|
user_id: user.id,
|
||||||
|
activity: activity,
|
||||||
|
type: type_from_activity(activity)
|
||||||
|
})
|
||||||
|> Marker.multi_set_last_read_id(user, "notifications")
|
|> Marker.multi_set_last_read_id(user, "notifications")
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|
|
||||||
|
@ -370,7 +451,8 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
|
||||||
when type in ["Create", "Like", "Announce", "Follow", "Move", "EmojiReact"] do
|
when type in ["Create", "Like", "Announce", "Follow", "Move", "EmojiReact"] do
|
||||||
potential_receiver_ap_ids = get_potential_receiver_ap_ids(activity)
|
potential_receiver_ap_ids = get_potential_receiver_ap_ids(activity)
|
||||||
|
|
||||||
potential_receivers = User.get_users_from_set(potential_receiver_ap_ids, local_only)
|
potential_receivers =
|
||||||
|
User.get_users_from_set(potential_receiver_ap_ids, local_only: local_only)
|
||||||
|
|
||||||
notification_enabled_ap_ids =
|
notification_enabled_ap_ids =
|
||||||
potential_receiver_ap_ids
|
potential_receiver_ap_ids
|
||||||
|
@ -467,6 +549,7 @@ def exclude_thread_muter_ap_ids(ap_ids, %Activity{} = activity) do
|
||||||
def skip?(%Activity{} = activity, %User{} = user) do
|
def skip?(%Activity{} = activity, %User{} = user) do
|
||||||
[
|
[
|
||||||
:self,
|
:self,
|
||||||
|
:invisible,
|
||||||
:followers,
|
:followers,
|
||||||
:follows,
|
:follows,
|
||||||
:non_followers,
|
:non_followers,
|
||||||
|
@ -483,6 +566,12 @@ def skip?(:self, %Activity{} = activity, %User{} = user) do
|
||||||
activity.data["actor"] == user.ap_id
|
activity.data["actor"] == user.ap_id
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def skip?(:invisible, %Activity{} = activity, _) do
|
||||||
|
actor = activity.data["actor"]
|
||||||
|
user = User.get_cached_by_ap_id(actor)
|
||||||
|
User.invisible?(user)
|
||||||
|
end
|
||||||
|
|
||||||
def skip?(
|
def skip?(
|
||||||
:followers,
|
:followers,
|
||||||
%Activity{} = activity,
|
%Activity{} = activity,
|
||||||
|
@ -535,4 +624,12 @@ def skip?(:recently_followed, %Activity{data: %{"type" => "Follow"}} = activity,
|
||||||
end
|
end
|
||||||
|
|
||||||
def skip?(_, _, _), do: false
|
def skip?(_, _, _), do: false
|
||||||
|
|
||||||
|
def for_user_and_activity(user, activity) do
|
||||||
|
from(n in __MODULE__,
|
||||||
|
where: n.user_id == ^user.id,
|
||||||
|
where: n.activity_id == ^activity.id
|
||||||
|
)
|
||||||
|
|> Repo.one()
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -9,11 +9,13 @@ defmodule Pleroma.Object do
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
|
alias Pleroma.Config
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.Object.Fetcher
|
alias Pleroma.Object.Fetcher
|
||||||
alias Pleroma.ObjectTombstone
|
alias Pleroma.ObjectTombstone
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
|
alias Pleroma.Workers.AttachmentsCleanupWorker
|
||||||
|
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
|
@ -138,12 +140,17 @@ def normalize(ap_id, true, options) when is_binary(ap_id) do
|
||||||
|
|
||||||
def normalize(_, _, _), do: nil
|
def normalize(_, _, _), do: nil
|
||||||
|
|
||||||
# Owned objects can only be mutated by their owner
|
# Owned objects can only be accessed by their owner
|
||||||
def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
|
def authorize_access(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}) do
|
||||||
do: actor == ap_id
|
if actor == ap_id do
|
||||||
|
:ok
|
||||||
|
else
|
||||||
|
{:error, :forbidden}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
# Legacy objects can be mutated by anybody
|
# Legacy objects can be accessed by anybody
|
||||||
def authorize_mutation(%Object{}, %User{}), do: true
|
def authorize_access(%Object{}, %User{}), do: :ok
|
||||||
|
|
||||||
@spec get_cached_by_ap_id(String.t()) :: Object.t() | nil
|
@spec get_cached_by_ap_id(String.t()) :: Object.t() | nil
|
||||||
def get_cached_by_ap_id(ap_id) do
|
def get_cached_by_ap_id(ap_id) do
|
||||||
|
@ -183,27 +190,37 @@ def swap_object_with_tombstone(object) do
|
||||||
def delete(%Object{data: %{"id" => id}} = object) do
|
def delete(%Object{data: %{"id" => id}} = object) do
|
||||||
with {:ok, _obj} = swap_object_with_tombstone(object),
|
with {:ok, _obj} = swap_object_with_tombstone(object),
|
||||||
deleted_activity = Activity.delete_all_by_object_ap_id(id),
|
deleted_activity = Activity.delete_all_by_object_ap_id(id),
|
||||||
{:ok, true} <- Cachex.del(:object_cache, "object:#{id}"),
|
{:ok, _} <- invalid_object_cache(object) do
|
||||||
{:ok, _} <- Cachex.del(:web_resp_cache, URI.parse(id).path) do
|
cleanup_attachments(
|
||||||
with true <- Pleroma.Config.get([:instance, :cleanup_attachments]) do
|
Config.get([:instance, :cleanup_attachments]),
|
||||||
{:ok, _} =
|
%{"object" => object}
|
||||||
Pleroma.Workers.AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{
|
)
|
||||||
"object" => object
|
|
||||||
})
|
|
||||||
end
|
|
||||||
|
|
||||||
{:ok, object, deleted_activity}
|
{:ok, object, deleted_activity}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def prune(%Object{data: %{"id" => id}} = object) do
|
@spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
|
||||||
|
{:ok, Oban.Job.t() | nil}
|
||||||
|
def cleanup_attachments(true, %{"object" => _} = params) do
|
||||||
|
AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
|
||||||
|
end
|
||||||
|
|
||||||
|
def cleanup_attachments(_, _), do: {:ok, nil}
|
||||||
|
|
||||||
|
def prune(%Object{data: %{"id" => _id}} = object) do
|
||||||
with {:ok, object} <- Repo.delete(object),
|
with {:ok, object} <- Repo.delete(object),
|
||||||
{:ok, true} <- Cachex.del(:object_cache, "object:#{id}"),
|
{:ok, _} <- invalid_object_cache(object) do
|
||||||
{:ok, _} <- Cachex.del(:web_resp_cache, URI.parse(id).path) do
|
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def invalid_object_cache(%Object{data: %{"id" => id}}) do
|
||||||
|
with {:ok, true} <- Cachex.del(:object_cache, "object:#{id}") do
|
||||||
|
Cachex.del(:web_resp_cache, URI.parse(id).path)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def set_cache(%Object{data: %{"id" => ap_id}} = object) do
|
def set_cache(%Object{data: %{"id" => ap_id}} = object) do
|
||||||
Cachex.put(:object_cache, "object:#{ap_id}", object)
|
Cachex.put(:object_cache, "object:#{ap_id}", object)
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
|
|
|
@ -83,8 +83,8 @@ def fetch_object_from_id(id, options \\ []) do
|
||||||
{:transmogrifier, {:error, {:reject, nil}}} ->
|
{:transmogrifier, {:error, {:reject, nil}}} ->
|
||||||
{:reject, nil}
|
{:reject, nil}
|
||||||
|
|
||||||
{:transmogrifier, _} ->
|
{:transmogrifier, _} = e ->
|
||||||
{:error, "Transmogrifier failure."}
|
{:error, e}
|
||||||
|
|
||||||
{:object, data, nil} ->
|
{:object, data, nil} ->
|
||||||
reinject_object(%Object{}, data)
|
reinject_object(%Object{}, data)
|
||||||
|
|
|
@ -23,12 +23,12 @@ def page_keys, do: @page_keys
|
||||||
@spec fetch_paginated(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
|
@spec fetch_paginated(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
|
||||||
def fetch_paginated(query, params, type \\ :keyset, table_binding \\ nil)
|
def fetch_paginated(query, params, type \\ :keyset, table_binding \\ nil)
|
||||||
|
|
||||||
def fetch_paginated(query, %{"total" => true} = params, :keyset, table_binding) do
|
def fetch_paginated(query, %{total: true} = params, :keyset, table_binding) do
|
||||||
total = Repo.aggregate(query, :count, :id)
|
total = Repo.aggregate(query, :count, :id)
|
||||||
|
|
||||||
%{
|
%{
|
||||||
total: total,
|
total: total,
|
||||||
items: fetch_paginated(query, Map.drop(params, ["total"]), :keyset, table_binding)
|
items: fetch_paginated(query, Map.drop(params, [:total]), :keyset, table_binding)
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -41,7 +41,7 @@ def fetch_paginated(query, params, :keyset, table_binding) do
|
||||||
|> enforce_order(options)
|
|> enforce_order(options)
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_paginated(query, %{"total" => true} = params, :offset, table_binding) do
|
def fetch_paginated(query, %{total: true} = params, :offset, table_binding) do
|
||||||
total =
|
total =
|
||||||
query
|
query
|
||||||
|> Ecto.Query.exclude(:left_join)
|
|> Ecto.Query.exclude(:left_join)
|
||||||
|
@ -49,7 +49,7 @@ def fetch_paginated(query, %{"total" => true} = params, :offset, table_binding)
|
||||||
|
|
||||||
%{
|
%{
|
||||||
total: total,
|
total: total,
|
||||||
items: fetch_paginated(query, Map.drop(params, ["total"]), :offset, table_binding)
|
items: fetch_paginated(query, Map.drop(params, [:total]), :offset, table_binding)
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -64,6 +64,12 @@ def fetch_paginated(query, params, :offset, table_binding) do
|
||||||
@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
|
@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
|
||||||
def paginate(query, options, method \\ :keyset, table_binding \\ nil)
|
def paginate(query, options, method \\ :keyset, table_binding \\ nil)
|
||||||
|
|
||||||
|
def paginate(list, options, _method, _table_binding) when is_list(list) do
|
||||||
|
offset = options[:offset] || 0
|
||||||
|
limit = options[:limit] || 0
|
||||||
|
Enum.slice(list, offset, limit)
|
||||||
|
end
|
||||||
|
|
||||||
def paginate(query, options, :keyset, table_binding) do
|
def paginate(query, options, :keyset, table_binding) do
|
||||||
query
|
query
|
||||||
|> restrict(:min_id, options, table_binding)
|
|> restrict(:min_id, options, table_binding)
|
||||||
|
@ -90,12 +96,6 @@ defp cast_params(params) do
|
||||||
skip_order: :boolean
|
skip_order: :boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
params =
|
|
||||||
Enum.reduce(params, %{}, fn
|
|
||||||
{key, _value}, acc when is_atom(key) -> Map.drop(acc, [key])
|
|
||||||
{key, value}, acc -> Map.put(acc, key, value)
|
|
||||||
end)
|
|
||||||
|
|
||||||
changeset = cast({%{}, param_types}, params, Map.keys(param_types))
|
changeset = cast({%{}, param_types}, params, Map.keys(param_types))
|
||||||
changeset.changes
|
changeset.changes
|
||||||
end
|
end
|
||||||
|
|
|
@ -30,6 +30,25 @@ def checkpw(_password, _password_hash) do
|
||||||
false
|
false
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def maybe_update_password(%User{password_hash: "$2" <> _} = user, password) do
|
||||||
|
do_update_password(user, password)
|
||||||
|
end
|
||||||
|
|
||||||
|
def maybe_update_password(%User{password_hash: "$6" <> _} = user, password) do
|
||||||
|
do_update_password(user, password)
|
||||||
|
end
|
||||||
|
|
||||||
|
def maybe_update_password(user, _), do: {:ok, user}
|
||||||
|
|
||||||
|
defp do_update_password(user, password) do
|
||||||
|
user
|
||||||
|
|> User.password_update_changeset(%{
|
||||||
|
"password" => password,
|
||||||
|
"password_confirmation" => password
|
||||||
|
})
|
||||||
|
|> Pleroma.Repo.update()
|
||||||
|
end
|
||||||
|
|
||||||
def call(%{assigns: %{user: %User{}}} = conn, _), do: conn
|
def call(%{assigns: %{user: %User{}}} = conn, _), do: conn
|
||||||
|
|
||||||
def call(
|
def call(
|
||||||
|
@ -42,6 +61,8 @@ def call(
|
||||||
_
|
_
|
||||||
) do
|
) do
|
||||||
if checkpw(password, password_hash) do
|
if checkpw(password, password_hash) do
|
||||||
|
{:ok, auth_user} = maybe_update_password(auth_user, password)
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> assign(:user, auth_user)
|
|> assign(:user, auth_user)
|
||||||
|> OAuthScopesPlug.skip_plug()
|
|> OAuthScopesPlug.skip_plug()
|
||||||
|
|
|
@ -31,7 +31,7 @@ defp headers do
|
||||||
{"x-content-type-options", "nosniff"},
|
{"x-content-type-options", "nosniff"},
|
||||||
{"referrer-policy", referrer_policy},
|
{"referrer-policy", referrer_policy},
|
||||||
{"x-download-options", "noopen"},
|
{"x-download-options", "noopen"},
|
||||||
{"content-security-policy", csp_string() <> ";"}
|
{"content-security-policy", csp_string()}
|
||||||
]
|
]
|
||||||
|
|
||||||
if report_uri do
|
if report_uri do
|
||||||
|
@ -43,23 +43,46 @@ defp headers do
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
headers ++ [{"reply-to", Jason.encode!(report_group)}]
|
[{"reply-to", Jason.encode!(report_group)} | headers]
|
||||||
else
|
else
|
||||||
headers
|
headers
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
static_csp_rules = [
|
||||||
|
"default-src 'none'",
|
||||||
|
"base-uri 'self'",
|
||||||
|
"frame-ancestors 'none'",
|
||||||
|
"style-src 'self' 'unsafe-inline'",
|
||||||
|
"font-src 'self'",
|
||||||
|
"manifest-src 'self'"
|
||||||
|
]
|
||||||
|
|
||||||
|
@csp_start [Enum.join(static_csp_rules, ";") <> ";"]
|
||||||
|
|
||||||
defp csp_string do
|
defp csp_string do
|
||||||
scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme]
|
scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme]
|
||||||
static_url = Pleroma.Web.Endpoint.static_url()
|
static_url = Pleroma.Web.Endpoint.static_url()
|
||||||
websocket_url = Pleroma.Web.Endpoint.websocket_url()
|
websocket_url = Pleroma.Web.Endpoint.websocket_url()
|
||||||
report_uri = Config.get([:http_security, :report_uri])
|
report_uri = Config.get([:http_security, :report_uri])
|
||||||
|
|
||||||
connect_src = "connect-src 'self' #{static_url} #{websocket_url}"
|
img_src = "img-src 'self' data: blob:"
|
||||||
|
media_src = "media-src 'self'"
|
||||||
|
|
||||||
|
{img_src, media_src} =
|
||||||
|
if Config.get([:media_proxy, :enabled]) &&
|
||||||
|
!Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
|
||||||
|
sources = get_proxy_and_attachment_sources()
|
||||||
|
{[img_src, sources], [media_src, sources]}
|
||||||
|
else
|
||||||
|
{[img_src, " https:"], [media_src, " https:"]}
|
||||||
|
end
|
||||||
|
|
||||||
|
connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
|
||||||
|
|
||||||
connect_src =
|
connect_src =
|
||||||
if Pleroma.Config.get(:env) == :dev do
|
if Pleroma.Config.get(:env) == :dev do
|
||||||
connect_src <> " http://localhost:3035/"
|
[connect_src, " http://localhost:3035/"]
|
||||||
else
|
else
|
||||||
connect_src
|
connect_src
|
||||||
end
|
end
|
||||||
|
@ -71,27 +94,51 @@ defp csp_string do
|
||||||
"script-src 'self'"
|
"script-src 'self'"
|
||||||
end
|
end
|
||||||
|
|
||||||
main_part = [
|
report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"]
|
||||||
"default-src 'none'",
|
insecure = if scheme == "https", do: "upgrade-insecure-requests"
|
||||||
"base-uri 'self'",
|
|
||||||
"frame-ancestors 'none'",
|
|
||||||
"img-src 'self' data: blob: https:",
|
|
||||||
"media-src 'self' https:",
|
|
||||||
"style-src 'self' 'unsafe-inline'",
|
|
||||||
"font-src 'self'",
|
|
||||||
"manifest-src 'self'",
|
|
||||||
connect_src,
|
|
||||||
script_src
|
|
||||||
]
|
|
||||||
|
|
||||||
report = if report_uri, do: ["report-uri #{report_uri}; report-to csp-endpoint"], else: []
|
@csp_start
|
||||||
|
|> add_csp_param(img_src)
|
||||||
insecure = if scheme == "https", do: ["upgrade-insecure-requests"], else: []
|
|> add_csp_param(media_src)
|
||||||
|
|> add_csp_param(connect_src)
|
||||||
(main_part ++ report ++ insecure)
|
|> add_csp_param(script_src)
|
||||||
|> Enum.join("; ")
|
|> add_csp_param(insecure)
|
||||||
|
|> add_csp_param(report)
|
||||||
|
|> :erlang.iolist_to_binary()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp get_proxy_and_attachment_sources do
|
||||||
|
media_proxy_whitelist =
|
||||||
|
Enum.reduce(Config.get([:media_proxy, :whitelist]), [], fn host, acc ->
|
||||||
|
add_source(acc, host)
|
||||||
|
end)
|
||||||
|
|
||||||
|
media_proxy_base_url =
|
||||||
|
if Config.get([:media_proxy, :base_url]),
|
||||||
|
do: URI.parse(Config.get([:media_proxy, :base_url])).host
|
||||||
|
|
||||||
|
upload_base_url =
|
||||||
|
if Config.get([Pleroma.Upload, :base_url]),
|
||||||
|
do: URI.parse(Config.get([Pleroma.Upload, :base_url])).host
|
||||||
|
|
||||||
|
s3_endpoint =
|
||||||
|
if Config.get([Pleroma.Upload, :uploader]) == Pleroma.Uploaders.S3,
|
||||||
|
do: URI.parse(Config.get([Pleroma.Uploaders.S3, :public_endpoint])).host
|
||||||
|
|
||||||
|
[]
|
||||||
|
|> add_source(media_proxy_base_url)
|
||||||
|
|> add_source(upload_base_url)
|
||||||
|
|> add_source(s3_endpoint)
|
||||||
|
|> add_source(media_proxy_whitelist)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp add_source(iodata, nil), do: iodata
|
||||||
|
defp add_source(iodata, source), do: [[?\s, source] | iodata]
|
||||||
|
|
||||||
|
defp add_csp_param(csp_iodata, nil), do: csp_iodata
|
||||||
|
|
||||||
|
defp add_csp_param(csp_iodata, param), do: [[param, ?;] | csp_iodata]
|
||||||
|
|
||||||
def warn_if_disabled do
|
def warn_if_disabled do
|
||||||
unless Config.get([:http_security, :enabled]) do
|
unless Config.get([:http_security, :enabled]) do
|
||||||
Logger.warn("
|
Logger.warn("
|
||||||
|
|
|
@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
|
||||||
import Pleroma.Web.Gettext
|
import Pleroma.Web.Gettext
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
|
alias Pleroma.Web.MediaProxy
|
||||||
|
|
||||||
@behaviour Plug
|
@behaviour Plug
|
||||||
# no slashes
|
# no slashes
|
||||||
@path "media"
|
@path "media"
|
||||||
|
@ -35,8 +37,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
|
||||||
%{query_params: %{"name" => name}} = conn ->
|
%{query_params: %{"name" => name}} = conn ->
|
||||||
name = String.replace(name, "\"", "\\\"")
|
name = String.replace(name, "\"", "\\\"")
|
||||||
|
|
||||||
conn
|
put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
|
||||||
|> put_resp_header("content-disposition", "filename=\"#{name}\"")
|
|
||||||
|
|
||||||
conn ->
|
conn ->
|
||||||
conn
|
conn
|
||||||
|
@ -47,7 +48,8 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
|
||||||
|
|
||||||
with uploader <- Keyword.fetch!(config, :uploader),
|
with uploader <- Keyword.fetch!(config, :uploader),
|
||||||
proxy_remote = Keyword.get(config, :proxy_remote, false),
|
proxy_remote = Keyword.get(config, :proxy_remote, false),
|
||||||
{:ok, get_method} <- uploader.get_file(file) do
|
{:ok, get_method} <- uploader.get_file(file),
|
||||||
|
false <- media_is_banned(conn, get_method) do
|
||||||
get_media(conn, get_method, proxy_remote, opts)
|
get_media(conn, get_method, proxy_remote, opts)
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
|
@ -59,6 +61,14 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
|
||||||
|
|
||||||
def call(conn, _opts), do: conn
|
def call(conn, _opts), do: conn
|
||||||
|
|
||||||
|
defp media_is_banned(%{request_path: path} = _conn, {:static_dir, _}) do
|
||||||
|
MediaProxy.in_banned_urls(Pleroma.Web.base_url() <> path)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp media_is_banned(_, {:url, url}), do: MediaProxy.in_banned_urls(url)
|
||||||
|
|
||||||
|
defp media_is_banned(_, _), do: false
|
||||||
|
|
||||||
defp get_media(conn, {:static_dir, directory}, _, opts) do
|
defp get_media(conn, {:static_dir, directory}, _, opts) do
|
||||||
static_opts =
|
static_opts =
|
||||||
Map.get(opts, :static_plug_opts)
|
Map.get(opts, :static_plug_opts)
|
||||||
|
|
|
@ -8,11 +8,10 @@ defmodule Pleroma.Repo do
|
||||||
adapter: Ecto.Adapters.Postgres,
|
adapter: Ecto.Adapters.Postgres,
|
||||||
migration_timestamps: [type: :naive_datetime_usec]
|
migration_timestamps: [type: :naive_datetime_usec]
|
||||||
|
|
||||||
|
import Ecto.Query
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
defmodule Instrumenter do
|
defmodule Instrumenter, do: use(Prometheus.EctoInstrumenter)
|
||||||
use Prometheus.EctoInstrumenter
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Dynamically loads the repository url from the
|
Dynamically loads the repository url from the
|
||||||
|
@ -50,36 +49,30 @@ def get_assoc(resource, association) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def check_migrations_applied!() do
|
def chunk_stream(query, chunk_size) do
|
||||||
unless Pleroma.Config.get(
|
# We don't actually need start and end funcitons of resource streaming,
|
||||||
[:i_am_aware_this_may_cause_data_loss, :disable_migration_check],
|
# but it seems to be the only way to not fetch records one-by-one and
|
||||||
false
|
# have individual records be the elements of the stream, instead of
|
||||||
) do
|
# lists of records
|
||||||
Ecto.Migrator.with_repo(__MODULE__, fn repo ->
|
Stream.resource(
|
||||||
down_migrations =
|
fn -> 0 end,
|
||||||
Ecto.Migrator.migrations(repo)
|
fn
|
||||||
|> Enum.reject(fn
|
last_id ->
|
||||||
{:up, _, _} -> true
|
query
|
||||||
{:down, _, _} -> false
|
|> order_by(asc: :id)
|
||||||
end)
|
|> where([r], r.id > ^last_id)
|
||||||
|
|> limit(^chunk_size)
|
||||||
|
|> all()
|
||||||
|
|> case do
|
||||||
|
[] ->
|
||||||
|
{:halt, last_id}
|
||||||
|
|
||||||
if length(down_migrations) > 0 do
|
records ->
|
||||||
down_migrations_text =
|
last_id = List.last(records).id
|
||||||
Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)
|
{records, last_id}
|
||||||
|
end
|
||||||
Logger.error(
|
end,
|
||||||
"The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
|
fn _ -> :ok end
|
||||||
)
|
)
|
||||||
|
|
||||||
raise Pleroma.Repo.UnappliedMigrationsError
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
else
|
|
||||||
:ok
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defmodule Pleroma.Repo.UnappliedMigrationsError do
|
|
||||||
defexception message: "Unapplied Migrations detected"
|
|
||||||
end
|
|
||||||
|
|
|
@ -5,10 +5,10 @@
|
||||||
defmodule Pleroma.Signature do
|
defmodule Pleroma.Signature do
|
||||||
@behaviour HTTPSignatures.Adapter
|
@behaviour HTTPSignatures.Adapter
|
||||||
|
|
||||||
|
alias Pleroma.EctoType.ActivityPub.ObjectValidators
|
||||||
alias Pleroma.Keys
|
alias Pleroma.Keys
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
|
|
||||||
|
|
||||||
def key_id_to_actor_id(key_id) do
|
def key_id_to_actor_id(key_id) do
|
||||||
uri =
|
uri =
|
||||||
|
@ -24,7 +24,7 @@ def key_id_to_actor_id(key_id) do
|
||||||
|
|
||||||
maybe_ap_id = URI.to_string(uri)
|
maybe_ap_id = URI.to_string(uri)
|
||||||
|
|
||||||
case Types.ObjectID.cast(maybe_ap_id) do
|
case ObjectValidators.ObjectID.cast(maybe_ap_id) do
|
||||||
{:ok, ap_id} ->
|
{:ok, ap_id} ->
|
||||||
{:ok, ap_id}
|
{:ok, ap_id}
|
||||||
|
|
||||||
|
|
|
@ -97,20 +97,11 @@ def calculate_stat_data do
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
def get_status_visibility_count do
|
def get_status_visibility_count(instance \\ nil) do
|
||||||
counter_cache =
|
if is_nil(instance) do
|
||||||
CounterCache.get_as_map([
|
CounterCache.get_sum()
|
||||||
"status_visibility_public",
|
else
|
||||||
"status_visibility_private",
|
CounterCache.get_by_instance(instance)
|
||||||
"status_visibility_unlisted",
|
end
|
||||||
"status_visibility_direct"
|
|
||||||
])
|
|
||||||
|
|
||||||
%{
|
|
||||||
public: counter_cache["status_visibility_public"] || 0,
|
|
||||||
unlisted: counter_cache["status_visibility_unlisted"] || 0,
|
|
||||||
private: counter_cache["status_visibility_private"] || 0,
|
|
||||||
direct: counter_cache["status_visibility_direct"] || 0
|
|
||||||
}
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -67,6 +67,7 @@ def store(upload, opts \\ []) do
|
||||||
{:ok,
|
{:ok,
|
||||||
%{
|
%{
|
||||||
"type" => opts.activity_type,
|
"type" => opts.activity_type,
|
||||||
|
"mediaType" => upload.content_type,
|
||||||
"url" => [
|
"url" => [
|
||||||
%{
|
%{
|
||||||
"type" => "Link",
|
"type" => "Link",
|
||||||
|
|
|
@ -14,6 +14,7 @@ defmodule Pleroma.User do
|
||||||
alias Pleroma.Config
|
alias Pleroma.Config
|
||||||
alias Pleroma.Conversation.Participation
|
alias Pleroma.Conversation.Participation
|
||||||
alias Pleroma.Delivery
|
alias Pleroma.Delivery
|
||||||
|
alias Pleroma.EctoType.ActivityPub.ObjectValidators
|
||||||
alias Pleroma.Emoji
|
alias Pleroma.Emoji
|
||||||
alias Pleroma.FollowingRelationship
|
alias Pleroma.FollowingRelationship
|
||||||
alias Pleroma.Formatter
|
alias Pleroma.Formatter
|
||||||
|
@ -30,7 +31,6 @@ defmodule Pleroma.User do
|
||||||
alias Pleroma.Web
|
alias Pleroma.Web
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
alias Pleroma.Web.ActivityPub.Builder
|
alias Pleroma.Web.ActivityPub.Builder
|
||||||
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
|
|
||||||
alias Pleroma.Web.ActivityPub.Pipeline
|
alias Pleroma.Web.ActivityPub.Pipeline
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
|
@ -79,6 +79,7 @@ defmodule Pleroma.User do
|
||||||
|
|
||||||
schema "users" do
|
schema "users" do
|
||||||
field(:bio, :string)
|
field(:bio, :string)
|
||||||
|
field(:raw_bio, :string)
|
||||||
field(:email, :string)
|
field(:email, :string)
|
||||||
field(:name, :string)
|
field(:name, :string)
|
||||||
field(:nickname, :string)
|
field(:nickname, :string)
|
||||||
|
@ -115,7 +116,7 @@ defmodule Pleroma.User do
|
||||||
field(:is_admin, :boolean, default: false)
|
field(:is_admin, :boolean, default: false)
|
||||||
field(:show_role, :boolean, default: true)
|
field(:show_role, :boolean, default: true)
|
||||||
field(:settings, :map, default: nil)
|
field(:settings, :map, default: nil)
|
||||||
field(:uri, Types.Uri, default: nil)
|
field(:uri, ObjectValidators.Uri, default: nil)
|
||||||
field(:hide_followers_count, :boolean, default: false)
|
field(:hide_followers_count, :boolean, default: false)
|
||||||
field(:hide_follows_count, :boolean, default: false)
|
field(:hide_follows_count, :boolean, default: false)
|
||||||
field(:hide_followers, :boolean, default: false)
|
field(:hide_followers, :boolean, default: false)
|
||||||
|
@ -262,37 +263,60 @@ def account_status(%User{deactivated: true}), do: :deactivated
|
||||||
def account_status(%User{password_reset_pending: true}), do: :password_reset_pending
|
def account_status(%User{password_reset_pending: true}), do: :password_reset_pending
|
||||||
|
|
||||||
def account_status(%User{confirmation_pending: true}) do
|
def account_status(%User{confirmation_pending: true}) do
|
||||||
case Config.get([:instance, :account_activation_required]) do
|
if Config.get([:instance, :account_activation_required]) do
|
||||||
true -> :confirmation_pending
|
:confirmation_pending
|
||||||
_ -> :active
|
else
|
||||||
|
:active
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def account_status(%User{}), do: :active
|
def account_status(%User{}), do: :active
|
||||||
|
|
||||||
@spec visible_for?(User.t(), User.t() | nil) :: boolean()
|
@spec visible_for(User.t(), User.t() | nil) ::
|
||||||
def visible_for?(user, for_user \\ nil)
|
:visible
|
||||||
|
| :invisible
|
||||||
|
| :restricted_unauthenticated
|
||||||
|
| :deactivated
|
||||||
|
| :confirmation_pending
|
||||||
|
def visible_for(user, for_user \\ nil)
|
||||||
|
|
||||||
def visible_for?(%User{invisible: true}, _), do: false
|
def visible_for(%User{invisible: true}, _), do: :invisible
|
||||||
|
|
||||||
def visible_for?(%User{id: user_id}, %User{id: user_id}), do: true
|
def visible_for(%User{id: user_id}, %User{id: user_id}), do: :visible
|
||||||
|
|
||||||
def visible_for?(%User{local: local} = user, nil) do
|
def visible_for(%User{} = user, nil) do
|
||||||
cfg_key =
|
if restrict_unauthenticated?(user) do
|
||||||
if local,
|
:restrict_unauthenticated
|
||||||
do: :local,
|
else
|
||||||
else: :remote
|
visible_account_status(user)
|
||||||
|
end
|
||||||
if Config.get([:restrict_unauthenticated, :profiles, cfg_key]),
|
|
||||||
do: false,
|
|
||||||
else: account_status(user) == :active
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def visible_for?(%User{} = user, for_user) do
|
def visible_for(%User{} = user, for_user) do
|
||||||
account_status(user) == :active || superuser?(for_user)
|
if superuser?(for_user) do
|
||||||
|
:visible
|
||||||
|
else
|
||||||
|
visible_account_status(user)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def visible_for?(_, _), do: false
|
def visible_for(_, _), do: :invisible
|
||||||
|
|
||||||
|
defp restrict_unauthenticated?(%User{local: local}) do
|
||||||
|
config_key = if local, do: :local, else: :remote
|
||||||
|
|
||||||
|
Config.get([:restrict_unauthenticated, :profiles, config_key], false)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp visible_account_status(user) do
|
||||||
|
status = account_status(user)
|
||||||
|
|
||||||
|
if status in [:active, :password_reset_pending] do
|
||||||
|
:visible
|
||||||
|
else
|
||||||
|
status
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
@spec superuser?(User.t()) :: boolean()
|
@spec superuser?(User.t()) :: boolean()
|
||||||
def superuser?(%User{local: true, is_admin: true}), do: true
|
def superuser?(%User{local: true, is_admin: true}), do: true
|
||||||
|
@ -305,8 +329,13 @@ def invisible?(_), do: false
|
||||||
|
|
||||||
def avatar_url(user, options \\ []) do
|
def avatar_url(user, options \\ []) do
|
||||||
case user.avatar do
|
case user.avatar do
|
||||||
%{"url" => [%{"href" => href} | _]} -> href
|
%{"url" => [%{"href" => href} | _]} ->
|
||||||
_ -> !options[:no_default] && "#{Web.base_url()}/images/avi.png"
|
href
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
unless options[:no_default] do
|
||||||
|
Config.get([:assets, :default_user_avatar], "#{Web.base_url()}/images/avi.png")
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -427,6 +456,7 @@ def update_changeset(struct, params \\ %{}) do
|
||||||
params,
|
params,
|
||||||
[
|
[
|
||||||
:bio,
|
:bio,
|
||||||
|
:raw_bio,
|
||||||
:name,
|
:name,
|
||||||
:emoji,
|
:emoji,
|
||||||
:avatar,
|
:avatar,
|
||||||
|
@ -458,6 +488,7 @@ def update_changeset(struct, params \\ %{}) do
|
||||||
|> validate_format(:nickname, local_nickname_regex())
|
|> validate_format(:nickname, local_nickname_regex())
|
||||||
|> validate_length(:bio, max: bio_limit)
|
|> validate_length(:bio, max: bio_limit)
|
||||||
|> validate_length(:name, min: 1, max: name_limit)
|
|> validate_length(:name, min: 1, max: name_limit)
|
||||||
|
|> validate_inclusion(:actor_type, ["Person", "Service"])
|
||||||
|> put_fields()
|
|> put_fields()
|
||||||
|> put_emoji()
|
|> put_emoji()
|
||||||
|> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
|
|> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
|
||||||
|
@ -533,9 +564,10 @@ def update_as_admin_changeset(struct, params) do
|
||||||
|> delete_change(:also_known_as)
|
|> delete_change(:also_known_as)
|
||||||
|> unique_constraint(:email)
|
|> unique_constraint(:email)
|
||||||
|> validate_format(:email, @email_regex)
|
|> validate_format(:email, @email_regex)
|
||||||
|
|> validate_inclusion(:actor_type, ["Person", "Service"])
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec update_as_admin(%User{}, map) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
@spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||||
def update_as_admin(user, params) do
|
def update_as_admin(user, params) do
|
||||||
params = Map.put(params, "password_confirmation", params["password"])
|
params = Map.put(params, "password_confirmation", params["password"])
|
||||||
changeset = update_as_admin_changeset(user, params)
|
changeset = update_as_admin_changeset(user, params)
|
||||||
|
@ -556,7 +588,7 @@ def password_update_changeset(struct, params) do
|
||||||
|> put_change(:password_reset_pending, false)
|
|> put_change(:password_reset_pending, false)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec reset_password(User.t(), map) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
@spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||||
def reset_password(%User{} = user, params) do
|
def reset_password(%User{} = user, params) do
|
||||||
reset_password(user, user, params)
|
reset_password(user, user, params)
|
||||||
end
|
end
|
||||||
|
@ -601,7 +633,16 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
|
||||||
|
|
||||||
struct
|
struct
|
||||||
|> confirmation_changeset(need_confirmation: need_confirmation?)
|
|> confirmation_changeset(need_confirmation: need_confirmation?)
|
||||||
|> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation, :emoji])
|
|> cast(params, [
|
||||||
|
:bio,
|
||||||
|
:raw_bio,
|
||||||
|
:email,
|
||||||
|
:name,
|
||||||
|
:nickname,
|
||||||
|
:password,
|
||||||
|
:password_confirmation,
|
||||||
|
:emoji
|
||||||
|
])
|
||||||
|> validate_required([:name, :nickname, :password, :password_confirmation])
|
|> validate_required([:name, :nickname, :password, :password_confirmation])
|
||||||
|> validate_confirmation(:password)
|
|> validate_confirmation(:password)
|
||||||
|> unique_constraint(:email)
|
|> unique_constraint(:email)
|
||||||
|
@ -741,7 +782,6 @@ def follow(%User{} = follower, %User{} = followed, state \\ :follow_accept) do
|
||||||
|
|
||||||
follower
|
follower
|
||||||
|> update_following_count()
|
|> update_following_count()
|
||||||
|> set_cache()
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -749,7 +789,19 @@ def unfollow(%User{ap_id: ap_id}, %User{ap_id: ap_id}) do
|
||||||
{:error, "Not subscribed!"}
|
{:error, "Not subscribed!"}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec unfollow(User.t(), User.t()) :: {:ok, User.t(), Activity.t()} | {:error, String.t()}
|
||||||
def unfollow(%User{} = follower, %User{} = followed) do
|
def unfollow(%User{} = follower, %User{} = followed) do
|
||||||
|
case do_unfollow(follower, followed) do
|
||||||
|
{:ok, follower, followed} ->
|
||||||
|
{:ok, follower, Utils.fetch_latest_follow(follower, followed)}
|
||||||
|
|
||||||
|
error ->
|
||||||
|
error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec do_unfollow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, String.t()}
|
||||||
|
defp do_unfollow(%User{} = follower, %User{} = followed) do
|
||||||
case get_follow_state(follower, followed) do
|
case get_follow_state(follower, followed) do
|
||||||
state when state in [:follow_pending, :follow_accept] ->
|
state when state in [:follow_pending, :follow_accept] ->
|
||||||
FollowingRelationship.unfollow(follower, followed)
|
FollowingRelationship.unfollow(follower, followed)
|
||||||
|
@ -758,9 +810,8 @@ def unfollow(%User{} = follower, %User{} = followed) do
|
||||||
{:ok, follower} =
|
{:ok, follower} =
|
||||||
follower
|
follower
|
||||||
|> update_following_count()
|
|> update_following_count()
|
||||||
|> set_cache()
|
|
||||||
|
|
||||||
{:ok, follower, Utils.fetch_latest_follow(follower, followed)}
|
{:ok, follower, followed}
|
||||||
|
|
||||||
nil ->
|
nil ->
|
||||||
{:error, "Not subscribed!"}
|
{:error, "Not subscribed!"}
|
||||||
|
@ -1110,35 +1161,25 @@ defp follow_information_changeset(user, params) do
|
||||||
])
|
])
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec update_follower_count(User.t()) :: {:ok, User.t()}
|
||||||
def update_follower_count(%User{} = user) do
|
def update_follower_count(%User{} = user) do
|
||||||
if user.local or !Pleroma.Config.get([:instance, :external_user_synchronization]) do
|
if user.local or !Pleroma.Config.get([:instance, :external_user_synchronization]) do
|
||||||
follower_count_query =
|
follower_count = FollowingRelationship.follower_count(user)
|
||||||
User.Query.build(%{followers: user, deactivated: false})
|
|
||||||
|> select([u], %{count: count(u.id)})
|
|
||||||
|
|
||||||
User
|
user
|
||||||
|> where(id: ^user.id)
|
|> follow_information_changeset(%{follower_count: follower_count})
|
||||||
|> join(:inner, [u], s in subquery(follower_count_query))
|
|> update_and_set_cache
|
||||||
|> update([u, s],
|
|
||||||
set: [follower_count: s.count]
|
|
||||||
)
|
|
||||||
|> select([u], u)
|
|
||||||
|> Repo.update_all([])
|
|
||||||
|> case do
|
|
||||||
{1, [user]} -> set_cache(user)
|
|
||||||
_ -> {:error, user}
|
|
||||||
end
|
|
||||||
else
|
else
|
||||||
{:ok, maybe_fetch_follow_information(user)}
|
{:ok, maybe_fetch_follow_information(user)}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec update_following_count(User.t()) :: User.t()
|
@spec update_following_count(User.t()) :: {:ok, User.t()}
|
||||||
def update_following_count(%User{local: false} = user) do
|
def update_following_count(%User{local: false} = user) do
|
||||||
if Pleroma.Config.get([:instance, :external_user_synchronization]) do
|
if Pleroma.Config.get([:instance, :external_user_synchronization]) do
|
||||||
maybe_fetch_follow_information(user)
|
{:ok, maybe_fetch_follow_information(user)}
|
||||||
else
|
else
|
||||||
user
|
{:ok, user}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -1147,7 +1188,7 @@ def update_following_count(%User{local: true} = user) do
|
||||||
|
|
||||||
user
|
user
|
||||||
|> follow_information_changeset(%{following_count: following_count})
|
|> follow_information_changeset(%{following_count: following_count})
|
||||||
|> Repo.update!()
|
|> update_and_set_cache()
|
||||||
end
|
end
|
||||||
|
|
||||||
def set_unread_conversation_count(%User{local: true} = user) do
|
def set_unread_conversation_count(%User{local: true} = user) do
|
||||||
|
@ -1191,8 +1232,9 @@ def increment_unread_conversation_count(conversation, %User{local: true} = user)
|
||||||
|
|
||||||
def increment_unread_conversation_count(_, user), do: {:ok, user}
|
def increment_unread_conversation_count(_, user), do: {:ok, user}
|
||||||
|
|
||||||
@spec get_users_from_set([String.t()], boolean()) :: [User.t()]
|
@spec get_users_from_set([String.t()], keyword()) :: [User.t()]
|
||||||
def get_users_from_set(ap_ids, local_only \\ true) do
|
def get_users_from_set(ap_ids, opts \\ []) do
|
||||||
|
local_only = Keyword.get(opts, :local_only, true)
|
||||||
criteria = %{ap_id: ap_ids, deactivated: false}
|
criteria = %{ap_id: ap_ids, deactivated: false}
|
||||||
criteria = if local_only, do: Map.put(criteria, :local, true), else: criteria
|
criteria = if local_only, do: Map.put(criteria, :local, true), else: criteria
|
||||||
|
|
||||||
|
@ -1204,7 +1246,9 @@ def get_users_from_set(ap_ids, local_only \\ true) do
|
||||||
def get_recipients_from_activity(%Activity{recipients: to, actor: actor}) do
|
def get_recipients_from_activity(%Activity{recipients: to, actor: actor}) do
|
||||||
to = [actor | to]
|
to = [actor | to]
|
||||||
|
|
||||||
User.Query.build(%{recipients_from_activity: to, local: true, deactivated: false})
|
query = User.Query.build(%{recipients_from_activity: to, local: true, deactivated: false})
|
||||||
|
|
||||||
|
query
|
||||||
|> Repo.all()
|
|> Repo.all()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -1265,7 +1309,8 @@ def block(%User{} = blocker, %User{} = blocked) do
|
||||||
|
|
||||||
unsubscribe(blocked, blocker)
|
unsubscribe(blocked, blocker)
|
||||||
|
|
||||||
if following?(blocked, blocker), do: unfollow(blocked, blocker)
|
unfollowing_blocked = Config.get([:activitypub, :unfollow_blocked], true)
|
||||||
|
if unfollowing_blocked && following?(blocked, blocker), do: unfollow(blocked, blocker)
|
||||||
|
|
||||||
{:ok, blocker} = update_follower_count(blocker)
|
{:ok, blocker} = update_follower_count(blocker)
|
||||||
{:ok, blocker, _} = Participation.mark_all_as_read(blocker, blocked)
|
{:ok, blocker, _} = Participation.mark_all_as_read(blocker, blocked)
|
||||||
|
@ -1400,15 +1445,13 @@ def deactivate(%User{} = user, status) do
|
||||||
user
|
user
|
||||||
|> get_followers()
|
|> get_followers()
|
||||||
|> Enum.filter(& &1.local)
|
|> Enum.filter(& &1.local)
|
||||||
|> Enum.each(fn follower ->
|
|> Enum.each(&set_cache(update_following_count(&1)))
|
||||||
follower |> update_following_count() |> set_cache()
|
|
||||||
end)
|
|
||||||
|
|
||||||
# Only update local user counts, remote will be update during the next pull.
|
# Only update local user counts, remote will be update during the next pull.
|
||||||
user
|
user
|
||||||
|> get_friends()
|
|> get_friends()
|
||||||
|> Enum.filter(& &1.local)
|
|> Enum.filter(& &1.local)
|
||||||
|> Enum.each(&update_follower_count/1)
|
|> Enum.each(&do_unfollow(user, &1))
|
||||||
|
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
end
|
end
|
||||||
|
@ -1430,6 +1473,25 @@ def delete(%User{} = user) do
|
||||||
BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
|
BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp delete_and_invalidate_cache(%User{} = user) do
|
||||||
|
invalidate_cache(user)
|
||||||
|
Repo.delete(user)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp delete_or_deactivate(%User{local: false} = user), do: delete_and_invalidate_cache(user)
|
||||||
|
|
||||||
|
defp delete_or_deactivate(%User{local: true} = user) do
|
||||||
|
status = account_status(user)
|
||||||
|
|
||||||
|
if status == :confirmation_pending do
|
||||||
|
delete_and_invalidate_cache(user)
|
||||||
|
else
|
||||||
|
user
|
||||||
|
|> change(%{deactivated: true, email: nil})
|
||||||
|
|> update_and_set_cache()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def perform(:force_password_reset, user), do: force_password_reset(user)
|
def perform(:force_password_reset, user), do: force_password_reset(user)
|
||||||
|
|
||||||
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
||||||
|
@ -1450,15 +1512,11 @@ def perform(:delete, %User{} = user) do
|
||||||
end)
|
end)
|
||||||
|
|
||||||
delete_user_activities(user)
|
delete_user_activities(user)
|
||||||
|
delete_notifications_from_user_activities(user)
|
||||||
|
|
||||||
if user.local do
|
delete_outgoing_pending_follow_requests(user)
|
||||||
user
|
|
||||||
|> change(%{deactivated: true, email: nil})
|
delete_or_deactivate(user)
|
||||||
|> update_and_set_cache()
|
|
||||||
else
|
|
||||||
invalidate_cache(user)
|
|
||||||
Repo.delete(user)
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def perform(:deactivate_async, user, status), do: deactivate(user, status)
|
def perform(:deactivate_async, user, status), do: deactivate(user, status)
|
||||||
|
@ -1470,8 +1528,7 @@ def perform(:blocks_import, %User{} = blocker, blocked_identifiers)
|
||||||
blocked_identifiers,
|
blocked_identifiers,
|
||||||
fn blocked_identifier ->
|
fn blocked_identifier ->
|
||||||
with {:ok, %User{} = blocked} <- get_or_fetch(blocked_identifier),
|
with {:ok, %User{} = blocked} <- get_or_fetch(blocked_identifier),
|
||||||
{:ok, _user_block} <- block(blocker, blocked),
|
{:ok, _block} <- CommonAPI.block(blocker, blocked) do
|
||||||
{:ok, _} <- ActivityPub.block(blocker, blocked) do
|
|
||||||
blocked
|
blocked
|
||||||
else
|
else
|
||||||
err ->
|
err ->
|
||||||
|
@ -1543,6 +1600,13 @@ def follow_import(%User{} = follower, followed_identifiers)
|
||||||
})
|
})
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def delete_notifications_from_user_activities(%User{ap_id: ap_id}) do
|
||||||
|
Notification
|
||||||
|
|> join(:inner, [n], activity in assoc(n, :activity))
|
||||||
|
|> where([n, a], fragment("? = ?", a.actor, ^ap_id))
|
||||||
|
|> Repo.delete_all()
|
||||||
|
end
|
||||||
|
|
||||||
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
||||||
ap_id
|
ap_id
|
||||||
|> Activity.Queries.by_actor()
|
|> Activity.Queries.by_actor()
|
||||||
|
@ -1580,6 +1644,12 @@ defp delete_activity(%{data: %{"type" => type}} = activity, user)
|
||||||
|
|
||||||
defp delete_activity(_activity, _user), do: "Doing nothing"
|
defp delete_activity(_activity, _user), do: "Doing nothing"
|
||||||
|
|
||||||
|
defp delete_outgoing_pending_follow_requests(user) do
|
||||||
|
user
|
||||||
|
|> FollowingRelationship.outgoing_pending_follow_requests_query()
|
||||||
|
|> Repo.delete_all()
|
||||||
|
end
|
||||||
|
|
||||||
def html_filter_policy(%User{no_rich_text: true}) do
|
def html_filter_policy(%User{no_rich_text: true}) do
|
||||||
Pleroma.HTML.Scrubber.TwitterText
|
Pleroma.HTML.Scrubber.TwitterText
|
||||||
end
|
end
|
||||||
|
@ -1589,12 +1659,19 @@ def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])
|
||||||
def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
|
def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
|
||||||
|
|
||||||
def get_or_fetch_by_ap_id(ap_id) do
|
def get_or_fetch_by_ap_id(ap_id) do
|
||||||
user = get_cached_by_ap_id(ap_id)
|
cached_user = get_cached_by_ap_id(ap_id)
|
||||||
|
|
||||||
if !is_nil(user) and !needs_update?(user) do
|
maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
|
||||||
|
|
||||||
|
case {cached_user, maybe_fetched_user} do
|
||||||
|
{_, {:ok, %User{} = user}} ->
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
else
|
|
||||||
fetch_by_ap_id(ap_id)
|
{%User{} = user, _} ->
|
||||||
|
{:ok, user}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
{:error, :not_found}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -45,7 +45,7 @@ defmodule Pleroma.User.Query do
|
||||||
is_admin: boolean(),
|
is_admin: boolean(),
|
||||||
is_moderator: boolean(),
|
is_moderator: boolean(),
|
||||||
super_users: boolean(),
|
super_users: boolean(),
|
||||||
exclude_service_users: boolean(),
|
invisible: boolean(),
|
||||||
followers: User.t(),
|
followers: User.t(),
|
||||||
friends: User.t(),
|
friends: User.t(),
|
||||||
recipients_from_activity: [String.t()],
|
recipients_from_activity: [String.t()],
|
||||||
|
@ -89,8 +89,8 @@ defp compose_query({key, value}, query)
|
||||||
where(query, [u], ilike(field(u, ^key), ^"%#{value}%"))
|
where(query, [u], ilike(field(u, ^key), ^"%#{value}%"))
|
||||||
end
|
end
|
||||||
|
|
||||||
defp compose_query({:exclude_service_users, _}, query) do
|
defp compose_query({:invisible, bool}, query) when is_boolean(bool) do
|
||||||
where(query, [u], not like(u.ap_id, "%/relay") and not like(u.ap_id, "%/internal/fetch"))
|
where(query, [u], u.invisible == ^bool)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp compose_query({key, value}, query)
|
defp compose_query({key, value}, query)
|
||||||
|
@ -167,20 +167,18 @@ defp compose_query({:friends, %User{id: id}}, query) do
|
||||||
end
|
end
|
||||||
|
|
||||||
defp compose_query({:recipients_from_activity, to}, query) do
|
defp compose_query({:recipients_from_activity, to}, query) do
|
||||||
query
|
following_query =
|
||||||
|> join(:left, [u], r in FollowingRelationship,
|
from(u in User,
|
||||||
as: :relationships,
|
join: f in FollowingRelationship,
|
||||||
on: r.follower_id == u.id
|
on: u.id == f.following_id,
|
||||||
|
where: f.state == ^:follow_accept,
|
||||||
|
where: u.follower_address in ^to,
|
||||||
|
select: f.follower_id
|
||||||
)
|
)
|
||||||
|> join(:left, [relationships: r], f in User,
|
|
||||||
as: :following,
|
from(u in query,
|
||||||
on: f.id == r.following_id
|
where: u.ap_id in ^to or u.id in subquery(following_query)
|
||||||
)
|
)
|
||||||
|> where(
|
|
||||||
[u, following: f, relationships: r],
|
|
||||||
u.ap_id in ^to or (f.follower_address in ^to and r.state == ^:follow_accept)
|
|
||||||
)
|
|
||||||
|> distinct(true)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp compose_query({:order_by, key}, query) do
|
defp compose_query({:order_by, key}, query) do
|
||||||
|
|
|
@ -87,6 +87,22 @@ def dictionary(
|
||||||
source_to_target_rel_types \\ nil,
|
source_to_target_rel_types \\ nil,
|
||||||
target_to_source_rel_types \\ nil
|
target_to_source_rel_types \\ nil
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def dictionary(
|
||||||
|
_source_users,
|
||||||
|
_target_users,
|
||||||
|
[] = _source_to_target_rel_types,
|
||||||
|
[] = _target_to_source_rel_types
|
||||||
|
) do
|
||||||
|
[]
|
||||||
|
end
|
||||||
|
|
||||||
|
def dictionary(
|
||||||
|
source_users,
|
||||||
|
target_users,
|
||||||
|
source_to_target_rel_types,
|
||||||
|
target_to_source_rel_types
|
||||||
|
)
|
||||||
when is_list(source_users) and is_list(target_users) do
|
when is_list(source_users) and is_list(target_users) do
|
||||||
source_user_ids = User.binary_id(source_users)
|
source_user_ids = User.binary_id(source_users)
|
||||||
target_user_ids = User.binary_id(target_users)
|
target_user_ids = User.binary_id(target_users)
|
||||||
|
@ -138,11 +154,16 @@ def view_relationships_option(nil = _reading_user, _actors, _opts) do
|
||||||
|
|
||||||
def view_relationships_option(%User{} = reading_user, actors, opts) do
|
def view_relationships_option(%User{} = reading_user, actors, opts) do
|
||||||
{source_to_target_rel_types, target_to_source_rel_types} =
|
{source_to_target_rel_types, target_to_source_rel_types} =
|
||||||
if opts[:source_mutes_only] do
|
case opts[:subset] do
|
||||||
# This option is used for rendering statuses (FE needs `muted` flag for each one anyways)
|
:source_mutes ->
|
||||||
|
# Used for statuses rendering (FE needs `muted` flag for each status when statuses load)
|
||||||
{[:mute], []}
|
{[:mute], []}
|
||||||
else
|
|
||||||
|
nil ->
|
||||||
{[:block, :mute, :notification_mute, :reblog_mute], [:block, :inverse_subscription]}
|
{[:block, :mute, :notification_mute, :reblog_mute], [:block, :inverse_subscription]}
|
||||||
|
|
||||||
|
unknown ->
|
||||||
|
raise "Unsupported :subset option value: #{inspect(unknown)}"
|
||||||
end
|
end
|
||||||
|
|
||||||
user_relationships =
|
user_relationships =
|
||||||
|
@ -153,7 +174,17 @@ def view_relationships_option(%User{} = reading_user, actors, opts) do
|
||||||
target_to_source_rel_types
|
target_to_source_rel_types
|
||||||
)
|
)
|
||||||
|
|
||||||
following_relationships = FollowingRelationship.all_between_user_sets([reading_user], actors)
|
following_relationships =
|
||||||
|
case opts[:subset] do
|
||||||
|
:source_mutes ->
|
||||||
|
[]
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
FollowingRelationship.all_between_user_sets([reading_user], actors)
|
||||||
|
|
||||||
|
unknown ->
|
||||||
|
raise "Unsupported :subset option value: #{inspect(unknown)}"
|
||||||
|
end
|
||||||
|
|
||||||
%{user_relationships: user_relationships, following_relationships: following_relationships}
|
%{user_relationships: user_relationships, following_relationships: following_relationships}
|
||||||
end
|
end
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -21,6 +21,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
||||||
alias Pleroma.Web.ActivityPub.UserView
|
alias Pleroma.Web.ActivityPub.UserView
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.ActivityPub.Visibility
|
alias Pleroma.Web.ActivityPub.Visibility
|
||||||
|
alias Pleroma.Web.ControllerHelper
|
||||||
|
alias Pleroma.Web.Endpoint
|
||||||
alias Pleroma.Web.FederatingPlug
|
alias Pleroma.Web.FederatingPlug
|
||||||
alias Pleroma.Web.Federator
|
alias Pleroma.Web.Federator
|
||||||
|
|
||||||
|
@ -75,8 +77,8 @@ def user(conn, %{"nickname" => nickname}) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def object(conn, %{"uuid" => uuid}) do
|
def object(conn, _) do
|
||||||
with ap_id <- o_status_url(conn, :object, uuid),
|
with ap_id <- Endpoint.url() <> conn.request_path,
|
||||||
%Object{} = object <- Object.get_cached_by_ap_id(ap_id),
|
%Object{} = object <- Object.get_cached_by_ap_id(ap_id),
|
||||||
{_, true} <- {:public?, Visibility.is_public?(object)} do
|
{_, true} <- {:public?, Visibility.is_public?(object)} do
|
||||||
conn
|
conn
|
||||||
|
@ -101,8 +103,8 @@ def track_object_fetch(conn, object_id) do
|
||||||
conn
|
conn
|
||||||
end
|
end
|
||||||
|
|
||||||
def activity(conn, %{"uuid" => uuid}) do
|
def activity(conn, _params) do
|
||||||
with ap_id <- o_status_url(conn, :activity, uuid),
|
with ap_id <- Endpoint.url() <> conn.request_path,
|
||||||
%Activity{} = activity <- Activity.normalize(ap_id),
|
%Activity{} = activity <- Activity.normalize(ap_id),
|
||||||
{_, true} <- {:public?, Visibility.is_public?(activity)} do
|
{_, true} <- {:public?, Visibility.is_public?(activity)} do
|
||||||
conn
|
conn
|
||||||
|
@ -229,27 +231,23 @@ def outbox(
|
||||||
when page? in [true, "true"] do
|
when page? in [true, "true"] do
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname),
|
with %User{} = user <- User.get_cached_by_nickname(nickname),
|
||||||
{:ok, user} <- User.ensure_keys_present(user) do
|
{:ok, user} <- User.ensure_keys_present(user) do
|
||||||
activities =
|
# "include_poll_votes" is a hack because postgres generates inefficient
|
||||||
if params["max_id"] do
|
# queries when filtering by 'Answer', poll votes will be hidden by the
|
||||||
ActivityPub.fetch_user_activities(user, for_user, %{
|
# visibility filter in this case anyway
|
||||||
"max_id" => params["max_id"],
|
params =
|
||||||
# This is a hack because postgres generates inefficient queries when filtering by
|
params
|
||||||
# 'Answer', poll votes will be hidden by the visibility filter in this case anyway
|
|> Map.drop(["nickname", "page"])
|
||||||
"include_poll_votes" => true,
|
|> Map.put("include_poll_votes", true)
|
||||||
"limit" => 10
|
|> Map.new(fn {k, v} -> {String.to_existing_atom(k), v} end)
|
||||||
})
|
|
||||||
else
|
activities = ActivityPub.fetch_user_activities(user, for_user, params)
|
||||||
ActivityPub.fetch_user_activities(user, for_user, %{
|
|
||||||
"limit" => 10,
|
|
||||||
"include_poll_votes" => true
|
|
||||||
})
|
|
||||||
end
|
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> put_resp_content_type("application/activity+json")
|
|> put_resp_content_type("application/activity+json")
|
||||||
|> put_view(UserView)
|
|> put_view(UserView)
|
||||||
|> render("activity_collection_page.json", %{
|
|> render("activity_collection_page.json", %{
|
||||||
activities: activities,
|
activities: activities,
|
||||||
|
pagination: ControllerHelper.get_pagination_fields(conn, activities),
|
||||||
iri: "#{user.ap_id}/outbox"
|
iri: "#{user.ap_id}/outbox"
|
||||||
})
|
})
|
||||||
end
|
end
|
||||||
|
@ -352,21 +350,24 @@ def read_inbox(
|
||||||
%{"nickname" => nickname, "page" => page?} = params
|
%{"nickname" => nickname, "page" => page?} = params
|
||||||
)
|
)
|
||||||
when page? in [true, "true"] do
|
when page? in [true, "true"] do
|
||||||
|
params =
|
||||||
|
params
|
||||||
|
|> Map.drop(["nickname", "page"])
|
||||||
|
|> Map.put("blocking_user", user)
|
||||||
|
|> Map.put("user", user)
|
||||||
|
|> Map.new(fn {k, v} -> {String.to_existing_atom(k), v} end)
|
||||||
|
|
||||||
activities =
|
activities =
|
||||||
if params["max_id"] do
|
[user.ap_id | User.following(user)]
|
||||||
ActivityPub.fetch_activities([user.ap_id | User.following(user)], %{
|
|> ActivityPub.fetch_activities(params)
|
||||||
"max_id" => params["max_id"],
|
|> Enum.reverse()
|
||||||
"limit" => 10
|
|
||||||
})
|
|
||||||
else
|
|
||||||
ActivityPub.fetch_activities([user.ap_id | User.following(user)], %{"limit" => 10})
|
|
||||||
end
|
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> put_resp_content_type("application/activity+json")
|
|> put_resp_content_type("application/activity+json")
|
||||||
|> put_view(UserView)
|
|> put_view(UserView)
|
||||||
|> render("activity_collection_page.json", %{
|
|> render("activity_collection_page.json", %{
|
||||||
activities: activities,
|
activities: activities,
|
||||||
|
pagination: ControllerHelper.get_pagination_fields(conn, activities),
|
||||||
iri: "#{user.ap_id}/inbox"
|
iri: "#{user.ap_id}/inbox"
|
||||||
})
|
})
|
||||||
end
|
end
|
||||||
|
@ -513,7 +514,6 @@ defp ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
|
||||||
{new_user, for_user}
|
{new_user, for_user}
|
||||||
end
|
end
|
||||||
|
|
||||||
# TODO: Add support for "object" field
|
|
||||||
@doc """
|
@doc """
|
||||||
Endpoint based on <https://www.w3.org/wiki/SocialCG/ActivityPub/MediaUpload>
|
Endpoint based on <https://www.w3.org/wiki/SocialCG/ActivityPub/MediaUpload>
|
||||||
|
|
||||||
|
@ -524,6 +524,8 @@ defp ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
|
||||||
Response:
|
Response:
|
||||||
- HTTP Code: 201 Created
|
- HTTP Code: 201 Created
|
||||||
- HTTP Body: ActivityPub object to be inserted into another's `attachment` field
|
- HTTP Body: ActivityPub object to be inserted into another's `attachment` field
|
||||||
|
|
||||||
|
Note: Will not point to a URL with a `Location` header because no standalone Activity has been created.
|
||||||
"""
|
"""
|
||||||
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
|
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
|
||||||
with {:ok, object} <-
|
with {:ok, object} <-
|
||||||
|
|
|
@ -5,11 +5,15 @@ defmodule Pleroma.Web.ActivityPub.Builder do
|
||||||
This module encodes our addressing policies and general shape of our objects.
|
This module encodes our addressing policies and general shape of our objects.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
alias Pleroma.Emoji
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
|
alias Pleroma.Web.ActivityPub.Relay
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.ActivityPub.Visibility
|
alias Pleroma.Web.ActivityPub.Visibility
|
||||||
|
|
||||||
|
require Pleroma.Constants
|
||||||
|
|
||||||
@spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()}
|
@spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()}
|
||||||
def emoji_react(actor, object, emoji) do
|
def emoji_react(actor, object, emoji) do
|
||||||
with {:ok, data, meta} <- object_action(actor, object) do
|
with {:ok, data, meta} <- object_action(actor, object) do
|
||||||
|
@ -62,6 +66,42 @@ def delete(actor, object_id) do
|
||||||
}, []}
|
}, []}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def create(actor, object, recipients) do
|
||||||
|
{:ok,
|
||||||
|
%{
|
||||||
|
"id" => Utils.generate_activity_id(),
|
||||||
|
"actor" => actor.ap_id,
|
||||||
|
"to" => recipients,
|
||||||
|
"object" => object,
|
||||||
|
"type" => "Create",
|
||||||
|
"published" => DateTime.utc_now() |> DateTime.to_iso8601()
|
||||||
|
}, []}
|
||||||
|
end
|
||||||
|
|
||||||
|
def chat_message(actor, recipient, content, opts \\ []) do
|
||||||
|
basic = %{
|
||||||
|
"id" => Utils.generate_object_id(),
|
||||||
|
"actor" => actor.ap_id,
|
||||||
|
"type" => "ChatMessage",
|
||||||
|
"to" => [recipient],
|
||||||
|
"content" => content,
|
||||||
|
"published" => DateTime.utc_now() |> DateTime.to_iso8601(),
|
||||||
|
"emoji" => Emoji.Formatter.get_emoji_map(content)
|
||||||
|
}
|
||||||
|
|
||||||
|
case opts[:attachment] do
|
||||||
|
%Object{data: attachment_data} ->
|
||||||
|
{
|
||||||
|
:ok,
|
||||||
|
Map.put(basic, "attachment", attachment_data),
|
||||||
|
[]
|
||||||
|
}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
{:ok, basic, []}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
@spec tombstone(String.t(), String.t()) :: {:ok, map(), keyword()}
|
@spec tombstone(String.t(), String.t()) :: {:ok, map(), keyword()}
|
||||||
def tombstone(actor, id) do
|
def tombstone(actor, id) do
|
||||||
{:ok,
|
{:ok,
|
||||||
|
@ -83,6 +123,61 @@ def like(actor, object) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Retricted to user updates for now, always public
|
||||||
|
@spec update(User.t(), Object.t()) :: {:ok, map(), keyword()}
|
||||||
|
def update(actor, object) do
|
||||||
|
to = [Pleroma.Constants.as_public(), actor.follower_address]
|
||||||
|
|
||||||
|
{:ok,
|
||||||
|
%{
|
||||||
|
"id" => Utils.generate_activity_id(),
|
||||||
|
"type" => "Update",
|
||||||
|
"actor" => actor.ap_id,
|
||||||
|
"object" => object,
|
||||||
|
"to" => to
|
||||||
|
}, []}
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec block(User.t(), User.t()) :: {:ok, map(), keyword()}
|
||||||
|
def block(blocker, blocked) do
|
||||||
|
{:ok,
|
||||||
|
%{
|
||||||
|
"id" => Utils.generate_activity_id(),
|
||||||
|
"type" => "Block",
|
||||||
|
"actor" => blocker.ap_id,
|
||||||
|
"object" => blocked.ap_id,
|
||||||
|
"to" => [blocked.ap_id]
|
||||||
|
}, []}
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec announce(User.t(), Object.t(), keyword()) :: {:ok, map(), keyword()}
|
||||||
|
def announce(actor, object, options \\ []) do
|
||||||
|
public? = Keyword.get(options, :public, false)
|
||||||
|
|
||||||
|
to =
|
||||||
|
cond do
|
||||||
|
actor.ap_id == Relay.relay_ap_id() ->
|
||||||
|
[actor.follower_address]
|
||||||
|
|
||||||
|
public? ->
|
||||||
|
[actor.follower_address, object.data["actor"], Pleroma.Constants.as_public()]
|
||||||
|
|
||||||
|
true ->
|
||||||
|
[actor.follower_address, object.data["actor"]]
|
||||||
|
end
|
||||||
|
|
||||||
|
{:ok,
|
||||||
|
%{
|
||||||
|
"id" => Utils.generate_activity_id(),
|
||||||
|
"actor" => actor.ap_id,
|
||||||
|
"object" => object.data["id"],
|
||||||
|
"to" => to,
|
||||||
|
"context" => object.data["context"],
|
||||||
|
"type" => "Announce",
|
||||||
|
"published" => Utils.make_date()
|
||||||
|
}, []}
|
||||||
|
end
|
||||||
|
|
||||||
@spec object_action(User.t(), Object.t()) :: {:ok, map(), keyword()}
|
@spec object_action(User.t(), Object.t()) :: {:ok, map(), keyword()}
|
||||||
defp object_action(actor, object) do
|
defp object_action(actor, object) do
|
||||||
object_actor = User.get_cached_by_ap_id(object.data["actor"])
|
object_actor = User.get_cached_by_ap_id(object.data["actor"])
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue