# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Object
alias Pleroma.Web
alias Pleroma.Activity
alias Pleroma.Notification
alias Comeonin.Pbkdf2
alias Pleroma.Formatter
alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils
alias Pleroma.Web.OStatus
alias Pleroma.Web.Websub
alias Pleroma.Web.OAuth
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.RelMe
require Logger
@type t :: %__MODULE__{}
@primary_key {:id, Pleroma.FlakeId, autogenerate: true}
@email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/
@strict_local_nickname_regex ~r/^[a-zA-Z\d]+$/
@extended_local_nickname_regex ~r/^[a-zA-Z\d_-]+$/
schema "users" do
field(:bio, :string)
field(:email, :string)
field(:name, :string)
field(:nickname, :string)
field(:password_hash, :string)
field(:password, :string, virtual: true)
field(:password_confirmation, :string, virtual: true)
field(:following, {:array, :string}, default: [])
field(:ap_id, :string)
field(:avatar, :map)
field(:local, :boolean, default: true)
field(:follower_address, :string)
field(:search_rank, :float, virtual: true)
field(:tags, {:array, :string}, default: [])
field(:bookmarks, {:array, :string}, default: [])
field(:last_refreshed_at, :naive_datetime)
has_many(:notifications, Notification)
embeds_one(:info, Pleroma.User.Info)
timestamps()
end
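# `auth_active?/1` answers whether an account may authenticate: remote users and
# confirmed local users always can, while unconfirmed local users can only do so
# when the instance does not require account activation.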
def auth_active?(%User{local: false}), do: true
def auth_active?(%User{info: %User.Info{confirmation_pending: false}}), do: true
def auth_active?(%User{info: %User.Info{confirmation_pending: true}}),
do: !Pleroma.Config.get([:instance, :account_activation_required])
def auth_active?(_), do: false
def visible_for?(user, for_user \\ nil)
def visible_for?(%User{id: user_id}, %User{id: for_id}) when user_id == for_id, do: true
def visible_for?(%User{} = user, for_user) do
auth_active?(user) || superuser?(for_user)
end
def visible_for?(_, _), do: false
def superuser?(%User{local: true, info: %User.Info{is_admin: true}}), do: true
def superuser?(%User{local: true, info: %User.Info{is_moderator: true}}), do: true
def superuser?(_), do: false
def avatar_url(user) do
case user.avatar do
%{"url" => [%{"href" => href} | _]} -> href
_ -> "#{Web.base_url()}/images/avi.png"
end
end
def banner_url(user) do
case user.info.banner do
%{"url" => [%{"href" => href} | _]} -> href
_ -> "#{Web.base_url()}/images/banner.png"
end
end
def profile_url(%User{info: %{source_data: %{"url" => url}}}), do: url
def profile_url(%User{ap_id: ap_id}), do: ap_id
def profile_url(_), do: nil
def ap_id(%User{nickname: nickname}) do
"#{Web.base_url()}/users/#{nickname}"
end
def ap_followers(%User{} = user) do
"#{ap_id(user)}/followers"
end
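# Aggregates the counters and flags exposed through the API. Local users follow
# their own follower collection, so one entry is subtracted from their
# following count.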
def user_info(%User{} = user) do
oneself = if user.local, do: 1, else: 0
%{
following_count: length(user.following) - oneself,
note_count: user.info.note_count,
follower_count: user.info.follower_count,
locked: user.info.locked,
confirmation_pending: user.info.confirmation_pending,
default_scope: user.info.default_scope
}
end
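# Changeset for users discovered on other instances. Remote nicknames are full
# `user@domain` addresses, which is why they are validated against @email_regex,
# and the follower address is taken from the remote data when available.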
def remote_user_creation(params) do
params =
params
|> Map.put(:info, params[:info] || %{})
info_cng = User.Info.remote_user_creation(%User.Info{}, params[:info])
changes =
%User{}
|> cast(params, [:bio, :name, :ap_id, :nickname, :avatar])
|> validate_required([:name, :ap_id])
|> unique_constraint(:nickname)
|> validate_format(:nickname, @email_regex)
|> validate_length(:bio, max: 5000)
|> validate_length(:name, max: 100)
|> put_change(:local, false)
|> put_embed(:info, info_cng)
if changes.valid? do
case info_cng.changes[:source_data] do
%{"followers" => followers} ->
changes
|> put_change(:follower_address, followers)
_ ->
followers = User.ap_followers(%User{nickname: changes.changes[:nickname]})
changes
|> put_change(:follower_address, followers)
end
else
changes
end
end
def update_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:bio, :name, :avatar])
|> unique_constraint(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: 5000)
|> validate_length(:name, min: 1, max: 100)
end
def upgrade_changeset(struct, params \\ %{}) do
params =
params
|> Map.put(:last_refreshed_at, NaiveDateTime.utc_now())
info_cng =
struct.info
|> User.Info.user_upgrade(params[:info])
struct
|> cast(params, [:bio, :name, :follower_address, :avatar, :last_refreshed_at])
|> unique_constraint(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: 5000)
|> validate_length(:name, max: 100)
|> put_embed(:info, info_cng)
end
def password_update_changeset(struct, params) do
changeset =
struct
|> cast(params, [:password, :password_confirmation])
|> validate_required([:password, :password_confirmation])
|> validate_confirmation(:password)
OAuth.Token.delete_user_tokens(struct)
OAuth.Authorization.delete_user_authorizations(struct)
if changeset.valid? do
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
changeset
|> put_change(:password_hash, hashed)
else
changeset
end
end
def reset_password(user, data) do
update_and_set_cache(password_update_changeset(user, data))
end
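# Changeset for local registrations. New accounts start out unconfirmed unless
# the caller passes `confirmed: true` or the instance does not require account
# activation.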
def register_changeset(struct, params \\ %{}, opts \\ []) do
confirmation_status =
if opts[:confirmed] || !Pleroma.Config.get([:instance, :account_activation_required]) do
:confirmed
else
:unconfirmed
end
info_change = User.Info.confirmation_changeset(%User.Info{}, confirmation_status)
changeset =
struct
|> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation])
|> validate_required([:email, :name, :nickname, :password, :password_confirmation])
|> validate_confirmation(:password)
|> unique_constraint(:email)
|> unique_constraint(:nickname)
|> validate_exclusion(:nickname, Pleroma.Config.get([Pleroma.User, :restricted_nicknames]))
|> validate_format(:nickname, local_nickname_regex())
|> validate_format(:email, @email_regex)
|> validate_length(:bio, max: 1000)
|> validate_length(:name, min: 1, max: 100)
|> put_change(:info, info_change)
if changeset.valid? do
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
changeset
|> put_change(:password_hash, hashed)
|> put_change(:ap_id, ap_id)
|> unique_constraint(:ap_id)
|> put_change(:following, [followers])
|> put_change(:follower_address, followers)
else
changeset
end
end
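# Makes a fresh account follow every local user listed in the
# `:autofollowed_nicknames` instance setting.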
defp autofollow_users(user) do
candidates = Pleroma.Config.get([:instance, :autofollowed_nicknames])
autofollowed_users =
from(u in User,
where: u.local == true,
where: u.nickname in ^candidates
)
|> Repo.all()
follow_all(user, autofollowed_users)
end
@doc "Inserts provided changeset, performs post-registration actions (confirmation email sending etc.)"
def register(%Ecto.Changeset{} = changeset) do
with {:ok, user} <- Repo.insert(changeset),
{:ok, user} <- autofollow_users(user),
{:ok, _} <- Pleroma.User.WelcomeMessage.post_welcome_message_to_user(user),
{:ok, _} <- try_send_confirmation_email(user) do
{:ok, user}
end
end
def try_send_confirmation_email(%User{} = user) do
if user.info.confirmation_pending &&
Pleroma.Config.get([:instance, :account_activation_required]) do
user
|> Pleroma.UserEmail.account_confirmation_email()
|> Pleroma.Mailer.deliver_async()
else
{:ok, :noop}
end
end
def needs_update?(%User{local: true}), do: false
def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true
def needs_update?(%User{local: false} = user) do
NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86400
end
def needs_update?(_), do: true
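# Follows directly only when the target is an unlocked local user or a remote
# user that does not speak ActivityPub; in every other case the follower is
# returned unchanged.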
def maybe_direct_follow(%User{} = follower, %User{local: true, info: %{locked: true}}) do
{:ok, follower}
end
def maybe_direct_follow(%User{} = follower, %User{local: true} = followed) do
follow(follower, followed)
end
def maybe_direct_follow(%User{} = follower, %User{} = followed) do
if not User.ap_enabled?(followed) do
follow(follower, followed)
else
{:ok, follower}
end
end
def maybe_follow(%User{} = follower, %User{info: _info} = followed) do
if not following?(follower, followed) do
follow(follower, followed)
else
{:ok, follower}
end
end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
def follow_all(follower, followeds) do
followed_addresses =
followeds
|> Enum.reject(fn followed -> blocks?(follower, followed) || blocks?(followed, follower) end)
|> Enum.map(fn %{follower_address: fa} -> fa end)
q =
from(u in User,
where: u.id == ^follower.id,
update: [
set: [
following:
fragment(
"array(select distinct unnest (array_cat(?, ?)))",
u.following,
^followed_addresses
)
]
]
)
{1, [follower]} = Repo.update_all(q, [], returning: true)
Enum.each(followeds, fn followed ->
update_follower_count(followed)
end)
set_cache(follower)
end
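# Follows a single user. Refuses when the target is already followed or
# deactivated and, if `deny_follow_blocked` is set, when the target blocks the
# follower; OStatus-only remotes additionally get a Websub subscription.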
def follow(%User{} = follower, %User{info: info} = followed) do
user_config = Application.get_env(:pleroma, :user)
deny_follow_blocked = Keyword.get(user_config, :deny_follow_blocked)
ap_followers = followed.follower_address
cond do
following?(follower, followed) or info.deactivated ->
{:error, "Could not follow user: #{followed.nickname} is already on your list."}
deny_follow_blocked and blocks?(followed, follower) ->
{:error, "Could not follow user: #{followed.nickname} blocked you."}
true ->
if !followed.local && follower.local && !ap_enabled?(followed) do
Websub.subscribe(follower, followed)
end
q =
from(u in User,
where: u.id == ^follower.id,
update: [push: [following: ^ap_followers]]
)
{1, [follower]} = Repo.update_all(q, [], returning: true)
{:ok, _} = update_follower_count(followed)
set_cache(follower)
end
end
def unfollow(%User{} = follower, %User{} = followed) do
ap_followers = followed.follower_address
if following?(follower, followed) and follower.ap_id != followed.ap_id do
q =
from(u in User,
where: u.id == ^follower.id,
update: [pull: [following: ^ap_followers]]
)
{1, [follower]} = Repo.update_all(q, [], returning: true)
{:ok, followed} = update_follower_count(followed)
set_cache(follower)
{:ok, follower, Utils.fetch_latest_follow(follower, followed)}
else
{:error, "Not subscribed!"}
end
end
@spec following?(User.t(), User.t()) :: boolean
def following?(%User{} = follower, %User{} = followed) do
Enum.member?(follower.following, followed.follower_address)
end
def follow_import(%User{} = follower, followed_identifiers)
when is_list(followed_identifiers) do
Enum.map(
followed_identifiers,
fn followed_identifier ->
with %User{} = followed <- get_or_fetch(followed_identifier),
{:ok, follower} <- maybe_direct_follow(follower, followed),
{:ok, _} <- ActivityPub.follow(follower, followed) do
followed
else
err ->
Logger.debug("follow_import failed for #{followed_identifier} with: #{inspect(err)}")
err
end
end
)
end
def locked?(%User{} = user) do
user.info.locked || false
end
def get_by_id(id) do
Repo.get_by(User, id: id)
end
def get_by_ap_id(ap_id) do
Repo.get_by(User, ap_id: ap_id)
end
# This is mostly an SPC migration fix. It guesses the nickname by combining the last path segment of the ap_id with its host, then looks that user up.
def get_by_guessed_nickname(ap_id) do
domain = URI.parse(ap_id).host
name = List.last(String.split(ap_id, "/"))
nickname = "#{name}@#{domain}"
get_by_nickname(nickname)
end
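# Users are cached under `ap_id:`, `nickname:` and `user_info:` keys;
# `get_cached_by_id/1` additionally keeps an `id:` entry that maps an id to the
# cached ap_id.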
def set_cache(user) do
Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
Cachex.put(:user_cache, "nickname:#{user.nickname}", user)
Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user))
{:ok, user}
end
def update_and_set_cache(changeset) do
with {:ok, user} <- Repo.update(changeset) do
set_cache(user)
else
e -> e
end
end
def invalidate_cache(user) do
Cachex.del(:user_cache, "ap_id:#{user.ap_id}")
Cachex.del(:user_cache, "nickname:#{user.nickname}")
Cachex.del(:user_cache, "user_info:#{user.id}")
end
def get_cached_by_ap_id(ap_id) do
key = "ap_id:#{ap_id}"
Cachex.fetch!(:user_cache, key, fn _ -> get_by_ap_id(ap_id) end)
end
def get_cached_by_id(id) do
key = "id:#{id}"
ap_id =
Cachex.fetch!(:user_cache, key, fn _ ->
user = get_by_id(id)
if user do
Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
{:commit, user.ap_id}
else
{:ignore, ""}
end
end)
get_cached_by_ap_id(ap_id)
end
def get_cached_by_nickname(nickname) do
key = "nickname:#{nickname}"
Cachex.fetch!(:user_cache, key, fn _ -> get_or_fetch_by_nickname(nickname) end)
end
def get_cached_by_nickname_or_id(nickname_or_id) do
get_cached_by_id(nickname_or_id) || get_cached_by_nickname(nickname_or_id)
end
def get_by_nickname(nickname) do
Repo.get_by(User, nickname: nickname) ||
if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
Repo.get_by(User, nickname: local_nickname(nickname))
end
end
def get_by_nickname_or_email(nickname_or_email) do
case user = Repo.get_by(User, nickname: nickname_or_email) do
%User{} -> user
nil -> Repo.get_by(User, email: nickname_or_email)
end
end
def get_cached_user_info(user) do
key = "user_info:#{user.id}"
Cachex.fetch!(:user_cache, key, fn _ -> user_info(user) end)
end
def fetch_by_nickname(nickname) do
ap_try = ActivityPub.make_user_from_nickname(nickname)
case ap_try do
{:ok, user} -> {:ok, user}
_ -> OStatus.make_user(nickname)
end
end
def get_or_fetch_by_nickname(nickname) do
with %User{} = user <- get_by_nickname(nickname) do
user
else
_e ->
with [_nick, _domain] <- String.split(nickname, "@"),
{:ok, user} <- fetch_by_nickname(nickname) do
user
else
_e -> nil
end
end
end
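# A user's followers are all users whose `following` array contains the user's
# follower address (`? <@ ?` is the Postgres array containment operator).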
def get_followers_query(%User{id: id, follower_address: follower_address}, nil) do
from(
u in User,
where: fragment("? <@ ?", ^[follower_address], u.following),
where: u.id != ^id
)
end
def get_followers_query(user, page) do
from(u in get_followers_query(user, nil))
|> paginate(page, 20)
end
def get_followers_query(user), do: get_followers_query(user, nil)
def get_followers(user, page \\ nil) do
q = get_followers_query(user, page)
{:ok, Repo.all(q)}
end
def get_followers_ids(user, page \\ nil) do
q = get_followers_query(user, page)
Repo.all(from(u in q, select: u.id))
end
def get_friends_query(%User{id: id, following: following}, nil) do
from(
u in User,
where: u.follower_address in ^following,
where: u.id != ^id
)
end
def get_friends_query(user, page) do
from(u in get_friends_query(user, nil))
|> paginate(page, 20)
end
def get_friends_query(user), do: get_friends_query(user, nil)
def get_friends(user, page \\ nil) do
q = get_friends_query(user, page)
{:ok, Repo.all(q)}
end
def get_friends_ids(user, page \\ nil) do
q = get_friends_query(user, page)
Repo.all(from(u in q, select: u.id))
end
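# Follow requests are pending `Follow` activities addressed to the user that
# have not yet been turned into an actual follow.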
def get_follow_requests_query(%User{} = user) do
from(
a in Activity,
where:
fragment(
"? ->> 'type' = 'Follow'",
a.data
),
where:
fragment(
"? ->> 'state' = 'pending'",
a.data
),
where:
fragment(
"coalesce((?)->'object'->>'id', (?)->>'object') = ?",
a.data,
a.data,
^user.ap_id
)
)
end
def get_follow_requests(%User{} = user) do
users =
user
|> User.get_follow_requests_query()
|> join(:inner, [a], u in User, a.actor == u.ap_id)
|> where([a, u], not fragment("? @> ?", u.following, ^[user.follower_address]))
|> group_by([a, u], u.id)
|> select([a, u], u)
|> Repo.all()
{:ok, users}
end
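# The note and follower counters live inside the embedded JSONB `info` column,
# so they are updated in the database with `jsonb_set` fragments instead of
# regular changesets.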
def increase_note_count(%User{} = user) do
User
|> where(id: ^user.id)
|> update([u],
set: [
info:
fragment(
"jsonb_set(?, '{note_count}', ((?->>'note_count')::int + 1)::varchar::jsonb, true)",
u.info,
u.info
)
]
)
|> Repo.update_all([], returning: true)
|> case do
{1, [user]} -> set_cache(user)
_ -> {:error, user}
end
end
def decrease_note_count(%User{} = user) do
User
|> where(id: ^user.id)
|> update([u],
set: [
info:
fragment(
"jsonb_set(?, '{note_count}', (greatest(0, (?->>'note_count')::int - 1))::varchar::jsonb, true)",
u.info,
u.info
)
]
)
|> Repo.update_all([], returning: true)
|> case do
{1, [user]} -> set_cache(user)
_ -> {:error, user}
end
end
def update_note_count(%User{} = user) do
note_count_query =
from(
a in Object,
where: fragment("?->>'actor' = ? and ?->>'type' = 'Note'", a.data, ^user.ap_id, a.data),
select: count(a.id)
)
note_count = Repo.one(note_count_query)
info_cng = User.Info.set_note_count(user.info, note_count)
cng =
change(user)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
def update_follower_count(%User{} = user) do
follower_count_query =
User
|> where([u], ^user.follower_address in u.following)
|> where([u], u.id != ^user.id)
|> select([u], %{count: count(u.id)})
User
|> where(id: ^user.id)
|> join(:inner, [u], s in subquery(follower_count_query))
|> update([u, s],
set: [
info:
fragment(
"jsonb_set(?, '{follower_count}', ?::varchar::jsonb, true)",
u.info,
s.count
)
]
)
|> Repo.update_all([], returning: true)
|> case do
{1, [user]} -> set_cache(user)
_ -> {:error, user}
end
end
def get_users_from_set_query(ap_ids, false) do
from(
u in User,
where: u.ap_id in ^ap_ids
)
end
def get_users_from_set_query(ap_ids, true) do
query = get_users_from_set_query(ap_ids, false)
from(
u in query,
where: u.local == true
)
end
def get_users_from_set(ap_ids, local_only \\ true) do
get_users_from_set_query(ap_ids, local_only)
|> Repo.all()
end
def get_recipients_from_activity(%Activity{recipients: to}) do
query =
from(
u in User,
where: u.ap_id in ^to,
or_where: fragment("? && ?", u.following, ^to)
)
query = from(u in query, where: u.local == true)
Repo.all(query)
end
@spec search_for_admin(binary(), %{
admin: Pleroma.User.t(),
local: boolean(),
page: number(),
page_size: number()
}) :: {:ok, [Pleroma.User.t()], number()}
def search_for_admin(term, %{admin: admin, local: local, page: page, page_size: page_size}) do
term = String.trim_leading(term, "@")
local_paginated_query =
User
|> maybe_local_user_query(local)
|> paginate(page, page_size)
search_query = fts_search_subquery(term, local_paginated_query)
count =
term
|> fts_search_subquery()
|> maybe_local_user_query(local)
|> Repo.aggregate(:count, :id)
{:ok, do_search(search_query, admin), count}
end
@spec all_for_admin(number(), number()) :: {:ok, [Pleroma.User.t()], number()}
def all_for_admin(page, page_size) do
query = from(u in User, order_by: u.id)
paginated_query =
query
|> paginate(page, page_size)
count =
query
|> Repo.aggregate(:count, :id)
{:ok, Repo.all(paginated_query), count}
end
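# User search combines a prefix-matching full-text query with a trigram
# similarity query, deduplicates the results by id and boosts accounts the
# searching user already follows or is followed by.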
def search(query, resolve \\ false, for_user \\ nil) do
# Strip the leading @ off the query, if present
query = String.trim_leading(query, "@")
if resolve, do: get_or_fetch(query)
fts_results = do_search(fts_search_subquery(query), for_user)
{:ok, trigram_results} =
Repo.transaction(fn ->
Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
do_search(trigram_search_subquery(query), for_user)
end)
Enum.uniq_by(fts_results ++ trigram_results, & &1.id)
end
defp do_search(subquery, for_user, options \\ []) do
q =
from(
s in subquery(subquery),
order_by: [desc: s.search_rank],
limit: ^(options[:limit] || 20)
)
results =
q
|> Repo.all()
|> Enum.filter(&(&1.search_rank > 0))
boost_search_results(results, for_user)
end
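# Builds the full-text subquery: every search term becomes a prefix match
# (`:*`), nicknames are weighted higher ('A') than display names ('B'), and the
# rank is computed with ts_rank_cd.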
defp fts_search_subquery(term, query \\ User) do
processed_query =
term
|> String.replace(~r/\W+/, " ")
|> String.trim()
|> String.split()
|> Enum.map(&(&1 <> ":*"))
|> Enum.join(" | ")
from(
u in query,
select_merge: %{
search_rank:
fragment(
"""
ts_rank_cd(
setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
to_tsquery('simple', ?),
32
)
""",
u.nickname,
u.name,
^processed_query
)
},
where:
fragment(
"""
(setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
""",
u.nickname,
u.name,
^processed_query
)
)
end
defp trigram_search_subquery(term) do
from(
u in User,
select_merge: %{
search_rank:
fragment(
"similarity(?, trim(? || ' ' || coalesce(?, '')))",
^term,
u.nickname,
u.name
)
},
where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
)
end
defp boost_search_results(results, nil), do: results
defp boost_search_results(results, for_user) do
friends_ids = get_friends_ids(for_user)
followers_ids = get_followers_ids(for_user)
Enum.map(
results,
fn u ->
search_rank_coef =
cond do
u.id in friends_ids ->
1.2
u.id in followers_ids ->
1.1
true ->
1
end
Map.put(u, :search_rank, u.search_rank * search_rank_coef)
end
)
|> Enum.sort_by(&(-&1.search_rank))
end
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
Enum.map(
blocked_identifiers,
fn blocked_identifier ->
with %User{} = blocked <- get_or_fetch(blocked_identifier),
{:ok, blocker} <- block(blocker, blocked),
{:ok, _} <- ActivityPub.block(blocker, blocked) do
blocked
else
err ->
Logger.debug("blocks_import failed for #{blocked_identifier} with: #{inspect(err)}")
err
end
end
)
end
def mute(muter, %User{ap_id: ap_id}) do
info_cng =
muter.info
|> User.Info.add_to_mutes(ap_id)
cng =
change(muter)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
def unmute(muter, %{ap_id: ap_id}) do
info_cng =
muter.info
|> User.Info.remove_from_mutes(ap_id)
cng =
change(muter)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
def block(blocker, %User{ap_id: ap_id} = blocked) do
# sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
blocker =
if following?(blocker, blocked) do
{:ok, blocker, _} = unfollow(blocker, blocked)
blocker
else
blocker
end
if following?(blocked, blocker) do
unfollow(blocked, blocker)
end
info_cng =
blocker.info
|> User.Info.add_to_block(ap_id)
cng =
change(blocker)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
# helper to handle the block given only an actor's AP id
def block(blocker, %{ap_id: ap_id}) do
block(blocker, User.get_by_ap_id(ap_id))
end
def unblock(blocker, %{ap_id: ap_id}) do
info_cng =
blocker.info
|> User.Info.remove_from_block(ap_id)
cng =
change(blocker)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
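# `mutes?/2` checks the muter's mute list; `blocks?/2` matches both explicit
# account blocks and the blocked user's host against the domain block list.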
def mutes?(nil, _), do: false
def mutes?(user, %{ap_id: ap_id}), do: Enum.member?(user.info.mutes, ap_id)
def blocks?(user, %{ap_id: ap_id}) do
blocks = user.info.blocks
domain_blocks = user.info.domain_blocks
%{host: host} = URI.parse(ap_id)
Enum.member?(blocks, ap_id) ||
Enum.any?(domain_blocks, fn domain ->
host == domain
end)
end
def muted_users(user),
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.mutes))
def blocked_users(user),
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.blocks))
def block_domain(user, domain) do
info_cng =
user.info
|> User.Info.add_to_domain_block(domain)
cng =
change(user)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
def unblock_domain(user, domain) do
info_cng =
user.info
|> User.Info.remove_from_domain_block(domain)
cng =
change(user)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
def maybe_local_user_query(query, local) do
if local, do: local_user_query(query), else: query
end
def local_user_query(query \\ User) do
from(
u in query,
where: u.local == true,
where: not is_nil(u.nickname)
)
end
def active_local_user_query do
from(
u in local_user_query(),
where: fragment("not (?->'deactivated' @> 'true')", u.info)
)
end
def moderator_user_query do
from(
u in User,
where: u.local == true,
where: fragment("?->'is_moderator' @> 'true'", u.info)
)
end
def deactivate(%User{} = user, status \\ true) do
info_cng = User.Info.set_activation_status(user.info, status)
cng =
change(user)
|> put_embed(:info, info_cng)
update_and_set_cache(cng)
end
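# Deleting an account deactivates it, severs follow relationships in both
# directions and removes the user's own `Create` activities.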
def delete(%User{} = user) do
{:ok, user} = User.deactivate(user)
# Remove all relationships
{:ok, followers} = User.get_followers(user)
followers
|> Enum.each(fn follower -> User.unfollow(follower, user) end)
{:ok, friends} = User.get_friends(user)
friends
|> Enum.each(fn followed -> User.unfollow(user, followed) end)
delete_user_activities(user)
{:ok, user}
end
def delete_user_activities(user) do
query = from(a in Activity, where: a.actor == ^user.ap_id)
Repo.all(query)
|> Enum.each(fn activity ->
case activity.data["type"] do
"Create" ->
ActivityPub.delete(Object.normalize(activity.data["object"]))
# TODO: Do something with likes, follows, repeats.
_ ->
"Doing nothing"
end
end)
end
def html_filter_policy(%User{info: %{no_rich_text: true}}) do
Pleroma.HTML.Scrubber.TwitterText
end
@default_scrubbers Pleroma.Config.get([:markup, :scrub_policy])
def html_filter_policy(_), do: @default_scrubbers
def get_or_fetch_by_ap_id(ap_id) do
user = get_by_ap_id(ap_id)
if !is_nil(user) and !User.needs_update?(user) do
user
else
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
case ap_try do
{:ok, user} ->
user
_ ->
case OStatus.make_user(ap_id) do
{:ok, user} -> user
_ -> {:error, "Could not fetch by AP id"}
end
end
end
end
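# The relay (instance) actor lives at `<base_url>/relay` and is created lazily
# the first time it is requested.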
def get_or_create_instance_user do
relay_uri = "#{Pleroma.Web.Endpoint.url()}/relay"
if user = get_by_ap_id(relay_uri) do
user
else
changes =
%User{info: %User.Info{}}
|> cast(%{}, [:ap_id, :nickname, :local])
|> put_change(:ap_id, relay_uri)
|> put_change(:nickname, nil)
|> put_change(:local, true)
|> put_change(:follower_address, relay_uri <> "/followers")
{:ok, user} = Repo.insert(changes)
user
end
end
# AP style
def public_key_from_info(%{
source_data: %{"publicKey" => %{"publicKeyPem" => public_key_pem}}
}) do
key =
public_key_pem
|> :public_key.pem_decode()
|> hd()
|> :public_key.pem_entry_decode()
{:ok, key}
end
# OStatus Magic Key
def public_key_from_info(%{magic_key: magic_key}) do
{:ok, Pleroma.Web.Salmon.decode_key(magic_key)}
end
def get_public_key_for_ap_id(ap_id) do
with %User{} = user <- get_or_fetch_by_ap_id(ap_id),
{:ok, public_key} <- public_key_from_info(user.info) do
{:ok, public_key}
else
_ -> :error
end
end
defp blank?(""), do: nil
defp blank?(n), do: n
def insert_or_update_user(data) do
data =
data
|> Map.put(:name, blank?(data[:name]) || data[:nickname])
cs = User.remote_user_creation(data)
Repo.insert(cs, on_conflict: :replace_all, conflict_target: :nickname)
end
def ap_enabled?(%User{local: true}), do: true
def ap_enabled?(%User{info: info}), do: info.ap_enabled
def ap_enabled?(_), do: false
@doc "Gets or fetch a user by uri or nickname."
@spec get_or_fetch(String.t()) :: User.t()
def get_or_fetch("http" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)
# Wait a period of time and return the newest version of the User structs.
# This is needed because we have synchronous follow APIs and have to simulate
# them with an asynchronous handshake.
def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
with %User{} = a <- Repo.get(User, a.id),
%User{} = b <- Repo.get(User, b.id) do
{:ok, a, b}
else
_e ->
:error
end
end
def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
with :ok <- :timer.sleep(timeout),
%User{} = a <- Repo.get(User, a.id),
%User{} = b <- Repo.get(User, b.id) do
{:ok, a, b}
else
_e ->
:error
end
end
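# Bios are formatted as plain text with fully qualified mentions, links to the
# user's own profile get `rel="me"`, and custom emoji found in the remote
# `source_data` tags are rendered.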
def parse_bio(bio, user \\ %User{info: %{source_data: %{}}})
def parse_bio(nil, _user), do: ""
def parse_bio(bio, _user) when bio == "", do: bio
def parse_bio(bio, user) do
emoji =
(user.info.source_data["tag"] || [])
|> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
|> Enum.map(fn %{"icon" => %{"url" => url}, "name" => name} ->
{String.trim(name, ":"), url}
end)
# TODO: get profile URLs other than user.ap_id
profile_urls = [user.ap_id]
bio
|> CommonUtils.format_input("text/plain",
mentions_format: :full,
rel: &RelMe.maybe_put_rel_me(&1, profile_urls)
)
|> elem(0)
|> Formatter.emojify(emoji)
end
def tag(user_identifiers, tags) when is_list(user_identifiers) do
Repo.transaction(fn ->
for user_identifier <- user_identifiers, do: tag(user_identifier, tags)
end)
end
def tag(nickname, tags) when is_binary(nickname),
do: tag(User.get_by_nickname(nickname), tags)
def tag(%User{} = user, tags),
do: update_tags(user, Enum.uniq((user.tags || []) ++ normalize_tags(tags)))
def untag(user_identifiers, tags) when is_list(user_identifiers) do
Repo.transaction(fn ->
for user_identifier <- user_identifiers, do: untag(user_identifier, tags)
end)
end
def untag(nickname, tags) when is_binary(nickname),
do: untag(User.get_by_nickname(nickname), tags)
def untag(%User{} = user, tags),
do: update_tags(user, (user.tags || []) -- normalize_tags(tags))
defp update_tags(%User{} = user, new_tags) do
{:ok, updated_user} =
user
|> change(%{tags: new_tags})
|> update_and_set_cache()
updated_user
end
def bookmark(%User{} = user, status_id) do
bookmarks = Enum.uniq(user.bookmarks ++ [status_id])
update_bookmarks(user, bookmarks)
end
def unbookmark(%User{} = user, status_id) do
bookmarks = Enum.uniq(user.bookmarks -- [status_id])
update_bookmarks(user, bookmarks)
end
def update_bookmarks(%User{} = user, bookmarks) do
user
|> change(%{bookmarks: bookmarks})
|> update_and_set_cache
end
defp normalize_tags(tags) do
[tags]
|> List.flatten()
|> Enum.map(&String.downcase(&1))
end
defp local_nickname_regex() do
if Pleroma.Config.get([:instance, :extended_nickname_format]) do
@extended_local_nickname_regex
else
@strict_local_nickname_regex
end
end
def local_nickname(nickname_or_mention) do
nickname_or_mention
|> full_nickname()
|> String.split("@")
|> hd()
end
def full_nickname(nickname_or_mention),
do: String.trim_leading(nickname_or_mention, "@")
def error_user(ap_id) do
%User{
name: ap_id,
ap_id: ap_id,
info: %User.Info{},
nickname: "erroruser@example.com",
inserted_at: NaiveDateTime.utc_now()
}
end
def all_superusers do
from(
u in User,
where: u.local == true,
where: fragment("?->'is_admin' @> 'true' OR ?->'is_moderator' @> 'true'", u.info, u.info)
)
|> Repo.all()
end
defp paginate(query, page, page_size) do
from(u in query,
limit: ^page_size,
offset: ^((page - 1) * page_size)
)
end
end