spc-pleroma/lib/pleroma/web/federator/federator.ex


# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.Federator do
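@moduledoc """
Federation entry point: client functions enqueue work into PleromaJobQueue,
and the perform callbacks below parse incoming OStatus/ActivityPub documents,
publish local activities, and manage WebSub subscriptions.
"""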
alias Pleroma.Activity
alias Pleroma.Object.Containment
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.Federator.Publisher
alias Pleroma.Web.Federator.RetryQueue
alias Pleroma.Web.OStatus
alias Pleroma.Web.Websub

require Logger

def init do
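# Give the node a minute after boot before starting the WebSub refresh loop.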
# 1 minute
Process.sleep(1000 * 60)
refresh_subscriptions()
end

@doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
def allowed_incoming_reply_depth?(depth) do
max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])

if max_replies_depth do
(depth || 1) <= max_replies_depth
else
true
end
end

# Client API

def incoming_doc(doc) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
end

def incoming_ap_doc(params) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
end

def publish(activity, priority \\ 1) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
end

def verify_websub(websub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
end

def request_subscription(sub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
end

def refresh_subscriptions do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
end

# Job Worker Callbacks

def perform(:refresh_subscriptions) do
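# Refresh all WebSub subscriptions now and re-enqueue this job in six hours.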
Logger.debug("Federator running refresh subscriptions")
Websub.refresh_subscriptions()

spawn(fn ->
# 6 hours
Process.sleep(1000 * 60 * 60 * 6)
refresh_subscriptions()
end)
end

def perform(:request_subscription, websub) do
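# Try to renew a WebSub subscription; the outcome is only logged.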
Logger.debug("Refreshing #{websub.topic}")
2018-03-30 13:01:53 +00:00
with {:ok, websub} <- Websub.request_subscription(websub) do
Logger.debug("Successfully refreshed #{websub.topic}")
else
_e -> Logger.debug("Couldn't refresh #{websub.topic}")
end
end

def perform(:publish, activity) do
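# Look up the actor, make sure their signing keys exist, then hand the
# activity to Publisher.publish/2.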
Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)

with %User{} = actor <- User.get_cached_by_ap_id(activity.data["actor"]),
{:ok, actor} <- User.ensure_keys_present(actor) do
Publisher.publish(actor, activity)
end
end

def perform(:verify_websub, websub) do
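# Run hub verification for a pending WebSub subscription.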
Logger.debug(fn ->
"Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
end)

Websub.verify(websub)
end

def perform(:incoming_doc, doc) do
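# Raw OStatus/Atom payloads are parsed and dispatched by the OStatus module.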
Logger.info("Got document, trying to parse")
OStatus.handle_incoming(doc)
end

def perform(:incoming_ap_doc, params) do
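# Activities are dropped unless the actor is AP-enabled, the activity is new
# to us, and it passes origin containment.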
Logger.info("Handling incoming AP activity")
2018-03-30 13:01:53 +00:00
params = Utils.normalize_params(params)
# NOTE: we use the actor ID to do the containment, this is fine because an
# actor shouldn't be acting on objects outside their own AP server.
with {:ok, _user} <- ap_enabled_actor(params["actor"]),
nil <- Activity.normalize(params["id"]),
:ok <- Containment.contain_origin_from_id(params["actor"], params),
{:ok, activity} <- Transmogrifier.handle_incoming(params) do
{:ok, activity}
else
%Activity{} ->
Logger.info("Already had #{params["id"]}")
:error

_e ->
# Just drop those for now
Logger.info("Unhandled activity")
Logger.info(Jason.encode!(params, pretty: true))
:error
end
end

def perform(
:publish_single_websub,
%{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
) do
case Websub.publish_one(params) do
{:ok, _} ->
:ok

{:error, _} ->
RetryQueue.enqueue(params, Websub)
end
end

def perform(type, _) do
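# Catch-all for job types this worker does not know how to handle.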
Logger.debug(fn -> "Unknown task: #{type}" end)
{:error, "Don't know what to do with this"}
end

def ap_enabled_actor(id) do
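# Use the cached user if they already federate over ActivityPub; otherwise
# try to fetch and upgrade them via ActivityPub.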
user = User.get_cached_by_ap_id(id)

if User.ap_enabled?(user) do
{:ok, user}
else
ActivityPub.make_user_from_ap_id(id)
end
end
end