Fetch user's outbox posts on first federation with that user
This commit is contained in:
parent
39b766cc43
commit
5021b7836f
|
@ -348,6 +348,10 @@
|
||||||
federator_outgoing: [max_jobs: 50],
|
federator_outgoing: [max_jobs: 50],
|
||||||
mailer: [max_jobs: 10]
|
mailer: [max_jobs: 10]
|
||||||
|
|
||||||
|
config :pleroma, :fetch_initial_posts,
|
||||||
|
enabled: false,
|
||||||
|
pages: 5
|
||||||
|
|
||||||
config :auto_linker,
|
config :auto_linker,
|
||||||
opts: [
|
opts: [
|
||||||
scheme: true,
|
scheme: true,
|
||||||
|
|
|
@ -285,6 +285,10 @@ This config contains two queues: `federator_incoming` and `federator_outgoing`.
|
||||||
## :rich_media
|
## :rich_media
|
||||||
* `enabled`: if enabled, the instance will parse metadata from attached links to generate link previews
|
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
|
||||||
|
|
||||||
|
## :fetch_initial_posts
|
||||||
|
* `enabled`: if enabled, fetch some of a remote user's latest posts when federating with them for the first time
|
||||||
|
* `pages`: the number of pages to fetch
|
||||||
|
|
||||||
## :hackney_pools
|
## :hackney_pools
|
||||||
|
|
||||||
Advanced. Tweaks Hackney (http client) connections pools.
|
Advanced. Tweaks Hackney (http client) connections pools.
|
||||||
|
|
|
@ -532,6 +532,10 @@ def get_or_fetch_by_nickname(nickname) do
|
||||||
_e ->
|
_e ->
|
||||||
with [_nick, _domain] <- String.split(nickname, "@"),
|
with [_nick, _domain] <- String.split(nickname, "@"),
|
||||||
{:ok, user} <- fetch_by_nickname(nickname) do
|
{:ok, user} <- fetch_by_nickname(nickname) do
|
||||||
|
if Pleroma.Config.get([:fetch_initial_posts, :enabled]) do
|
||||||
|
{:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
|
||||||
|
end
|
||||||
|
|
||||||
user
|
user
|
||||||
else
|
else
|
||||||
_e -> nil
|
_e -> nil
|
||||||
|
@ -539,6 +543,17 @@ def get_or_fetch_by_nickname(nickname) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc "Fetch some posts when the user has just been federated with"
|
||||||
|
def fetch_initial_posts(user) do
|
||||||
|
pages = Pleroma.Config.get!([:fetch_initial_posts, :pages])
|
||||||
|
|
||||||
|
Enum.each(
|
||||||
|
# Insert all the posts in reverse order, so they're in the right order on the timeline
|
||||||
|
Enum.reverse(Utils.fetch_ordered_collection(user.info.source_data["outbox"], pages)),
|
||||||
|
&Pleroma.Web.Federator.incoming_ap_doc/1
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
def get_followers_query(%User{id: id, follower_address: follower_address}, nil) do
|
def get_followers_query(%User{id: id, follower_address: follower_address}, nil) do
|
||||||
from(
|
from(
|
||||||
u in User,
|
u in User,
|
||||||
|
@ -1108,12 +1123,7 @@ def html_filter_policy(%User{info: %{no_rich_text: true}}) do
|
||||||
|
|
||||||
def html_filter_policy(_), do: @default_scrubbers
|
def html_filter_policy(_), do: @default_scrubbers
|
||||||
|
|
||||||
def get_or_fetch_by_ap_id(ap_id) do
|
def fetch_by_ap_id(ap_id) do
|
||||||
user = get_by_ap_id(ap_id)
|
|
||||||
|
|
||||||
if !is_nil(user) and !User.needs_update?(user) do
|
|
||||||
user
|
|
||||||
else
|
|
||||||
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
|
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
|
||||||
|
|
||||||
case ap_try do
|
case ap_try do
|
||||||
|
@ -1127,6 +1137,23 @@ def get_or_fetch_by_ap_id(ap_id) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc """
Returns the user for `ap_id`, fetching from the remote instance when the
local copy is missing or stale (per `User.needs_update?/1`).

When the `:fetch_initial_posts` config is enabled, also starts a background
task that pulls the newly fetched user's outbox via `fetch_initial_posts/1`.
"""
def get_or_fetch_by_ap_id(ap_id) do
  user = get_by_ap_id(ap_id)

  if !is_nil(user) and !User.needs_update?(user) do
    user
  else
    user = fetch_by_ap_id(ap_id)

    if Pleroma.Config.get([:fetch_initial_posts, :enabled]) do
      # Use `<-` rather than `=` here: a bare match inside `with` raises
      # MatchError when the fetch fails (nil / error tuple), whereas `<-`
      # simply falls through and skips the task.
      with %User{} <- user do
        {:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
      end
    end

    user
  end
end
|
||||||
|
|
||||||
def get_or_create_instance_user do
|
def get_or_create_instance_user do
|
||||||
|
|
|
@ -633,4 +633,43 @@ def make_flag_data(params, additional) do
|
||||||
}
|
}
|
||||||
|> Map.merge(additional)
|
|> Map.merge(additional)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of pages fetched after
|
||||||
|
the first one to `pages_left` pages.
|
||||||
|
If the amount of pages is higher than the collection has, it returns whatever was there.
|
||||||
|
"""
|
||||||
|
def fetch_ordered_collection(from, pages_left, acc \\ []) do
|
||||||
|
with {:ok, response} <- Tesla.get(from),
|
||||||
|
{:ok, collection} <- Poison.decode(response.body) do
|
||||||
|
case collection["type"] do
|
||||||
|
"OrderedCollection" ->
|
||||||
|
# If we've encountered the OrderedCollection and not the page,
|
||||||
|
# just call the same function on the page address
|
||||||
|
fetch_ordered_collection(collection["first"], pages_left)
|
||||||
|
|
||||||
|
"OrderedCollectionPage" ->
|
||||||
|
if pages_left > 0 do
|
||||||
|
# There are still more pages
|
||||||
|
if Map.has_key?(collection, "next") do
|
||||||
|
# There are still more pages, go deeper saving what we have into the accumulator
|
||||||
|
fetch_ordered_collection(
|
||||||
|
collection["next"],
|
||||||
|
pages_left - 1,
|
||||||
|
acc ++ collection["orderedItems"]
|
||||||
|
)
|
||||||
|
else
|
||||||
|
# No more pages left, just return whatever we already have
|
||||||
|
acc ++ collection["orderedItems"]
|
||||||
|
end
|
||||||
|
else
|
||||||
|
# Got the amount of pages needed, add them all to the accumulator
|
||||||
|
acc ++ collection["orderedItems"]
|
||||||
|
end
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
{:error, "Not an OrderedCollection or OrderedCollectionPage"}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
Loading…
Reference in New Issue