spc-pleroma/lib/pleroma/web/mastodon_api/views/status_view.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.StatusView do
use Pleroma.Web, :view
require Pleroma.Constants
alias Pleroma.Activity
alias Pleroma.HTML
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.UserRelationship
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.MastodonAPI.AccountView
alias Pleroma.Web.MastodonAPI.PollView
alias Pleroma.Web.MastodonAPI.StatusView
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.PleromaAPI.EmojiReactionController
alias Pleroma.Web.RichMedia.Card
import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
# This is a naive way to do this, just spawning a process per activity
# to fetch the preview. However it should be fine considering
# pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity ->
spawn(fn -> Card.get_by_activity(activity) end)
end)
end
# TODO: Add cached version.
defp get_replied_to_activities([]), do: %{}
defp get_replied_to_activities(activities) do
activities
|> Enum.map(fn
%{data: %{"type" => "Create"}} = activity ->
object = Object.normalize(activity, fetch: false)
object && object.data["inReplyTo"] != "" && object.data["inReplyTo"]
_ ->
nil
end)
|> Enum.filter(& &1)
|> Activity.create_by_object_ap_id_with_object()
|> Repo.all()
|> Enum.reduce(%{}, fn activity, acc ->
object = Object.normalize(activity, fetch: false)
if object, do: Map.put(acc, object.data["id"], activity), else: acc
end)
end
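# Same as get_replied_to_activities/1, but for quote posts: maps each quoted
# object's AP ID to its Create activity so quotes can be rendered without
# per-status lookups.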
defp get_quoted_activities([]), do: %{}
defp get_quoted_activities(activities) do
activities
|> Enum.map(fn
%{data: %{"type" => "Create"}} = activity ->
object = Object.normalize(activity, fetch: false)
object && object.data["quoteUrl"] != "" && object.data["quoteUrl"]
_ ->
nil
end)
|> Enum.filter(& &1)
|> Activity.create_by_object_ap_id_with_object()
|> Repo.all()
|> Enum.reduce(%{}, fn activity, acc ->
object = Object.normalize(activity, fetch: false)
if object, do: Map.put(acc, object.data["id"], activity), else: acc
end)
end
# DEPRECATED This field seems to be a left-over from the StatusNet era.
# If your application uses `pleroma.conversation_id`: this field is deprecated.
# It is currently stubbed instead by doing a CRC32 of the context, and
# clearing the MSB to avoid overflow exceptions with signed integers on the
# different clients using this field (Java/Kotlin code, mostly; see Husky.)
# This should be removed in a future version of Pleroma. Pleroma-FE currently
# depends on this field, as well.
defp get_context_id(%{data: %{"context" => context}}) when is_binary(context) do
import Bitwise
:erlang.crc32(context)
|> band(bnot(0x8000_0000))
end
defp get_context_id(_), do: nil
# Check if the user reblogged this status
defp reblogged?(activity, %User{ap_id: ap_id}) do
with %Object{data: %{"announcements" => announcements}} when is_list(announcements) <-
Object.normalize(activity, fetch: false) do
ap_id in announcements
else
_ -> false
end
end
# False if the user is logged out
defp reblogged?(_activity, _user), do: false
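# Renders a list of statuses. Reply targets, quoted statuses, reblogged
# parents and user relationships are preloaded here so the individual
# "show.json" renders below do not have to query for them one by one.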
def render("index.json", opts) do
reading_user = opts[:for]
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
activities = Enum.filter(opts.activities, & &1)
# Start prefetching rich media before doing anything else
fetch_rich_media_for_activities(activities)
replied_to_activities = get_replied_to_activities(activities)
quoted_activities = get_quoted_activities(activities)
parent_activities =
activities
|> Enum.filter(&(&1.data["type"] == "Announce" && &1.data["object"]))
|> Enum.map(&Object.normalize(&1, fetch: false).data["id"])
|> Activity.create_by_object_ap_id()
|> Activity.with_preloaded_object(:left)
|> Activity.with_preloaded_bookmark(reading_user)
|> Activity.with_set_thread_muted_field(reading_user)
|> Repo.all()
relationships_opt =
cond do
Map.has_key?(opts, :relationships) ->
opts[:relationships]
is_nil(reading_user) ->
UserRelationship.view_relationships_option(nil, [])
true ->
# Note: unresolved users are filtered out
actors =
(activities ++ parent_activities)
|> Enum.map(&CommonAPI.get_user(&1.data["actor"], false))
|> Enum.filter(& &1)
UserRelationship.view_relationships_option(reading_user, actors, subset: :source_mutes)
end
opts =
opts
|> Map.put(:replied_to_activities, replied_to_activities)
|> Map.put(:quoted_activities, quoted_activities)
|> Map.put(:parent_activities, parent_activities)
|> Map.put(:relationships, relationships_opt)
safe_render_many(activities, StatusView, "show.json", opts)
end
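# An Announce activity is rendered as a reblog: the reblogged status is
# rendered recursively and most counters are zeroed, since they belong to
# the wrapped status rather than to the Announce itself.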
def render(
"show.json",
%{activity: %{data: %{"type" => "Announce", "object" => _object}} = activity} = opts
) do
user = CommonAPI.get_user(activity.data["actor"])
created_at = Utils.to_masto_date(activity.data["published"])
object = Object.normalize(activity, fetch: false)
reblogged_parent_activity =
if opts[:parent_activities] do
Activity.Queries.find_by_object_ap_id(
opts[:parent_activities],
object.data["id"]
)
else
Activity.create_by_object_ap_id(object.data["id"])
|> Activity.with_preloaded_bookmark(opts[:for])
|> Activity.with_set_thread_muted_field(opts[:for])
|> Repo.one()
end
reblog_rendering_opts = Map.put(opts, :activity, reblogged_parent_activity)
reblogged = render("show.json", reblog_rendering_opts)
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
bookmark = Activity.get_bookmark(reblogged_parent_activity, opts[:for])
bookmark_folder =
if bookmark != nil do
bookmark.folder_id
else
nil
end
mentions =
activity.recipients
|> Enum.map(fn ap_id -> User.get_cached_by_ap_id(ap_id) end)
|> Enum.filter(& &1)
|> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)
{pinned?, pinned_at} = pin_data(object, user)
%{
id: to_string(activity.id),
uri: object.data["id"],
url: object.data["id"],
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
in_reply_to_id: nil,
in_reply_to_account_id: nil,
reblog: reblogged,
content: reblogged[:content] || "",
created_at: created_at,
reblogs_count: 0,
replies_count: 0,
favourites_count: 0,
reblogged: reblogged?(reblogged_parent_activity, opts[:for]),
favourited: present?(favorited),
bookmarked: present?(bookmark),
muted: false,
pinned: pinned?,
sensitive: false,
spoiler_text: "",
visibility: get_visibility(activity),
media_attachments: reblogged[:media_attachments] || [],
mentions: mentions,
tags: reblogged[:tags] || [],
application: build_application(object.data["generator"]),
language: nil,
emojis: [],
pleroma: %{
local: activity.local,
pinned_at: pinned_at,
bookmark_folder: bookmark_folder
}
}
end
def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do
object = Object.normalize(activity, fetch: false)
user = CommonAPI.get_user(activity.data["actor"])
user_follower_address = user.follower_address
like_count = object.data["like_count"] || 0
announcement_count = object.data["announcement_count"] || 0
hashtags = Object.hashtags(object)
sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
tags = Object.tags(object)
tag_mentions =
tags
|> Enum.filter(fn tag -> is_map(tag) and tag["type"] == "Mention" end)
|> Enum.map(fn tag -> tag["href"] end)
mentions =
(object.data["to"] ++ tag_mentions)
|> Enum.uniq()
|> Enum.map(fn
Pleroma.Constants.as_public() -> nil
^user_follower_address -> nil
ap_id -> User.get_cached_by_ap_id(ap_id)
end)
|> Enum.filter(& &1)
|> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
bookmark = Activity.get_bookmark(activity, opts[:for])
bookmark_folder =
if bookmark != nil do
bookmark.folder_id
else
nil
end
client_posted_this_activity = opts[:for] && user.id == opts[:for].id
expires_at =
with true <- client_posted_this_activity,
%Oban.Job{scheduled_at: scheduled_at} <-
Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) do
scheduled_at
else
_ -> nil
end
thread_muted? =
cond do
is_nil(opts[:for]) -> false
is_boolean(activity.thread_muted?) -> activity.thread_muted?
true -> CommonAPI.thread_muted?(opts[:for], activity)
end
attachment_data = object.data["attachment"] || []
attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
created_at = Utils.to_masto_date(object.data["published"])
edited_at =
with %{"updated" => updated} <- object.data,
date <- Utils.to_masto_date(updated),
true <- date != "" do
date
else
_ ->
nil
end
reply_to = get_reply_to(activity, opts)
reply_to_user = reply_to && CommonAPI.get_user(reply_to.data["actor"])
history_len =
1 +
(Object.Updater.history_for(object.data)
|> Map.get("orderedItems")
|> length())
# See render("history.json", ...) for more details
# Here the implicit index of the current content is 0
chrono_order = history_len - 1
quote_activity = get_quote(activity, opts)
quote_id =
case quote_activity do
%Activity{id: id} -> id
_ -> nil
end
quote_post =
if visible_for_user?(quote_activity, opts[:for]) and opts[:show_quote] != false do
quote_rendering_opts = Map.merge(opts, %{activity: quote_activity, show_quote: false})
render("show.json", quote_rendering_opts)
else
nil
end
content =
object
|> render_content()
content_html =
content
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
"mastoapi:content:#{chrono_order}"
)
content_plaintext =
content
|> Activity.HTML.get_cached_stripped_html_for_activity(
activity,
"mastoapi:content:#{chrono_order}"
)
summary = object.data["summary"] || ""
card =
case Card.get_by_activity(activity) do
%Card{} = result -> render("card.json", result)
_ -> nil
end
url =
if user.local do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, activity)
else
object.data["url"] || object.data["external_url"] || object.data["id"]
end
direct_conversation_id =
with {_, nil} <- {:direct_conversation_id, opts[:direct_conversation_id]},
{_, true} <- {:include_id, opts[:with_direct_conversation_id]},
{_, %User{} = for_user} <- {:for_user, opts[:for]} do
Activity.direct_conversation_id(activity, for_user)
else
{:direct_conversation_id, participation_id} when is_integer(participation_id) ->
participation_id
_e ->
nil
end
emoji_reactions =
object
|> Object.get_emoji_reactions()
|> EmojiReactionController.filter_allowed_users(
opts[:for],
Map.get(opts, :with_muted, false)
)
|> Stream.map(fn {emoji, users, url} ->
build_emoji_map(emoji, users, url, opts[:for])
end)
|> Enum.to_list()
# Status muted state (would do 1 request per status unless user mutes are preloaded)
muted =
thread_muted? ||
UserRelationship.exists?(
get_in(opts, [:relationships, :user_relationships]),
:mute,
opts[:for],
user,
fn for_user, user -> User.mutes?(for_user, user) end
)
{pinned?, pinned_at} = pin_data(object, user)
%{
id: to_string(activity.id),
uri: object.data["id"],
url: url,
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
in_reply_to_id: reply_to && to_string(reply_to.id),
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
reblog: nil,
card: card,
content: content_html,
text: opts[:with_source] && get_source_text(object.data["source"]),
created_at: created_at,
edited_at: edited_at,
reblogs_count: announcement_count,
replies_count: object.data["repliesCount"] || 0,
favourites_count: like_count,
reblogged: reblogged?(activity, opts[:for]),
favourited: present?(favorited),
bookmarked: present?(bookmark),
muted: muted,
pinned: pinned?,
sensitive: sensitive,
spoiler_text: summary,
visibility: get_visibility(object),
media_attachments: attachments,
poll: render(PollView, "show.json", object: object, for: opts[:for]),
mentions: mentions,
tags: build_tags(tags),
application: build_application(object.data["generator"]),
language: nil,
emojis: build_emojis(object.data["emoji"]),
pleroma: %{
local: activity.local,
conversation_id: get_context_id(activity),
context: object.data["context"],
in_reply_to_account_acct: reply_to_user && reply_to_user.nickname,
quote: quote_post,
quote_id: quote_id,
quote_url: object.data["quoteUrl"],
quote_visible: visible_for_user?(quote_activity, opts[:for]),
content: %{"text/plain" => content_plaintext},
spoiler_text: %{"text/plain" => summary},
expires_at: expires_at,
direct_conversation_id: direct_conversation_id,
thread_muted: thread_muted?,
emoji_reactions: emoji_reactions,
parent_visible: visible_for_user?(reply_to, opts[:for]),
pinned_at: pinned_at,
quotes_count: object.data["quotesCount"] || 0,
bookmark_folder: bookmark_folder
}
}
end
def render("show.json", _) do
nil
end
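# Renders the edit history of a status as a list of "history_item.json"
# entries, oldest revision first, with the current revision as the last item.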
def render("history.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do
object = Object.normalize(activity, fetch: false)
hashtags = Object.hashtags(object)
user = CommonAPI.get_user(activity.data["actor"])
past_history =
Object.Updater.history_for(object.data)
|> Map.get("orderedItems")
|> Enum.map(&Map.put(&1, "id", object.data["id"]))
|> Enum.map(&%Object{data: &1, id: object.id})
history =
[object | past_history]
# Mastodon expects the original (oldest) revision to be the first item
|> Enum.reverse()
|> Enum.with_index()
|> Enum.map(fn {object, chrono_order} ->
%{
# The history is prepended every time there is a new edit.
# In chrono_order, the oldest item is always at 0, and so on.
# The chrono_order is an invariant kept between edits.
chrono_order: chrono_order,
object: object
}
end)
individual_opts =
opts
|> Map.put(:as, :item)
|> Map.put(:user, user)
|> Map.put(:hashtags, hashtags)
render_many(history, StatusView, "history_item.json", individual_opts)
end
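# Renders a single revision from the edit history. chrono_order doubles as a
# stable cache-key suffix so each revision's scrubbed HTML is cached separately.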
def render(
"history_item.json",
%{
activity: activity,
user: user,
item: %{object: object, chrono_order: chrono_order},
hashtags: hashtags
} = opts
) do
sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
attachment_data = object.data["attachment"] || []
attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
created_at = Utils.to_masto_date(object.data["updated"] || object.data["published"])
content =
object
|> render_content()
content_html =
content
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
"mastoapi:content:#{chrono_order}"
)
summary = object.data["summary"] || ""
%{
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
content: content_html,
sensitive: sensitive,
spoiler_text: summary,
created_at: created_at,
media_attachments: attachments,
emojis: build_emojis(object.data["emoji"]),
poll: render(PollView, "show.json", object: object, for: opts[:for])
}
end
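# Returns the raw source text, spoiler text and content type of a status,
# used when editing an existing post.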
def render("source.json", %{activity: %{data: %{"object" => _object}} = activity} = _opts) do
object = Object.normalize(activity, fetch: false)
%{
id: activity.id,
text: get_source_text(Map.get(object.data, "source", "")),
spoiler_text: Map.get(object.data, "summary", ""),
content_type: get_source_content_type(object.data["source"])
}
end
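# Renders a rich media preview card from a previously fetched Card record.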
def render("card.json", %Card{fields: rich_media}) do
page_url_data = URI.parse(rich_media["url"])
page_url = page_url_data |> to_string
image_url = proxied_url(rich_media["image"], page_url_data)
audio_url = proxied_url(rich_media["audio"], page_url_data)
video_url = proxied_url(rich_media["video"], page_url_data)
%{
type: "link",
provider_name: page_url_data.host,
provider_url: page_url_data.scheme <> "://" <> page_url_data.host,
url: page_url,
image: image_url,
title: rich_media["title"] || "",
description: rich_media["description"] || "",
pleroma: %{
opengraph:
rich_media
|> Maps.put_if_present("image", image_url)
|> Maps.put_if_present("audio", audio_url)
|> Maps.put_if_present("video", video_url)
}
}
end
def render("card.json", _), do: nil
def render("attachment.json", %{attachment: attachment}) do
[attachment_url | _] = attachment["url"]
media_type = attachment_url["mediaType"] || attachment_url["mimeType"] || "image"
href = attachment_url["href"] |> MediaProxy.url()
href_preview = attachment_url["href"] |> MediaProxy.preview_url()
meta = render("attachment_meta.json", %{attachment: attachment})
type =
cond do
String.contains?(media_type, "image") -> "image"
String.contains?(media_type, "video") -> "video"
String.contains?(media_type, "audio") -> "audio"
true -> "unknown"
end
attachment_id =
with {_, ap_id} when is_binary(ap_id) <- {:ap_id, attachment["id"]},
{_, %Object{data: _object_data, id: object_id}} <-
{:object, Object.get_by_ap_id(ap_id)} do
to_string(object_id)
else
_ ->
<<hash_id::signed-32, _rest::binary>> = :crypto.hash(:md5, href)
to_string(attachment["id"] || hash_id)
end
%{
id: attachment_id,
url: href,
remote_url: href,
preview_url: href_preview,
text_url: href,
type: type,
description: attachment["name"],
pleroma: %{mime_type: media_type},
blurhash: attachment["blurhash"]
}
|> Maps.put_if_present(:meta, meta)
end
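# Exposes the original width/height/aspect ratio of an attachment when the
# object carries dimensions.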
def render("attachment_meta.json", %{
attachment: %{"url" => [%{"width" => width, "height" => height} | _]}
})
when is_integer(width) and is_integer(height) do
%{
original: %{
width: width,
height: height,
aspect: width / height
}
}
end
def render("attachment_meta.json", _), do: nil
def render("context.json", %{activity: activity, activities: activities, user: user}) do
%{ancestors: ancestors, descendants: descendants} =
activities
|> Enum.reverse()
|> Enum.group_by(fn %{id: id} -> if id < activity.id, do: :ancestors, else: :descendants end)
|> Map.put_new(:ancestors, [])
|> Map.put_new(:descendants, [])
%{
ancestors: render("index.json", for: user, activities: ancestors, as: :activity),
descendants: render("index.json", for: user, activities: descendants, as: :activity)
}
end
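# Resolves the activity being replied to, preferring the preloaded
# replied_to_activities map and falling back to a database lookup.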
def get_reply_to(activity, %{replied_to_activities: replied_to_activities}) do
object = Object.normalize(activity, fetch: false)
with nil <- replied_to_activities[object.data["inReplyTo"]] do
# If user didn't participate in the thread
Activity.get_in_reply_to_activity(activity)
end
end
def get_reply_to(%{data: %{"object" => _object}} = activity, _) do
object = Object.normalize(activity, fetch: false)
if object.data["inReplyTo"] && object.data["inReplyTo"] != "" do
Activity.get_create_by_object_ap_id(object.data["inReplyTo"])
else
nil
end
end
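# Resolves the quoted activity, preferring the preloaded quoted_activities
# map and falling back to a lookup by quoteUrl.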
def get_quote(activity, %{quoted_activities: quoted_activities}) do
object = Object.normalize(activity, fetch: false)
with nil <- quoted_activities[object.data["quoteUrl"]] do
# For when a quote post is inside an Announce
Activity.get_create_by_object_ap_id_with_object(object.data["quoteUrl"])
end
end
def get_quote(%{data: %{"object" => _object}} = activity, _) do
object = Object.normalize(activity, fetch: false)
if object.data["quoteUrl"] && object.data["quoteUrl"] != "" do
Activity.get_create_by_object_ap_id(object.data["quoteUrl"])
else
nil
end
end
def render_content(%{data: %{"name" => name}} = object) when not is_nil(name) and name != "" do
url = object.data["url"] || object.data["id"]
"<p><a href=\"#{url}\">#{name}</a></p>#{object.data["content"]}"
end
def render_content(object), do: object.data["content"] || ""
@doc """
Builds a list of hashtag maps. Each URL is prefixed with the instance endpoint URL.

## Examples

iex> Pleroma.Web.MastodonAPI.StatusView.build_tags(["fediverse", "nextcloud"])
[%{name: "fediverse", url: "https://example.org/tag/fediverse"},
 %{name: "nextcloud", url: "https://example.org/tag/nextcloud"}]
"""
@spec build_tags(list(any())) :: list(map())
def build_tags(object_tags) when is_list(object_tags) do
object_tags
|> Enum.filter(&is_binary/1)
|> Enum.map(&%{name: &1, url: "#{Pleroma.Web.Endpoint.url()}/tag/#{URI.encode(&1)}"})
end
def build_tags(_), do: []
@doc """
Builds a list of emoji maps.

Arguments: `nil` or a list of `{name, url}` tuples.
Returns a list of emoji maps.
## Examples
iex> Pleroma.Web.MastodonAPI.StatusView.build_emojis([{"2hu", "corndog.png"}])
[%{shortcode: "2hu", static_url: "corndog.png", url: "corndog.png", visible_in_picker: false}]
"""
@spec build_emojis(nil | list(tuple())) :: list(map())
def build_emojis(nil), do: []
def build_emojis(emojis) do
emojis
|> Enum.map(fn {name, url} ->
name = HTML.strip_tags(name)
url =
url
|> HTML.strip_tags()
|> MediaProxy.url()
%{shortcode: name, url: url, static_url: url, visible_in_picker: false}
end)
end
defp present?(nil), do: false
defp present?(false), do: false
defp present?(_), do: true
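# Returns {pinned?, pinned_at} based on the author's pinned_objects map.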
defp pin_data(%Object{data: %{"id" => object_id}}, %User{pinned_objects: pinned_objects}) do
if pinned_at = pinned_objects[object_id] do
{true, Utils.to_masto_date(pinned_at)}
else
{false, nil}
end
end
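# Builds a single emoji reaction entry: reaction name, count, reacting
# accounts and whether the current user has reacted.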
defp build_emoji_map(emoji, users, url, current_user) do
%{
name: Pleroma.Web.PleromaAPI.EmojiReactionView.emoji_name(emoji, url),
count: length(users),
url: MediaProxy.url(url),
me: !!(current_user && current_user.ap_id in users),
account_ids: Enum.map(users, fn user -> User.get_cached_by_ap_id(user).id end)
}
end
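# Maps an ActivityPub "generator" object to the Mastodon application entity.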
@spec build_application(map() | nil) :: map() | nil
defp build_application(%{"type" => _type, "name" => name, "url" => url}),
do: %{name: name, website: url}
defp build_application(_), do: nil
# Workaround for Elixir issue #10771
# Avoid applying URI.merge unless necessary
# TODO: revert to always attempting URI.merge(image_url_data, page_url_data)
# when Elixir 1.12 is the minimum supported version
@spec build_image_url(struct() | nil, struct()) :: String.t() | nil
defp build_image_url(
%URI{scheme: image_scheme, host: image_host} = image_url_data,
%URI{} = _page_url_data
)
when not is_nil(image_scheme) and not is_nil(image_host) do
image_url_data |> to_string
end
defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do
URI.merge(page_url_data, image_url_data) |> to_string
end
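# Extracts plain source text from either a source map ("content" key) or a
# bare string; returns "" when no source is available.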
defp get_source_text(%{"content" => content} = _source) do
content
end
defp get_source_text(source) when is_binary(source) do
source
end
defp get_source_text(_) do
""
end
defp get_source_content_type(%{"mediaType" => type} = _source) do
type
end
defp get_source_content_type(_source) do
Utils.get_content_type(nil)
end
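# Resolves a possibly relative rich media URL against the page URL and runs
# it through the media proxy; returns nil unless the URL is a binary.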
defp proxied_url(url, page_url_data) do
if is_binary(url) do
build_image_url(URI.parse(url), page_url_data) |> MediaProxy.url()
else
nil
end
end
end