Merge branch 'tusooa/rework-refetch' into 'develop'

Make sure object refetching follows update rules

See merge request pleroma/pleroma!3883
Authored by Haelwenn on 2023-05-17 18:50:35 +00:00; committed by Haelwenn (lanodan) Monnier
parent e4288df502
commit 72833c84b5
5 changed files with 144 additions and 111 deletions
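
At a high level, refetching now goes through the same validation and update machinery as a federated Update instead of hand-rolling its own history merging. Below is a condensed sketch of the reworked reinject_object/2 clauses, simplified from the fetcher diff further down (logging and error branches omitted; module and function names are taken from the diff itself, not invented):

  # Existing stored object: validate, run MRF, then apply the shared update helper,
  # bumping updated_at via the touch_changeset? flag.
  defp reinject_object(%Object{data: %{}} = object, new_data) do
    with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
         {:ok, new_data} <- MRF.filter(new_data),
         {:ok, new_object, _} <-
           Object.Updater.do_update_and_invalidate_cache(object, new_data, _touch_changeset? = true) do
      {:ok, new_object}
    end
  end

  # Fallback: run the fetched data through the common pipeline, so the usual
  # side effects (history handling, cache invalidation) apply uniformly.
  defp reinject_object(_, new_data) do
    with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
      {:ok, object}
    end
  end

Both branches therefore follow the Update rules named in the commit message, which is what fixes the poll refetch behaviour noted in the changelog entry below.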

changelog.d/3883.fix Normal file

@@ -0,0 +1 @@
Fix abnormal behaviour when refetching a poll


@@ -8,77 +8,30 @@ defmodule Pleroma.Object.Fetcher do
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
require Logger
require Pleroma.Constants
defp touch_changeset(changeset) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
has_history? = fn
%{"formerRepresentations" => %{"orderedItems" => list}} when is_list(list) -> true
_ -> false
end
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
remote_history_exists? = has_history?.(new_data)
# If the remote history exists, we treat that as the only source of truth.
new_data =
if has_history?.(old_data) and not remote_history_exists? do
Map.put(new_data, "formerRepresentations", old_data["formerRepresentations"])
else
new_data
end
# If the remote does not have history information, we need to manage it ourselves
new_data =
if not remote_history_exists? do
changed? =
Pleroma.Constants.status_updatable_fields()
|> Enum.any?(fn field -> Map.get(old_data, field) != Map.get(new_data, field) end)
%{updated_object: updated_object} =
new_data
|> Object.Updater.maybe_update_history(old_data,
updated: changed?,
use_history_in_new_object?: false
)
updated_object
else
new_data
end
Map.merge(new_data, internal_fields)
end
defp maybe_reinject_internal_fields(_, new_data), do: new_data
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
defp reinject_object(%Object{data: %{}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with data <- maybe_reinject_internal_fields(object, new_data),
{:ok, data, _} <- ObjectValidator.validate(data, %{}),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object}
with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
{:ok, new_data} <- MRF.filter(new_data),
{:ok, new_object, _} <-
Object.Updater.do_update_and_invalidate_cache(
object,
new_data,
_touch_changeset? = true
) do
{:ok, new_object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
@@ -86,20 +39,11 @@ defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data)
end
end
defp reinject_object(%Object{} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
defp reinject_object(_, new_data) do
with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
{:ok, object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
e -> e
end
end


@@ -5,6 +5,9 @@
defmodule Pleroma.Object.Updater do
require Pleroma.Constants
alias Pleroma.Object
alias Pleroma.Repo
def update_content_fields(orig_object_data, updated_object) do
Pleroma.Constants.status_updatable_fields()
|> Enum.reduce(
@@ -237,4 +240,49 @@ def do_with_history(object, fun) do
{:history_items, e} -> e
end
end
defp maybe_touch_changeset(changeset, true) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_touch_changeset(changeset, _), do: changeset
def do_update_and_invalidate_cache(orig_object, updated_object, touch_changeset? \\ false) do
orig_object_ap_id = updated_object["id"]
orig_object_data = orig_object.data
%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = make_new_object_data_from_update_object(orig_object_data, updated_object)
changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
|> maybe_touch_changeset(touch_changeset?)
with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end
{:ok, new_object, updated}
end
end
end
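
A short usage note on do_update_and_invalidate_cache/3 above: the optional third argument is passed to maybe_touch_changeset/2 and decides whether updated_at is bumped on the stored object. In this commit the object fetcher opts in, while the Update side-effect handler in the next hunk keeps the default of false. Both call sites, condensed from this diff (names as they appear in the source):

  # Refetch path (Pleroma.Object.Fetcher): bump updated_at when reinjecting.
  {:ok, new_object, _updated} =
    Object.Updater.do_update_and_invalidate_cache(object, new_data, _touch_changeset? = true)

  # Federated Update handling (handle_update_object): default, no timestamp bump.
  {:ok, _, updated} =
    Object.Updater.do_update_and_invalidate_cache(orig_object, updated_object)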


@@ -428,31 +428,8 @@ defp handle_update_object(
end
if orig_object_data["type"] in Pleroma.Constants.updatable_object_types() do
%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = Object.Updater.make_new_object_data_from_update_object(orig_object_data, updated_object)
changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end
{:ok, _, updated} =
Object.Updater.do_update_and_invalidate_cache(orig_object, updated_object)
if updated do
object
@@ -460,7 +437,6 @@ defp handle_update_object(
|> ActivityPub.notify_and_stream()
end
end
end
{:ok, object, meta}
end


@@ -9,8 +9,12 @@ defmodule Pleroma.Object.FetcherTest do
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.Web.ActivityPub.ObjectValidator
require Pleroma.Constants
import Mock
import Pleroma.Factory
import Tesla.Mock
setup do
@@ -284,6 +288,8 @@ test "it can refetch pruned objects" do
describe "refetching" do
setup do
insert(:user, ap_id: "https://mastodon.social/users/emelie")
object1 = %{
"id" => "https://mastodon.social/1",
"actor" => "https://mastodon.social/users/emelie",
@@ -293,10 +299,14 @@ test "it can refetch pruned objects" do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"summary" => ""
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z",
"updated" => "2023-05-09 23:43:20Z"
}
{:ok, local_object1, _} = ObjectValidator.validate(object1, [])
object2 = %{
"id" => "https://mastodon.social/2",
"actor" => "https://mastodon.social/users/emelie",
@@ -306,8 +316,10 @@ test "it can refetch pruned objects" do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z",
"updated" => "2023-05-09 23:43:25Z",
"formerRepresentations" => %{
"type" => "OrderedCollection",
"orderedItems" => [
@@ -319,14 +331,18 @@ test "it can refetch pruned objects" do
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"summary" => ""
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z",
"updated" => "2023-05-09 23:43:21Z"
}
],
"totalItems" => 1
}
}
{:ok, local_object2, _} = ObjectValidator.validate(object2, [])
mock(fn
%{
method: :get,
@@ -335,7 +351,7 @@ test "it can refetch pruned objects" do
%Tesla.Env{
status: 200,
headers: [{"content-type", "application/activity+json"}],
body: Jason.encode!(object1)
body: Jason.encode!(object1 |> Map.put("updated", "2023-05-09 23:44:20Z"))
}
%{
@@ -345,7 +361,7 @@ test "it can refetch pruned objects" do
%Tesla.Env{
status: 200,
headers: [{"content-type", "application/activity+json"}],
body: Jason.encode!(object2)
body: Jason.encode!(object2 |> Map.put("updated", "2023-05-09 23:44:20Z"))
}
%{
@@ -370,7 +386,7 @@ test "it can refetch pruned objects" do
apply(HttpRequestMock, :request, [env])
end)
%{object1: object1, object2: object2}
%{object1: local_object1, object2: local_object2}
end
test "it keeps formerRepresentations if remote does not have this attr", %{object1: object1} do
@@ -388,8 +404,9 @@ test "it keeps formerRepresentations if remote does not have this attr", %{objec
"bcc" => [],
"bto" => [],
"cc" => [],
"to" => [],
"summary" => ""
"to" => [Pleroma.Constants.as_public()],
"summary" => "",
"published" => "2023-05-08 23:43:20Z"
}
],
"totalItems" => 1
@@ -467,6 +484,53 @@ test "it adds to formerRepresentations if the remote does not have one and the o
}
} = refetched.data
end
test "it keeps the history intact if only updated time has changed",
%{object1: object1} do
full_object1 =
object1
|> Map.merge(%{
"updated" => "2023-05-08 23:43:47Z",
"formerRepresentations" => %{
"type" => "OrderedCollection",
"orderedItems" => [
%{"type" => "Note", "content" => "mew mew 1"}
],
"totalItems" => 1
}
})
{:ok, o} = Object.create(full_object1)
assert {:ok, refetched} = Fetcher.refetch_object(o)
assert %{
"content" => "test 1",
"formerRepresentations" => %{
"orderedItems" => [
%{"content" => "mew mew 1"}
],
"totalItems" => 1
}
} = refetched.data
end
test "it goes through ObjectValidator and MRF", %{object2: object2} do
with_mock Pleroma.Web.ActivityPub.MRF, [:passthrough],
filter: fn
%{"type" => "Note"} = object ->
{:ok, Map.put(object, "content", "MRFd content")}
arg ->
passthrough([arg])
end do
{:ok, o} = Object.create(object2)
assert {:ok, refetched} = Fetcher.refetch_object(o)
assert %{"content" => "MRFd content"} = refetched.data
end
end
end
describe "fetch with history" do