Merge branch 'dialyzer-fixes' into 'develop'

Dialyzer fixes

See merge request pleroma/pleroma!4047
feld 2024-01-27 15:09:54 +00:00
commit b1659b7755
11 changed files with 20 additions and 13 deletions
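
For context: Dialyzer compares each @spec against the success typing it infers from the code, and most of the hunks below simply bring the two back into agreement. A minimal sketch (hypothetical module, not part of this commit) of the kind of mismatch it reports, assuming the dialyxir Mix task is available to run it via `mix dialyzer`:

    defmodule ContractExample do
      # The success typing of fetch/1 is {:error, :not_found}, which can never
      # satisfy the declared contract, so Dialyzer reports an invalid contract.
      @spec fetch(map()) :: {:ok, String.t()}
      def fetch(_params), do: {:error, :not_found}
    end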

@@ -126,9 +126,15 @@ defp rewrite_entries(
            <<pos::integer-big-size(unquote(size)), rest::bits>>,
            acc
          ) do
-      rewrite_entries(unquote(size), offset, rest, [
-        acc | <<pos + offset::integer-big-size(unquote(size))>>
-      ])
+      rewrite_entries(
+        unquote(size),
+        offset,
+        rest,
+        acc ++
+          [
+            <<pos + offset::integer-big-size(unquote(size))>>
+          ]
+      )
     end
   end
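
A note on the hunk above: `[acc | <<...>>]` puts a binary in the tail position and so builds an improper list, which is valid iodata but not the proper list the surrounding code is typed for, hence the Dialyzer complaint; `acc ++ [<<...>>]` appends while keeping a flat, proper list. A quick illustration with made-up values:

    acc = [<<1>>, <<2>>]
    bin = <<3>>

    improper = [acc | bin]  # [[<<1>>, <<2>>] | <<3>>], improper list but valid iodata
    proper = acc ++ [bin]   # [<<1>>, <<2>>, <<3>>], a proper list

    # Both flatten to the same bytes:
    IO.iodata_to_binary(improper) == IO.iodata_to_binary(proper)  # => true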

@@ -121,7 +121,7 @@ defp prepare_log_data(%{actor: actor, action: action} = attrs) do
   defp prepare_log_data(attrs), do: attrs

-  @spec insert_log(log_params()) :: {:ok, ModerationLog} | {:error, any}
+  @spec insert_log(log_params()) :: {:ok, ModerationLog.t()} | {:error, any}
   def insert_log(%{actor: %User{}, subject: subjects, permission: permission} = attrs) do
     data =
       attrs
@@ -248,7 +248,8 @@ def insert_log(%{actor: %User{} = actor, action: "chat_message_delete", subject_
     |> insert_log_entry_with_message()
   end

-  @spec insert_log_entry_with_message(ModerationLog) :: {:ok, ModerationLog} | {:error, any}
+  @spec insert_log_entry_with_message(ModerationLog.t()) ::
+          {:ok, ModerationLog.t()} | {:error, any}
   defp insert_log_entry_with_message(entry) do
     entry.data["message"]
     |> put_in(get_log_entry_message(entry))
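
Background for the two hunks above: in a typespec a bare module name such as `ModerationLog` denotes the atom itself, not the struct; the struct type is `ModerationLog.t()`, conventionally defined with `@type t`. A minimal sketch using a hypothetical module:

    defmodule ExampleLog do
      defstruct [:data]

      # The struct type that ExampleLog.t() refers to in specs:
      @type t :: %__MODULE__{data: map() | nil}

      # t() (the struct), not ExampleLog (the atom), belongs in the contract:
      @spec touch(t()) :: {:ok, t()} | {:error, any()}
      def touch(%__MODULE__{} = entry), do: {:ok, entry}
    end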

@@ -20,5 +20,5 @@ defmodule Pleroma.Search.SearchBackend do
   is what contains the actual content and there is no need for filtering when removing
   from index.
   """
-  @callback remove_from_index(object :: Pleroma.Object.t()) :: {:ok, any()} | {:error, any()}
+  @callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}
 end

@@ -27,7 +27,7 @@ def key_id_to_actor_id(key_id) do
       _ ->
         case Pleroma.Web.WebFinger.finger(maybe_ap_id) do
-          %{"ap_id" => ap_id} -> {:ok, ap_id}
+          {:ok, %{"ap_id" => ap_id}} -> {:ok, ap_id}
           _ -> {:error, maybe_ap_id}
         end
     end
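
On the hunk above: `Pleroma.Web.WebFinger.finger/1` returns its result wrapped in an ok-tuple (the updated test mock near the end of this diff uses the same shape), so the old bare-map pattern would not match it and lookups fell through to `{:error, maybe_ap_id}`. Roughly:

    # Hypothetical return value, shaped like the mock in the test below:
    finger_result = {:ok, %{"ap_id" => "https://gensokyo.2hu/users/raymoo"}}

    case finger_result do
      {:ok, %{"ap_id" => ap_id}} -> {:ok, ap_id}
      _ -> {:error, "acct:raymoo@gensokyo.2hu"}
    end
    # => {:ok, "https://gensokyo.2hu/users/raymoo"}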

@@ -175,7 +175,7 @@ defp prepare_upload(%Plug.Upload{} = file, opts) do
   defp prepare_upload(%{img: "data:image/" <> image_data}, opts) do
     parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
     data = Base.decode64!(parsed["data"], ignore: :whitespace)
-    hash = Base.encode16(:crypto.hash(:sha256, data), lower: true)
+    hash = Base.encode16(:crypto.hash(:sha256, data), case: :upper)

     with :ok <- check_binary_size(data, opts.size_limit),
          tmp_path <- tempfile_for_image(data),
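
On the hunk above: `Base.encode16/2` has no `lower: true` option; casing is controlled by `case: :upper | :lower`, with `:upper` as the default, so the stray keyword was most likely being ignored and the output was already uppercase. The new call states that explicitly:

    Base.encode16(<<255, 0>>)                # => "FF00" (:upper is the default)
    Base.encode16(<<255, 0>>, case: :lower)  # => "ff00"
    Base.encode16(<<255, 0>>, case: :upper)  # => "FF00"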

@@ -105,7 +105,7 @@ def ttl(data, url) do
           {:ok, integer() | :noop} | {:error, :no_key}
   def set_ttl_based_on_image(data, url) do
     case get_ttl_from_image(data, url) do
-      {:ok, ttl} when is_number(ttl) ->
+      ttl when is_number(ttl) ->
         ttl = ttl * 1000

         case @cachex.expire_at(:rich_media_cache, url, ttl) do

@@ -45,6 +45,6 @@ defp get_expiration_timestamp(params) when is_map(params) do
       |> Map.get("X-Amz-Date")
       |> Timex.parse("{ISO:Basic:Z}")

-    {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
+    Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
   end
 end
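
On the hunk above: the helper now returns the expiry as a bare Unix timestamp instead of wrapping it in `{:ok, ...}`, which is what the adjusted test at the end of this diff asserts against. A sketch of the computation, assuming the Timex dependency and made-up header values:

    {:ok, date} = Timex.parse("20240127T150954Z", "{ISO:Basic:Z}")

    # "X-Amz-Expires" is a validity window in seconds; the result is a plain integer
    expires_at = Timex.to_unix(date) + String.to_integer("600")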

@@ -319,7 +319,7 @@ defp find_user_by_nickname(nickname) do
     user = User.get_cached_by_nickname(nickname)

     if user == nil do
-      {:not_found, nil}
+      {:error, :not_found}
     else
       {:ok, user}
     end

@@ -113,7 +113,7 @@ test "it deduces the actor id for gotoSocial" do

   test "it calls webfinger for 'acct:' accounts" do
     with_mock(Pleroma.Web.WebFinger,
-      finger: fn _ -> %{"ap_id" => "https://gensokyo.2hu/users/raymoo"} end
+      finger: fn _ -> {:ok, %{"ap_id" => "https://gensokyo.2hu/users/raymoo"}} end
     ) do
       assert Signature.key_id_to_actor_id("acct:raymoo@gensokyo.2hu") ==
                {:ok, "https://gensokyo.2hu/users/raymoo"}

@@ -22,7 +22,7 @@ test "s3 signed url is parsed correct for expiration time" do
     expire_time =
       Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)

-    assert {:ok, expire_time} == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
+    assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
   end

   test "s3 signed url is parsed and correct ttl is set for rich media" do