[Credo] fix Credo.Check.Readability.MaxLineLength
parent 8cd3eada7d
commit c42d34b2ec
@@ -73,7 +73,7 @@ def handle_call({:validate, token, captcha, answer_data}, _from, state) do
 secret = KeyGenerator.generate(secret_key_base, token <> "_encrypt")
 sign_secret = KeyGenerator.generate(secret_key_base, token <> "_sign")
 
-# If the time found is less than (current_time - seconds_valid), then the time has already passed.
+# If the time found is less than (current_time-seconds_valid) then the time has already passed
 # Later we check that the time found is more than the presumed invalidatation time, that means
 # that the data is still valid and the captcha can be checked
 seconds_valid = Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid])
@@ -92,8 +92,8 @@ def puts(text_or_lines) do
 
 # surrond one/five line clippy with blank lines around to not fuck up the layout
 #
-# yes this fix sucks but it's good enough, have you ever seen a release of windows wihtout some butched
-# features anyway?
+# yes this fix sucks but it's good enough, have you ever seen a release of windows
+# without some butched features anyway?
 lines =
 if length(lines) == 1 or length(lines) == 5 do
 [""] ++ lines ++ [""]
@@ -10,6 +10,7 @@ defmodule Pleroma.Formatter do
 
 @markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/
 @link_regex ~r{((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+}ui
+# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
 
 @auto_linker_config hashtag: true,
 hashtag_handler: &Pleroma.Formatter.hashtag_handler/4,
@@ -38,6 +38,7 @@ defp fetch_user_and_token(token) do
 preload: [user: user]
 )
 
+# credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
 with %Token{user: %{info: %{deactivated: false} = _} = user} = token_record <- Repo.one(query) do
 {:ok, user, token_record}
 end
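The Formatter and OAuth-plug hunks above suppress the check instead of wrapping: # credo:disable-for-next-line silences the named check for the line directly below it, and # credo:disable-for-previous-line for the line directly above. A minimal sketch of both placements, with a made-up module name and attribute values borrowed from elsewhere in this commit:

    defmodule CredoSuppressionSketch do
      # credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
      @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since if-unmodified-since if-none-match if-range range)

      @static_paths ~w(index.html static finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc)
      # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength

      def keep_req_headers, do: @keep_req_headers
      def static_paths, do: @static_paths
    end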
@@ -3,10 +3,12 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.ReverseProxy do
-@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since if-unmodified-since if-none-match if-range range)
+@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
+~w(if-unmodified-since if-none-match if-range range)
 @resp_cache_headers ~w(etag date last-modified cache-control)
 @keep_resp_headers @resp_cache_headers ++
-~w(content-type content-disposition content-encoding content-range accept-ranges vary)
+~w(content-type content-disposition content-encoding content-range) ++
+~w(accept-ranges vary)
 @default_cache_control_header "public, max-age=1209600"
 @valid_resp_codes [200, 206, 304]
 @max_read_duration :timer.seconds(30)
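Note on the @keep_req_headers and @keep_resp_headers changes above: each long ~w word-list sigil is split into two sigils joined with ++. Module attributes are evaluated at compile time, so the concatenation happens once during compilation and the resulting flat list of header names is identical to the original one-liner. A small sketch with made-up names:

    defmodule HeaderListSketch do
      # Two ~w sigils joined with ++ collapse into a single list at compile time.
      @keep_headers ~w(accept user-agent accept-encoding cache-control) ++
                      ~w(if-modified-since if-none-match range)

      def keep_headers, do: @keep_headers
    end

    # HeaderListSketch.keep_headers()
    # => ["accept", "user-agent", "accept-encoding", "cache-control",
    #     "if-modified-since", "if-none-match", "range"]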
@@ -282,8 +284,8 @@ defp build_resp_cache_headers(headers, _opts) do
 headers
 
 has_cache? ->
-# There's caching header present but no cache-control -- we need to explicitely override it to public
-# as Plug defaults to "max-age=0, private, must-revalidate"
+# There's caching header present but no cache-control -- we need to explicitely override it
+# to public as Plug defaults to "max-age=0, private, must-revalidate"
 List.keystore(headers, "cache-control", 0, {"cache-control", "public"})
 
 true ->
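On the has_cache? branch above: List.keystore/4 replaces the first {"cache-control", _} tuple it finds, or appends the new tuple when the key is absent, so forcing "public" works whether or not the response already carried a cache-control header. A quick illustration with made-up header lists:

    # No cache-control present: the tuple is appended.
    headers = [{"date", "Mon, 01 Jan 2024 00:00:00 GMT"}, {"etag", "\"abc\""}]
    List.keystore(headers, "cache-control", 0, {"cache-control", "public"})
    # => [{"date", "Mon, 01 Jan 2024 00:00:00 GMT"}, {"etag", "\"abc\""},
    #     {"cache-control", "public"}]

    # cache-control already present: the existing tuple is replaced in place.
    headers = [{"cache-control", "private"}, {"etag", "\"abc\""}]
    List.keystore(headers, "cache-control", 0, {"cache-control", "public"})
    # => [{"cache-control", "public"}, {"etag", "\"abc\""}]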
@@ -6,7 +6,8 @@ defmodule Pleroma.Uploaders.S3 do
 @behaviour Pleroma.Uploaders.Uploader
 require Logger
 
-# The file name is re-encoded with S3's constraints here to comply with previous links with less strict filenames
+# The file name is re-encoded with S3's constraints here to comply with previous
+# links with less strict filenames
 def get_file(file) do
 config = Pleroma.Config.get([__MODULE__])
 bucket = Keyword.fetch!(config, :bucket)
@@ -30,6 +30,7 @@ defmodule Pleroma.User do
 
 @primary_key {:id, Pleroma.FlakeId, autogenerate: true}
 
+# credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
 @email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/
 
 @strict_local_nickname_regex ~r/^[a-zA-Z\d]+$/
@@ -435,7 +436,8 @@ def get_by_ap_id(ap_id) do
 Repo.get_by(User, ap_id: ap_id)
 end
 
-# This is mostly an SPC migration fix. This guesses the user nickname (by taking the last part of the ap_id and the domain) and tries to get that user
+# This is mostly an SPC migration fix. This guesses the user nickname by taking the last part
+# of the ap_id and the domain and tries to get that user
 def get_by_guessed_nickname(ap_id) do
 domain = URI.parse(ap_id).host
 name = List.last(String.split(ap_id, "/"))
@@ -170,7 +170,8 @@ def create(%{to: to, actor: actor, context: context, object: object} = params) d
 additional
 ),
 {:ok, activity} <- insert(create_data, local),
-# Changing note count prior to enqueuing federation task in order to avoid race conditions on updating user.info
+# Changing note count prior to enqueuing federation task in order to avoid
+# race conditions on updating user.info
 {:ok, _actor} <- increase_note_count_if_public(actor, activity),
 :ok <- maybe_federate(activity) do
 {:ok, activity}
@@ -320,7 +321,8 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
 "deleted_activity_id" => activity && activity.id
 },
 {:ok, activity} <- insert(data, local),
-# Changing note count prior to enqueuing federation task in order to avoid race conditions on updating user.info
+# Changing note count prior to enqueuing federation task in order to avoid
+# race conditions on updating user.info
 {:ok, _actor} <- decrease_note_count_if_public(user, object),
 :ok <- maybe_federate(activity) do
 {:ok, activity}
@@ -45,13 +45,14 @@ defp check_ftl_removal(
 
 defp check_replace(%{"object" => %{"content" => content, "summary" => summary}} = message) do
 {content, summary} =
-Enum.reduce(Pleroma.Config.get([:mrf_keyword, :replace]), {content, summary}, fn {pattern,
-replacement},
-{content_acc,
-summary_acc} ->
+Enum.reduce(
+Pleroma.Config.get([:mrf_keyword, :replace]),
+{content, summary},
+fn {pattern, replacement}, {content_acc, summary_acc} ->
 {String.replace(content_acc, pattern, replacement),
 String.replace(summary_acc, pattern, replacement)}
-end)
+end
+)
 
 {:ok,
 message
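The reformatted Enum.reduce/3 call above threads a {content, summary} tuple through the accumulator, so a single pass applies every configured {pattern, replacement} pair to both the post body and its summary. A standalone sketch with hardcoded replacement pairs (the real code reads them from Pleroma.Config under [:mrf_keyword, :replace]):

    # Hypothetical replacement pairs; a regex and a plain-string pattern both work
    # with String.replace/3.
    replacements = [{~r/badword/, "***"}, {"bannedsite.example", "[link removed]"}]

    {content, summary} =
      Enum.reduce(
        replacements,
        {"visit bannedsite.example for badword", "badword inside"},
        fn {pattern, replacement}, {content_acc, summary_acc} ->
          {String.replace(content_acc, pattern, replacement),
           String.replace(summary_acc, pattern, replacement)}
        end
      )

    # content == "visit [link removed] for ***"
    # summary == "*** inside"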
@@ -6,7 +6,8 @@ defmodule Pleroma.Web.ControllerHelper do
 use Pleroma.Web, :controller
 
 def oauth_scopes(params, default) do
-# Note: `scopes` is used by Mastodon — supporting it but sticking to OAuth's standard `scope` wherever we control it
+# Note: `scopes` is used by Mastodon — supporting it but sticking to
+# OAuth's standard `scope` wherever we control it
 Pleroma.Web.OAuth.parse_scopes(params["scope"] || params["scopes"], default)
 end
 
@@ -26,6 +26,7 @@ defmodule Pleroma.Web.Endpoint do
 from: :pleroma,
 only:
 ~w(index.html static finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc)
+# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
 )
 
 # Code reloading can be explicitly enabled under the
@@ -19,7 +19,8 @@ def url(url) do
 else
 secret = Application.get_env(:pleroma, Pleroma.Web.Endpoint)[:secret_key_base]
 
-# Must preserve `%2F` for compatibility with S3 (https://git.pleroma.social/pleroma/pleroma/issues/580)
+# Must preserve `%2F` for compatibility with S3
+# https://git.pleroma.social/pleroma/pleroma/issues/580
 replacement = get_replacement(url, ":2F:")
 
 # The URL is url-decoded and encoded again to ensure it is correctly encoded and not twice.
@@ -88,7 +88,7 @@ defp build_attachments(%{data: %{"attachment" => attachments}}) do
 
 # TODO: Add additional properties to objects when we have the data available.
 # Also, Whatsapp only wants JPEG or PNGs. It seems that if we add a second og:image
-# object when a Video or GIF is attached it will display that in the Whatsapp Rich Preview.
+# object when a Video or GIF is attached it will display that in Whatsapp Rich Preview.
 case media_type do
 "audio" ->
 [
@@ -97,7 +97,8 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
 | acc
 ]
 
-# TODO: Need the true width and height values here or Twitter renders an iFrame with a bad aspect ratio
+# TODO: Need the true width and height values here or Twitter renders an iFrame with
+# a bad aspect ratio
 "video" ->
 [
 {:meta, [property: "twitter:card", content: "player"], []},
@@ -82,8 +82,8 @@ def delete_if_exists(user, token) do
 end
 
 # Some webpush clients (e.g. iOS Toot!) use an non urlsafe base64 as an encoding for the key.
-# However, the web push rfs specify to use base64 urlsafe, and the `web_push_encryption` library we use
-# requires the key to be properly encoded. So we just convert base64 to urlsafe base64.
+# However, the web push rfs specify to use base64 urlsafe, and the `web_push_encryption` library
+# we use requires the key to be properly encoded. So we just convert base64 to urlsafe base64.
 defp ensure_base64_urlsafe(string) do
 string
 |> String.replace("+", "-")
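Context for the rewrapped comment above: ensure_base64_urlsafe/1 only has to map the characters that differ between the standard and URL-safe Base64 alphabets. Only the "+" -> "-" replacement is visible in this hunk; the "/" -> "_" step in the sketch below is an assumption about the rest of the helper:

    # Sketch of the conversion; the second replace is assumed, not shown in the hunk.
    ensure_base64_urlsafe = fn string ->
      string
      |> String.replace("+", "-")
      |> String.replace("/", "_")
    end

    ensure_base64_urlsafe.("fq+Vx/Zz==")
    # => "fq-Vx_Zz=="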
@@ -102,7 +102,8 @@ test "returns error status on non-binary input" do
 end
 end
 
-# Note: implementation-specific (e.g. Instance) details of set_unreachable/1 should be tested in implementation-specific tests
+# Note: implementation-specific (e.g. Instance) details of set_unreachable/1
+# should be tested in implementation-specific tests
 describe "set_unreachable/1" do
 test "returns error status on non-binary input" do
 assert {:error, _} = Instances.set_unreachable(nil)
@@ -472,6 +472,7 @@ test "fetches a user by uri" do
 
 # Also fetches the feed.
 # assert Activity.get_create_by_object_ap_id("tag:mastodon.social,2017-04-05:objectId=1641750:objectType=Status")
+# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
 end
 end
 end