Use multiple hackney pools
Split outgoing HTTP connections into three hackney pools:

* federation (AP, Salmon)
* media (rich media, media proxy)
* upload (uploader proxy)

Each "part" stops fighting the other ones for connections: previously, a large burst of outbound federation could make the media proxy fail to check out a connection in time. Media and uploaded media are split because the upload pool has all of its connections going to the same host (the uploader upstream), and it also gets a longer default retention period for its connections.
commit 4aff4efa8d
parent 97694eb451
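For a sense of what the split buys, a minimal sketch (not part of the commit; the URL is a placeholder, and the call and option shapes follow the hunks below): a rich-media fetch checks its connection out of the `:media` pool instead of competing with federation deliveries.

```elixir
# Illustrative only: media-side traffic opts into the :media pool, while
# Pleroma.HTTP.Connection's defaults (see the hunks below) route federation
# requests through :federation.
{:ok, %Tesla.Env{body: html}} =
  Pleroma.HTTP.get("https://remote.example/some/article", [], pool: :media)
```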
@@ -15,6 +15,20 @@
   seconds_valid: 60,
   method: Pleroma.Captcha.Kocaptcha
 
+config :pleroma, :hackney_pools,
+  federation: [
+    max_connections: 50,
+    timeout: 150_000
+  ],
+  media: [
+    max_connections: 50,
+    timeout: 150_000
+  ],
+  upload: [
+    max_connections: 25,
+    timeout: 300_000
+  ]
+
 config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
 
 # Upload configuration
@@ -22,7 +36,14 @@
   uploader: Pleroma.Uploaders.Local,
   filters: [],
   proxy_remote: false,
-  proxy_opts: []
+  proxy_opts: [
+    redirect_on_failure: false,
+    max_body_length: 25 * 1_048_576,
+    http: [
+      follow_redirect: true,
+      pool: :upload
+    ]
+  ]
 
 config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
 
@@ -214,7 +235,16 @@
   reject: [],
   accept: []
 
-config :pleroma, :media_proxy, enabled: false
+config :pleroma, :media_proxy,
+  enabled: false,
+  proxy_opts: [
+    redirect_on_failure: false,
+    max_body_length: 25 * 1_048_576,
+    http: [
+      follow_redirect: true,
+      pool: :media
+    ]
+  ]
 
 config :pleroma, :chat, enabled: true
 
@@ -234,3 +234,20 @@ curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerando
 * Pleroma.Web.Metadata.Providers.OpenGraph
 * Pleroma.Web.Metadata.Providers.TwitterCard
 * `unfurl_nsfw`: If set to `true` nsfw attachments will be shown in previews
+
+## :hackney_pools
+
+Advanced. Tweaks Hackney (HTTP client) connection pools.
+
+There are three pools in use:
+
+* `:federation` for the federation jobs.
+  You may want this pool's max_connections to be at least equal to the number of federator jobs plus retry queue jobs.
+* `:media` for rich media and the media proxy
+* `:upload` for uploaded media (if using a remote uploader with `proxy_remote: true`)
+
+For each pool, the options are:
+
+* `max_connections` - how many connections the pool can hold
+* `timeout` - retention duration for connections
+
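The documentation added above suggests sizing `:federation` against the number of federator and retry queue jobs. A hedged sketch of what such an override could look like in an instance's own config file (the numbers are arbitrary examples, not recommendations):

```elixir
# Example override in the instance's own config: give the :federation pool
# enough connections for roughly 100 concurrent federator and retry queue
# jobs, keeping the default retention timeout.
config :pleroma, :hackney_pools,
  federation: [
    max_connections: 100,
    timeout: 150_000
  ]
```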
@@ -101,7 +101,10 @@ def start(_type, _args) do
         ],
         id: :cachex_idem
       ),
-      worker(Pleroma.FlakeId, []),
+      worker(Pleroma.FlakeId, [])
+    ] ++
+      hackney_pool_children() ++
+      [
       worker(Pleroma.Web.Federator.RetryQueue, []),
       worker(Pleroma.Web.Federator, []),
       worker(Pleroma.Stats, []),
@@ -121,6 +124,20 @@ def start(_type, _args) do
     Supervisor.start_link(children, opts)
   end
 
+  def enabled_hackney_pools() do
+    [:media] ++
+      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
+        [:federation]
+      else
+        []
+      end ++
+      if Pleroma.Config.get([Pleroma.Uploader, :proxy_remote]) do
+        [:uploadproxy]
+      else
+        []
+      end
+  end
+
   if Mix.env() == :test do
     defp streamer_child(), do: []
     defp chat_child(), do: []
@@ -137,4 +154,11 @@ defp chat_child() do
       end
     end
   end
+
+  defp hackney_pool_children() do
+    for pool <- enabled_hackney_pools() do
+      options = Pleroma.Config.get([:hackney_pools, pool])
+      :hackney_pool.child_spec(pool, options)
+    end
+  end
 end
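To make the supervision change concrete, a sketch of what `hackney_pool_children/0` would build, assuming Tesla's Hackney adapter is in use, `proxy_remote` is off, and the pool options come from the config hunk above:

```elixir
# Illustrative only: under those assumptions enabled_hackney_pools/0 returns
# [:media, :federation], so the supervisor picks up one hackney pool child
# spec per enabled pool.
[
  :hackney_pool.child_spec(:media, max_connections: 50, timeout: 150_000),
  :hackney_pool.child_spec(:federation, max_connections: 50, timeout: 150_000)
]
```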
@@ -10,7 +10,8 @@ defmodule Pleroma.HTTP.Connection do
   @hackney_options [
     timeout: 10000,
     recv_timeout: 20000,
-    follow_redirect: true
+    follow_redirect: true,
+    pool: :federation
   ]
   @adapter Application.get_env(:tesla, :adapter)
 
@@ -24,7 +24,8 @@ def put_file(upload) do
     extension = String.split(upload.name, ".") |> List.last()
     query = "#{cgi}?#{extension}"
 
-    with {:ok, %{status: 200, body: body}} <- @httpoison.post(query, file_data) do
+    with {:ok, %{status: 200, body: body}} <-
+           @httpoison.post(query, file_data, adapter: [pool: :default]) do
       remote_file_name = String.split(body) |> List.first()
       public_url = "#{files}/#{remote_file_name}.#{extension}"
       {:ok, {:url, public_url}}
@@ -1311,7 +1311,8 @@ def suggestions(%{assigns: %{user: user}} = conn, _) do
             [],
             adapter: [
               timeout: timeout,
-              recv_timeout: timeout
+              recv_timeout: timeout,
+              pool: :default
             ]
           ),
         {:ok, data} <- Jason.decode(body) do
@@ -28,7 +28,7 @@ def parse(url) do
 
   defp parse_url(url) do
     try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url)
+      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], pool: :media)
 
       html |> maybe_parse() |> get_parsed_data()
     rescue
@@ -20,7 +20,7 @@ defp get_oembed_url(nodes) do
   end
 
   defp get_oembed_data(url) do
-    {:ok, %Tesla.Env{body: json}} = Pleroma.HTTP.get(url)
+    {:ok, %Tesla.Env{body: json}} = Pleroma.HTTP.get(url, [], pool: :media)
 
     {:ok, data} = Jason.decode(json)
 