Merge branch 'multiple-hackney-pools' into 'develop'
Use multiple hackney pools

See merge request pleroma/pleroma!739

commit bd5086731e
@@ -15,6 +15,20 @@
   seconds_valid: 60,
   method: Pleroma.Captcha.Kocaptcha
 
+config :pleroma, :hackney_pools,
+  federation: [
+    max_connections: 50,
+    timeout: 150_000
+  ],
+  media: [
+    max_connections: 50,
+    timeout: 150_000
+  ],
+  upload: [
+    max_connections: 25,
+    timeout: 300_000
+  ]
+
 config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
 
 # Upload configuration
@@ -22,7 +36,14 @@
   uploader: Pleroma.Uploaders.Local,
   filters: [],
   proxy_remote: false,
-  proxy_opts: []
+  proxy_opts: [
+    redirect_on_failure: false,
+    max_body_length: 25 * 1_048_576,
+    http: [
+      follow_redirect: true,
+      pool: :upload
+    ]
+  ]
 
 config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
 
@@ -214,7 +235,16 @@
   reject: [],
   accept: []
 
-config :pleroma, :media_proxy, enabled: false
+config :pleroma, :media_proxy,
+  enabled: false,
+  proxy_opts: [
+    redirect_on_failure: false,
+    max_body_length: 25 * 1_048_576,
+    http: [
+      follow_redirect: true,
+      pool: :media
+    ]
+  ]
 
 config :pleroma, :chat, enabled: true
 
@@ -234,3 +234,20 @@ curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerando
 * Pleroma.Web.Metadata.Providers.OpenGraph
 * Pleroma.Web.Metadata.Providers.TwitterCard
 * `unfurl_nsfw`: If set to `true`, nsfw attachments will be shown in previews
+
+## :hackney_pools
+
+Advanced. Tweaks Hackney (HTTP client) connection pools.
+
+There are three pools used:
+
+* `:federation` for the federation jobs.
+  You may want this pool's `max_connections` to be at least equal to the number of federator jobs + retry queue jobs.
+* `:media` for rich media and the media proxy
+* `:upload` for uploaded media (if using a remote uploader and `proxy_remote: true`)
+
+For each pool, the options are:
+
+* `max_connections` - how many connections a pool can hold
+* `timeout` - retention duration for connections
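A rough sketch of how an admin might tune these pools: the pool names and option keys below are the defaults added in the config hunk above, while the numbers are purely illustrative and would go in the instance's own configuration file.

```elixir
# Hypothetical instance-level override; keys match the defaults added in this
# commit, the values are examples only.
config :pleroma, :hackney_pools,
  federation: [
    max_connections: 100,  # at least federator jobs + retry queue jobs
    timeout: 150_000
  ],
  media: [
    max_connections: 50,
    timeout: 150_000
  ],
  upload: [
    max_connections: 25,
    timeout: 300_000
  ]
```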
@@ -101,7 +101,10 @@ def start(_type, _args) do
           ],
           id: :cachex_idem
         ),
-        worker(Pleroma.FlakeId, []),
+        worker(Pleroma.FlakeId, [])
+      ] ++
+        hackney_pool_children() ++
+        [
         worker(Pleroma.Web.Federator.RetryQueue, []),
         worker(Pleroma.Web.Federator, []),
         worker(Pleroma.Stats, []),
@@ -121,6 +124,20 @@ def start(_type, _args) do
     Supervisor.start_link(children, opts)
   end
 
+  def enabled_hackney_pools() do
+    [:media] ++
+      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
+        [:federation]
+      else
+        []
+      end ++
+      if Pleroma.Config.get([Pleroma.Uploader, :proxy_remote]) do
+        [:uploadproxy]
+      else
+        []
+      end
+  end
+
   if Mix.env() == :test do
     defp streamer_child(), do: []
     defp chat_child(), do: []
@@ -137,4 +154,11 @@ defp chat_child() do
       end
     end
   end
+
+  defp hackney_pool_children() do
+    for pool <- enabled_hackney_pools() do
+      options = Pleroma.Config.get([:hackney_pools, pool])
+      :hackney_pool.child_spec(pool, options)
+    end
+  end
 end
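For a quick look at how busy the registered pools are at runtime, hackney's own pool statistics can be queried. A minimal sketch for an IEx session, assuming the pools started by `hackney_pool_children/0` above are running:

```elixir
# Minimal sketch (assumes a running node with the pools above registered).
# :hackney_pool.get_stats/1 is hackney's stats call; the keys used below are
# the commonly reported ones and may differ between hackney versions.
for pool <- Pleroma.Application.enabled_hackney_pools() do
  stats = :hackney_pool.get_stats(pool)
  IO.puts("#{pool}: in_use=#{stats[:in_use_count]} free=#{stats[:free_count]}")
end
```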
@@ -10,7 +10,8 @@ defmodule Pleroma.HTTP.Connection do
   @hackney_options [
     timeout: 10000,
     recv_timeout: 20000,
-    follow_redirect: true
+    follow_redirect: true,
+    pool: :federation
   ]
   @adapter Application.get_env(:tesla, :adapter)
 
@@ -24,7 +24,8 @@ def put_file(upload) do
     extension = String.split(upload.name, ".") |> List.last()
     query = "#{cgi}?#{extension}"
 
-    with {:ok, %{status: 200, body: body}} <- @httpoison.post(query, file_data) do
+    with {:ok, %{status: 200, body: body}} <-
+           @httpoison.post(query, file_data, adapter: [pool: :default]) do
       remote_file_name = String.split(body) |> List.first()
       public_url = "#{files}/#{remote_file_name}.#{extension}"
       {:ok, {:url, public_url}}
@@ -1311,7 +1311,8 @@ def suggestions(%{assigns: %{user: user}} = conn, _) do
              [],
              adapter: [
                timeout: timeout,
-               recv_timeout: timeout
+               recv_timeout: timeout,
+               pool: :default
              ]
            ),
          {:ok, data} <- Jason.decode(body) do
@@ -28,7 +28,7 @@ def parse(url) do
 
   defp parse_url(url) do
     try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url)
+      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], pool: :media)
 
       html |> maybe_parse() |> get_parsed_data()
     rescue
@@ -20,7 +20,7 @@ defp get_oembed_url(nodes) do
   end
 
   defp get_oembed_data(url) do
-    {:ok, %Tesla.Env{body: json}} = Pleroma.HTTP.get(url)
+    {:ok, %Tesla.Env{body: json}} = Pleroma.HTTP.get(url, [], pool: :media)
 
     {:ok, data} = Jason.decode(json)
 