Merge develop

commit 371d39e160
@@ -35,6 +35,7 @@ docs-build:
   - develop@pleroma/pleroma
   variables:
     MIX_ENV: dev
+    PLEROMA_BUILD_ENV: prod
   script:
   - mix deps.get
   - mix compile
CHANGELOG.md (14 changes)
@@ -6,14 +6,27 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ## [Unreleased]
 ### Added
 - MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`)
+- Configuration: `federation_incoming_replies_max_depth` option
 - Mastodon API: Support for the [`tagged` filter](https://github.com/tootsuite/mastodon/pull/9755) in [`GET /api/v1/accounts/:id/statuses`](https://docs.joinmastodon.org/api/rest/accounts/#get-api-v1-accounts-id-statuses)
+- Mastodon API, streaming: Add support for passing the token in the `Sec-WebSocket-Protocol` header
+- Admin API: Return users' tags when querying reports
+- Admin API: Return avatar and display name when querying users
+- Admin API: Allow querying user by ID
+- Added synchronization of following/followers counters for external users
 
 ### Fixed
 - Not being able to pin unlisted posts
+- Metadata rendering errors resulting in the entire page being inaccessible
+- Mastodon API: Handling of search timeouts (`/api/v1/search` and `/api/v2/search`)
+- Mastodon API: Embedded relationships not being properly rendered in the Account entity of Status entity
 
 ### Changed
+- Configuration: OpenGraph and TwitterCard providers enabled by default
 - Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
 
+### Changed
+- NodeInfo: Return `skipThreadContainment` in `metadata` for the `skip_thread_containment` option
+
 ## [1.0.0] - 2019-06-29
 ### Security
 - Mastodon API: Fix display names not being sanitized
@@ -75,6 +88,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - OAuth: added job to clean expired access tokens
 - MRF: Support for rejecting reports from specific instances (`mrf_simple`)
 - MRF: Support for stripping avatars and banner images from specific instances (`mrf_simple`)
+- Ability to reset avatar, profile banner and background
 - MRF: Support for running subchains.
 - Configuration: `skip_thread_containment` option
 - Configuration: `rate_limit` option. See `Pleroma.Plugs.RateLimiter` documentation for details.
@@ -218,6 +218,7 @@
   },
   registrations_open: true,
   federating: true,
+  federation_incoming_replies_max_depth: 100,
   federation_reachability_timeout_days: 7,
   federation_publisher_modules: [
     Pleroma.Web.ActivityPub.Publisher,
@@ -248,7 +249,14 @@
   remote_post_retention_days: 90,
   skip_thread_containment: true,
   limit_to_local_content: :unauthenticated,
-  dynamic_configuration: false
+  dynamic_configuration: false,
+  external_user_synchronization: [
+    enabled: false,
+    # every 2 hours
+    interval: 60 * 60 * 2,
+    max_retries: 3,
+    limit: 500
+  ]
 
 config :pleroma, :markup,
   # XXX - unfortunately, inline images must be enabled by default right now, because
@@ -358,7 +366,11 @@
   port: 9999
 
 config :pleroma, Pleroma.Web.Metadata,
-  providers: [Pleroma.Web.Metadata.Providers.RelMe],
+  providers: [
+    Pleroma.Web.Metadata.Providers.OpenGraph,
+    Pleroma.Web.Metadata.Providers.TwitterCard,
+    Pleroma.Web.Metadata.Providers.RelMe
+  ],
   unfurl_nsfw: false
 
 config :pleroma, :suggestions,
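The hunk above turns on the OpenGraph and TwitterCard providers by default. A minimal sketch of how an instance could opt back out in its own override config; the `config/prod.secret.exs` path and the reduced provider list are illustrative assumptions, not part of this commit:

```elixir
# config/prod.secret.exs — hypothetical per-instance override
use Mix.Config

# Keep only the link-verification provider, opting out of the new defaults above.
config :pleroma, Pleroma.Web.Metadata,
  providers: [Pleroma.Web.Metadata.Providers.RelMe],
  unfurl_nsfw: false
```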
@@ -28,7 +28,8 @@
 config :pleroma, :instance,
   email: "admin@example.com",
   notify_email: "noreply@example.com",
-  skip_thread_containment: false
+  skip_thread_containment: false,
+  federating: false
 
 # Configure your database
 config :pleroma, Pleroma.Repo,

@@ -76,6 +77,8 @@
 config :joken, default_signer: "yU8uHKq+yyAkZ11Hx//jcdacWc8yQ1bxAAGrplzB0Zwwjkp35v0RK9SO8WTPr6QZ"
 
+config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
+
 try do
   import_config "test.secret.exs"
 rescue
@@ -38,7 +38,9 @@ Authentication is required and the user must be an admin.
         "moderator": bool
       },
       "local": bool,
-      "tags": array
+      "tags": array,
+      "avatar": string,
+      "display_name": string
     },
     ...
   ]

@@ -174,13 +176,13 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - `nickname`
 - `status` BOOLEAN field, false value means deactivation.
 
-## `/api/pleroma/admin/users/:nickname`
+## `/api/pleroma/admin/users/:nickname_or_id`
 
 ### Retrieve the details of a user
 
 - Method: `GET`
 - Params:
-  - `nickname`
+  - `nickname` or `id`
 - Response:
   - On failure: `Not found`
   - On success: JSON of the user
@@ -331,6 +333,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
         "pleroma": {},
         "sensitive": false
       },
+      "tags": ["force_unlisted"],
       "statuses_count": 3,
       "url": "https://pleroma.example.org/users/user",
       "username": "user"

@@ -366,6 +369,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
         "pleroma": {},
         "sensitive": false
       },
+      "tags": ["force_unlisted"],
       "statuses_count": 1,
       "url": "https://pleroma.example.org/users/lain",
       "username": "lain"
@@ -87,6 +87,7 @@ config :pleroma, Pleroma.Emails.Mailer,
 * `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
 * `account_activation_required`: Require users to confirm their emails before signing in.
 * `federating`: Enable federation with other instances
+* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
 * `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
 * `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance
 * `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:

@@ -124,6 +125,12 @@ config :pleroma, Pleroma.Emails.Mailer,
 * `skip_thread_containment`: Skip the filtering of broken threads. The default is `false`.
 * `limit_to_local_content`: Limit unauthenticated users to searching for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
 * `dynamic_configuration`: Allow transferring configuration to DB with the subsequent customization from Admin api.
+* `external_user_synchronization`: Following/followers counters synchronization settings.
+  * `enabled`: Enables synchronization
+  * `interval`: Interval between synchronization runs.
+  * `max_retries`: Max retries per host. After exceeding the limit, the check will not be carried out for users from this host.
+  * `limit`: Batch size of users processed at one time.
 
 ## :logger
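These new `:instance` settings correspond to the defaults added in `config/config.exs` earlier in this diff. A sketch of how they might be tuned per instance — the concrete values below are illustrative assumptions, except where they repeat the shipped defaults:

```elixir
# Hypothetical per-instance override of the new :instance options
use Mix.Config

config :pleroma, :instance,
  # stop walking reply chains past this depth on incoming federation
  federation_incoming_replies_max_depth: 100,
  external_user_synchronization: [
    enabled: true,
    # "every 2 hours", matching the default shipped in config.exs above
    interval: 60 * 60 * 2,
    max_retries: 3,
    limit: 500
  ]
```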
@ -207,7 +207,7 @@ certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --
|
||||||
|
|
||||||
# Add it to the daily cron
|
# Add it to the daily cron
|
||||||
echo '#!/bin/sh
|
echo '#!/bin/sh
|
||||||
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook "systemctl reload nginx"
|
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "systemctl reload nginx"
|
||||||
' > /etc/cron.daily/renew-pleroma-cert
|
' > /etc/cron.daily/renew-pleroma-cert
|
||||||
chmod +x /etc/cron.daily/renew-pleroma-cert
|
chmod +x /etc/cron.daily/renew-pleroma-cert
|
||||||
|
|
||||||
|
@ -228,7 +228,7 @@ certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --
|
||||||
|
|
||||||
# Add it to the daily cron
|
# Add it to the daily cron
|
||||||
echo '#!/bin/sh
|
echo '#!/bin/sh
|
||||||
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook "rc-service nginx reload"
|
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "rc-service nginx reload"
|
||||||
' > /etc/periodic/daily/renew-pleroma-cert
|
' > /etc/periodic/daily/renew-pleroma-cert
|
||||||
chmod +x /etc/periodic/daily/renew-pleroma-cert
|
chmod +x /etc/periodic/daily/renew-pleroma-cert
|
||||||
|
|
||||||
|
|
|
@@ -1,6 +1,7 @@
 # Pleroma: A lightweight social networking server
 # Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Mix.Tasks.Pleroma.Ecto do
   @doc """
   Ensures the given repository's migrations path exists on the file system.
@@ -149,7 +149,7 @@ def run(["gen" | rest]) do
     uploads_dir =
       get_option(
         options,
-        :upload_dir,
+        :uploads_dir,
         "What directory should media uploads go in (when using the local uploader)?",
         Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
       )
@@ -155,7 +155,11 @@ def start(_type, _args) do
         start: {Pleroma.Web.Endpoint, :start_link, []},
         type: :supervisor
       },
-      %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}}
+      %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}},
+      %{
+        id: Pleroma.User.SynchronizationWorker,
+        start: {Pleroma.User.SynchronizationWorker, :start_link, []}
+      }
     ]
 
     # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
@@ -44,20 +44,20 @@ def get_by_ap_id(ap_id) do
     Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
   end
 
-  def normalize(_, fetch_remote \\ true)
+  def normalize(_, fetch_remote \\ true, options \\ [])
 
   # If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
   # Use this whenever possible, especially when walking graphs in an O(N) loop!
-  def normalize(%Object{} = object, _), do: object
-  def normalize(%Activity{object: %Object{} = object}, _), do: object
+  def normalize(%Object{} = object, _, _), do: object
+  def normalize(%Activity{object: %Object{} = object}, _, _), do: object
 
   # A hack for fake activities
-  def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _) do
+  def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do
     %Object{id: "pleroma:fake_object_id", data: data}
   end
 
   # Catch and log Object.normalize() calls where the Activity's child object is not
   # preloaded.
-  def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
+  def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do
     Logger.debug(
       "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
     )

@@ -67,7 +67,7 @@ def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
     normalize(ap_id, fetch_remote)
   end
 
-  def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
+  def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do
     Logger.debug(
       "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
     )

@@ -78,10 +78,14 @@ def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
   end
 
   # Old way, try fetching the object through cache.
-  def normalize(%{"id" => ap_id}, fetch_remote), do: normalize(ap_id, fetch_remote)
-  def normalize(ap_id, false) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
-  def normalize(ap_id, true) when is_binary(ap_id), do: Fetcher.fetch_object_from_id!(ap_id)
-  def normalize(_, _), do: nil
+  def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote)
+  def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
+
+  def normalize(ap_id, true, options) when is_binary(ap_id) do
+    Fetcher.fetch_object_from_id!(ap_id, options)
+  end
+
+  def normalize(_, _, _), do: nil
 
   # Owned objects can only be mutated by their owner
   def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
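`Object.normalize/3` now accepts an options keyword list that is forwarded to the fetcher when a remote fetch is needed. A brief usage sketch — the URL is a placeholder, and `:depth` is the option the federation code in this diff threads through:

```elixir
# Read from cache or fetch remotely, forwarding fetch options such as :depth.
object = Pleroma.Object.normalize("https://remote.example/objects/1", true, depth: 1)

# Cache-only lookup; the options default to [] for this clause.
cached = Pleroma.Object.normalize("https://remote.example/objects/1", false)
```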
@ -22,7 +22,7 @@ defp reinject_object(data) do
|
||||||
|
|
||||||
# TODO:
|
# TODO:
|
||||||
# This will create a Create activity, which we need internally at the moment.
|
# This will create a Create activity, which we need internally at the moment.
|
||||||
def fetch_object_from_id(id) do
|
def fetch_object_from_id(id, options \\ []) do
|
||||||
if object = Object.get_cached_by_ap_id(id) do
|
if object = Object.get_cached_by_ap_id(id) do
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
else
|
else
|
||||||
|
@ -38,7 +38,7 @@ def fetch_object_from_id(id) do
|
||||||
"object" => data
|
"object" => data
|
||||||
},
|
},
|
||||||
:ok <- Containment.contain_origin(id, params),
|
:ok <- Containment.contain_origin(id, params),
|
||||||
{:ok, activity} <- Transmogrifier.handle_incoming(params),
|
{:ok, activity} <- Transmogrifier.handle_incoming(params, options),
|
||||||
{:object, _data, %Object{} = object} <-
|
{:object, _data, %Object{} = object} <-
|
||||||
{:object, data, Object.normalize(activity, false)} do
|
{:object, data, Object.normalize(activity, false)} do
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
|
@ -63,8 +63,8 @@ def fetch_object_from_id(id) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_object_from_id!(id) do
|
def fetch_object_from_id!(id, options \\ []) do
|
||||||
with {:ok, object} <- fetch_object_from_id(id) do
|
with {:ok, object} <- fetch_object_from_id(id, options) do
|
||||||
object
|
object
|
||||||
else
|
else
|
||||||
_e ->
|
_e ->
|
||||||
|
|
lib/pleroma/reverse_proxy/client.ex (new file, 24 lines)
@@ -0,0 +1,24 @@
+defmodule Pleroma.ReverseProxy.Client do
+  @callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
+              {:ok, pos_integer(), [tuple()], reference() | map()}
+              | {:ok, pos_integer(), [tuple()]}
+              | {:ok, reference()}
+              | {:error, term()}
+
+  @callback stream_body(reference() | pid() | map()) ::
+              {:ok, binary()} | :done | {:error, String.t()}
+
+  @callback close(reference() | pid() | map()) :: :ok
+
+  def request(method, url, headers, "", opts \\ []) do
+    client().request(method, url, headers, "", opts)
+  end
+
+  def stream_body(ref), do: client().stream_body(ref)
+
+  def close(ref), do: client().close(ref)
+
+  defp client do
+    Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
+  end
+end
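With the HTTP client now resolved through this behaviour, `config/test.exs` (changed above) can point `Pleroma.ReverseProxy.Client` at `Pleroma.ReverseProxy.ClientMock`. A sketch of how such a mock could be defined — assuming Mox as the mocking library, which this diff itself does not show:

```elixir
# test/test_helper.exs (hypothetical) — define a mock implementing the behaviour
Mox.defmock(Pleroma.ReverseProxy.ClientMock, for: Pleroma.ReverseProxy.Client)

# In a test, stub the request callback to avoid real hackney calls:
Mox.expect(Pleroma.ReverseProxy.ClientMock, :request, fn :get, _url, _headers, "", _opts ->
  {:ok, 200, [{"content-type", "image/png"}], make_ref()}
end)
```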
@@ -146,7 +146,7 @@ defp request(method, url, headers, hackney_opts) do
     Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
     method = method |> String.downcase() |> String.to_existing_atom()
 
-    case hackney().request(method, url, headers, "", hackney_opts) do
+    case client().request(method, url, headers, "", hackney_opts) do
       {:ok, code, headers, client} when code in @valid_resp_codes ->
         {:ok, code, downcase_headers(headers), client}

@@ -173,7 +173,7 @@ defp response(conn, client, url, status, headers, opts) do
         halt(conn)
 
       {:error, :closed, conn} ->
-        :hackney.close(client)
+        client().close(client)
         halt(conn)
 
       {:error, error, conn} ->

@@ -181,7 +181,7 @@ defp response(conn, client, url, status, headers, opts) do
           "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
         )
 
-        :hackney.close(client)
+        client().close(client)
         halt(conn)
     end
   end

@@ -196,7 +196,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
             duration,
             Keyword.get(opts, :max_read_duration, @max_read_duration)
           ),
-         {:ok, data} <- hackney().stream_body(client),
+         {:ok, data} <- client().stream_body(client),
          {:ok, duration} <- increase_read_duration(duration),
          sent_so_far = sent_so_far + byte_size(data),
          :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),

@@ -378,5 +378,5 @@ defp increase_read_duration(_) do
     {:ok, :no_duration_limit, :no_duration_limit}
   end
 
-  defp hackney, do: Pleroma.Config.get(:hackney, :hackney)
+  defp client, do: Pleroma.ReverseProxy.Client
 end
@@ -1,51 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Uploaders.Swift.Keystone do
-  use HTTPoison.Base
-
-  def process_url(url) do
-    Enum.join(
-      [Pleroma.Config.get!([Pleroma.Uploaders.Swift, :auth_url]), url],
-      "/"
-    )
-  end
-
-  def process_response_body(body) do
-    body
-    |> Jason.decode!()
-  end
-
-  def get_token do
-    settings = Pleroma.Config.get(Pleroma.Uploaders.Swift)
-    username = Keyword.fetch!(settings, :username)
-    password = Keyword.fetch!(settings, :password)
-    tenant_id = Keyword.fetch!(settings, :tenant_id)
-
-    case post(
-           "/tokens",
-           make_auth_body(username, password, tenant_id),
-           ["Content-Type": "application/json"],
-           hackney: [:insecure]
-         ) do
-      {:ok, %Tesla.Env{status: 200, body: body}} ->
-        body["access"]["token"]["id"]
-
-      {:ok, %Tesla.Env{status: _}} ->
-        ""
-    end
-  end
-
-  def make_auth_body(username, password, tenant) do
-    Jason.encode!(%{
-      :auth => %{
-        :passwordCredentials => %{
-          :username => username,
-          :password => password
-        },
-        :tenantId => tenant
-      }
-    })
-  end
-end
@@ -1,29 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Uploaders.Swift.Client do
-  use HTTPoison.Base
-
-  def process_url(url) do
-    Enum.join(
-      [Pleroma.Config.get!([Pleroma.Uploaders.Swift, :storage_url]), url],
-      "/"
-    )
-  end
-
-  def upload_file(filename, body, content_type) do
-    token = Pleroma.Uploaders.Swift.Keystone.get_token()
-
-    case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do
-      {:ok, %Tesla.Env{status: 201}} ->
-        {:ok, {:file, filename}}
-
-      {:ok, %Tesla.Env{status: 401}} ->
-        {:error, "Unauthorized, Bad Token"}
-
-      {:error, _} ->
-        {:error, "Swift Upload Error"}
-    end
-  end
-end
@@ -1,19 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Uploaders.Swift do
-  @behaviour Pleroma.Uploaders.Uploader
-
-  def get_file(name) do
-    {:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}}
-  end
-
-  def put_file(upload) do
-    Pleroma.Uploaders.Swift.Client.upload_file(
-      upload.path,
-      File.read!(upload.tmpfile),
-      upload.content_type
-    )
-  end
-end
@@ -108,15 +108,25 @@ def ap_id(%User{nickname: nickname}) do
   def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
   def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
 
-  def user_info(%User{} = user) do
+  def user_info(%User{} = user, args \\ %{}) do
+    following_count =
+      if args[:following_count], do: args[:following_count], else: following_count(user)
+
+    follower_count =
+      if args[:follower_count], do: args[:follower_count], else: user.info.follower_count
+
     %{
-      following_count: following_count(user),
       note_count: user.info.note_count,
-      follower_count: user.info.follower_count,
       locked: user.info.locked,
       confirmation_pending: user.info.confirmation_pending,
       default_scope: user.info.default_scope
     }
+    |> Map.put(:following_count, following_count)
+    |> Map.put(:follower_count, follower_count)
+  end
+
+  def set_info_cache(user, args) do
+    Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args))
   end
 
   def restrict_deactivated(query) do

@@ -837,15 +847,12 @@ def unblock(blocker, %{ap_id: ap_id}) do
   def mutes?(nil, _), do: false
   def mutes?(user, %{ap_id: ap_id}), do: Enum.member?(user.info.mutes, ap_id)
 
-  def blocks?(user, %{ap_id: ap_id}) do
-    blocks = user.info.blocks
-    domain_blocks = user.info.domain_blocks
+  def blocks?(%User{info: info} = _user, %{ap_id: ap_id}) do
+    blocks = info.blocks
+    domain_blocks = info.domain_blocks
     %{host: host} = URI.parse(ap_id)
 
-    Enum.member?(blocks, ap_id) ||
-      Enum.any?(domain_blocks, fn domain ->
-        host == domain
-      end)
+    Enum.member?(blocks, ap_id) || Enum.any?(domain_blocks, &(&1 == host))
   end
 
   def subscribed_to?(user, %{ap_id: ap_id}) do

@@ -1004,6 +1011,56 @@ def perform(:follow_import, %User{} = follower, followed_identifiers)
     )
   end
 
+  @spec sync_follow_counter() :: :ok
+  def sync_follow_counter,
+    do: PleromaJobQueue.enqueue(:background, __MODULE__, [:sync_follow_counters])
+
+  @spec perform(:sync_follow_counters) :: :ok
+  def perform(:sync_follow_counters) do
+    {:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors)
+    config = Pleroma.Config.get([:instance, :external_user_synchronization])
+
+    :ok = sync_follow_counters(config)
+    Agent.stop(:domain_errors)
+  end
+
+  @spec sync_follow_counters(keyword()) :: :ok
+  def sync_follow_counters(opts \\ []) do
+    users = external_users(opts)
+
+    if length(users) > 0 do
+      errors = Agent.get(:domain_errors, fn state -> state end)
+      {last, updated_errors} = User.Synchronization.call(users, errors, opts)
+      Agent.update(:domain_errors, fn _state -> updated_errors end)
+      sync_follow_counters(max_id: last.id, limit: opts[:limit])
+    else
+      :ok
+    end
+  end
+
+  @spec external_users(keyword()) :: [User.t()]
+  def external_users(opts \\ []) do
+    query =
+      User.Query.build(%{
+        external: true,
+        active: true,
+        order_by: :id,
+        select: [:id, :ap_id, :info]
+      })
+
+    query =
+      if opts[:max_id],
+        do: where(query, [u], u.id > ^opts[:max_id]),
+        else: query
+
+    query =
+      if opts[:limit],
+        do: limit(query, ^opts[:limit]),
+        else: query
+
+    Repo.all(query)
+  end
+
   def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
     do:
       PleromaJobQueue.enqueue(:background, __MODULE__, [
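The counter synchronization added above is meant to be driven from the background queue. A brief usage sketch based only on the functions in this hunk:

```elixir
# Enqueue a full synchronization pass as a background job.
Pleroma.User.sync_follow_counter()

# Or inspect the batch selection directly (the same query used by sync_follow_counters/1).
users = Pleroma.User.external_users(limit: 500)
```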
@@ -7,7 +7,7 @@ defmodule Pleroma.User.Query do
   User query builder module. Builds query from new query or another user query.
 
   ## Example:
-      query = Pleroma.User.Query(%{nickname: "nickname"})
+      query = Pleroma.User.Query.build(%{nickname: "nickname"})
       another_query = Pleroma.User.Query.build(query, %{email: "email@example.com"})
       Pleroma.Repo.all(query)
       Pleroma.Repo.all(another_query)

@@ -47,7 +47,10 @@ defmodule Pleroma.User.Query do
           friends: User.t(),
           recipients_from_activity: [String.t()],
           nickname: [String.t()],
-          ap_id: [String.t()]
+          ap_id: [String.t()],
+          order_by: term(),
+          select: term(),
+          limit: pos_integer()
         }
         | %{}

@@ -141,6 +144,18 @@ defp compose_query({:recipients_from_activity, to}, query) do
     where(query, [u], u.ap_id in ^to or fragment("? && ?", u.following, ^to))
   end
 
+  defp compose_query({:order_by, key}, query) do
+    order_by(query, [u], field(u, ^key))
+  end
+
+  defp compose_query({:select, keys}, query) do
+    select(query, [u], ^keys)
+  end
+
+  defp compose_query({:limit, limit}, query) do
+    limit(query, ^limit)
+  end
+
   defp compose_query(_unsupported_param, query), do: query
 
   defp prepare_tag_criteria(tag, query) do
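The three new criteria compose like the existing ones; this sketch mirrors how `external_users/1` in `user.ex` (earlier in this diff) uses them:

```elixir
%{external: true, active: true, order_by: :id, select: [:id, :ap_id, :info], limit: 500}
|> Pleroma.User.Query.build()
|> Pleroma.Repo.all()
```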
@ -43,6 +43,8 @@ def search(query_string, opts \\ []) do
|
||||||
defp search_query(query_string, for_user, following) do
|
defp search_query(query_string, for_user, following) do
|
||||||
for_user
|
for_user
|
||||||
|> base_query(following)
|
|> base_query(following)
|
||||||
|
|> filter_blocked_user(for_user)
|
||||||
|
|> filter_blocked_domains(for_user)
|
||||||
|> search_subqueries(query_string)
|
|> search_subqueries(query_string)
|
||||||
|> union_subqueries
|
|> union_subqueries
|
||||||
|> distinct_query()
|
|> distinct_query()
|
||||||
|
@ -55,6 +57,25 @@ defp search_query(query_string, for_user, following) do
|
||||||
defp base_query(_user, false), do: User
|
defp base_query(_user, false), do: User
|
||||||
defp base_query(user, true), do: User.get_followers_query(user)
|
defp base_query(user, true), do: User.get_followers_query(user)
|
||||||
|
|
||||||
|
defp filter_blocked_user(query, %User{info: %{blocks: blocks}})
|
||||||
|
when length(blocks) > 0 do
|
||||||
|
from(q in query, where: not (q.ap_id in ^blocks))
|
||||||
|
end
|
||||||
|
|
||||||
|
defp filter_blocked_user(query, _), do: query
|
||||||
|
|
||||||
|
defp filter_blocked_domains(query, %User{info: %{domain_blocks: domain_blocks}})
|
||||||
|
when length(domain_blocks) > 0 do
|
||||||
|
domains = Enum.join(domain_blocks, ",")
|
||||||
|
|
||||||
|
from(
|
||||||
|
q in query,
|
||||||
|
where: fragment("substring(ap_id from '.*://([^/]*)') NOT IN (?)", ^domains)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp filter_blocked_domains(query, _), do: query
|
||||||
|
|
||||||
defp paginate(query, limit, offset) do
|
defp paginate(query, limit, offset) do
|
||||||
from(q in query, limit: ^limit, offset: ^offset)
|
from(q in query, limit: ^limit, offset: ^offset)
|
||||||
end
|
end
|
||||||
|
@ -129,7 +150,7 @@ defp boost_search_rank_query(query, for_user) do
|
||||||
@spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
|
@spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
|
||||||
defp fts_search_subquery(query, term) do
|
defp fts_search_subquery(query, term) do
|
||||||
processed_query =
|
processed_query =
|
||||||
term
|
String.trim_trailing(term, "@" <> local_domain())
|
||||||
|> String.replace(~r/\W+/, " ")
|
|> String.replace(~r/\W+/, " ")
|
||||||
|> String.trim()
|
|> String.trim()
|
||||||
|> String.split()
|
|> String.split()
|
||||||
|
@ -171,6 +192,8 @@ defp fts_search_subquery(query, term) do
|
||||||
|
|
||||||
@spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
|
@spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
|
||||||
defp trigram_search_subquery(query, term) do
|
defp trigram_search_subquery(query, term) do
|
||||||
|
term = String.trim_trailing(term, "@" <> local_domain())
|
||||||
|
|
||||||
from(
|
from(
|
||||||
u in query,
|
u in query,
|
||||||
select_merge: %{
|
select_merge: %{
|
||||||
|
@ -188,4 +211,6 @@ defp trigram_search_subquery(query, term) do
|
||||||
)
|
)
|
||||||
|> User.restrict_deactivated()
|
|> User.restrict_deactivated()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp local_domain, do: Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host])
|
||||||
end
|
end
|
||||||
|
|
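The new `local_domain/0` helper lets a search for `name@this.instance` match the bare local nickname. A tiny illustration of the trimming, using a hypothetical host:

```elixir
# Assuming the configured Endpoint host were "pleroma.example.org":
String.trim_trailing("lain@pleroma.example.org", "@" <> "pleroma.example.org")
#=> "lain"
```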
lib/pleroma/user/synchronization.ex (new file, 60 lines)
@@ -0,0 +1,60 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.Synchronization do
+  alias Pleroma.HTTP
+  alias Pleroma.User
+
+  @spec call([User.t()], map(), keyword()) :: {User.t(), map()}
+  def call(users, errors, opts \\ []) do
+    do_call(users, errors, opts)
+  end
+
+  defp do_call([user | []], errors, opts) do
+    updated = fetch_counters(user, errors, opts)
+    {user, updated}
+  end
+
+  defp do_call([user | others], errors, opts) do
+    updated = fetch_counters(user, errors, opts)
+    do_call(others, updated, opts)
+  end
+
+  defp fetch_counters(user, errors, opts) do
+    %{host: host} = URI.parse(user.ap_id)
+
+    info = %{}
+    {following, errors} = fetch_counter(user.ap_id <> "/following", host, errors, opts)
+    info = if following, do: Map.put(info, :following_count, following), else: info
+
+    {followers, errors} = fetch_counter(user.ap_id <> "/followers", host, errors, opts)
+    info = if followers, do: Map.put(info, :follower_count, followers), else: info
+
+    User.set_info_cache(user, info)
+    errors
+  end
+
+  defp available_domain?(domain, errors, opts) do
+    max_retries = Keyword.get(opts, :max_retries, 3)
+    not (Map.has_key?(errors, domain) && errors[domain] >= max_retries)
+  end
+
+  defp fetch_counter(url, host, errors, opts) do
+    with true <- available_domain?(host, errors, opts),
+         {:ok, %{body: body, status: code}} when code in 200..299 <-
+           HTTP.get(
+             url,
+             [{:Accept, "application/activity+json"}]
+           ),
+         {:ok, data} <- Jason.decode(body) do
+      {data["totalItems"], errors}
+    else
+      false ->
+        {nil, errors}
+
+      _ ->
+        {nil, Map.update(errors, host, 1, &(&1 + 1))}
+    end
+  end
+end
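A sketch of how this batch walker is driven, mirroring `sync_follow_counters/1` added to `user.ex` above; the empty map is the initial per-domain error state:

```elixir
users = Pleroma.User.external_users(limit: 500)
{last_user, domain_errors} = Pleroma.User.Synchronization.call(users, %{}, max_retries: 3)
# last_user.id is then fed back as :max_id for the next batch.
```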
lib/pleroma/user/synchronization_worker.ex (new file, 32 lines)
@@ -0,0 +1,32 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.SynchronizationWorker do
+  use GenServer
+
+  def start_link do
+    config = Pleroma.Config.get([:instance, :external_user_synchronization])
+
+    if config[:enabled] do
+      GenServer.start_link(__MODULE__, interval: config[:interval])
+    else
+      :ignore
+    end
+  end
+
+  def init(opts) do
+    schedule_next(opts)
+    {:ok, opts}
+  end
+
+  def handle_info(:sync_follow_counters, opts) do
+    Pleroma.User.sync_follow_counter()
+    schedule_next(opts)
+    {:noreply, opts}
+  end
+
+  defp schedule_next(opts) do
+    Process.send_after(self(), :sync_follow_counters, opts[:interval])
+  end
+end
@@ -14,6 +14,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
+  alias Pleroma.Web.Federator
 
   import Ecto.Query
 

@@ -22,20 +23,20 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   @doc """
   Modifies an incoming AP object (mastodon format) to our internal format.
   """
-  def fix_object(object) do
+  def fix_object(object, options \\ []) do
     object
     |> fix_actor
     |> fix_url
     |> fix_attachments
     |> fix_context
-    |> fix_in_reply_to
+    |> fix_in_reply_to(options)
     |> fix_emoji
     |> fix_tag
     |> fix_content_map
    |> fix_likes
     |> fix_addressing
     |> fix_summary
-    |> fix_type
+    |> fix_type(options)
   end
 
   def fix_summary(%{"summary" => nil} = object) do

@@ -164,7 +165,9 @@ def fix_likes(object) do
     object
   end
 
-  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
+  def fix_in_reply_to(object, options \\ [])
+
+  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
       when not is_nil(in_reply_to) do
     in_reply_to_id =
       cond do

@@ -182,28 +185,34 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
         ""
     end
 
-    case get_obj_helper(in_reply_to_id) do
-      {:ok, replied_object} ->
-        with %Activity{} = _activity <-
-               Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
-          object
-          |> Map.put("inReplyTo", replied_object.data["id"])
-          |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
-          |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
-          |> Map.put("context", replied_object.data["context"] || object["conversation"])
-        else
-          e ->
-            Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
-            object
-        end
-
-      e ->
-        Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
-        object
+    object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)
+
+    if Federator.allowed_incoming_reply_depth?(options[:depth]) do
+      case get_obj_helper(in_reply_to_id, options) do
+        {:ok, replied_object} ->
+          with %Activity{} = _activity <-
+                 Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
+            object
+            |> Map.put("inReplyTo", replied_object.data["id"])
+            |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
+            |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
+            |> Map.put("context", replied_object.data["context"] || object["conversation"])
+          else
+            e ->
+              Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+              object
+          end
+
+        e ->
+          Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+          object
+      end
+    else
+      object
     end
   end
 
-  def fix_in_reply_to(object), do: object
+  def fix_in_reply_to(object, _options), do: object
 
   def fix_context(object) do
     context = object["context"] || object["conversation"] || Utils.generate_context_id()
@@ -336,8 +345,13 @@ def fix_content_map(%{"contentMap" => content_map} = object) do
 
   def fix_content_map(object), do: object
 
-  def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
-    reply = Object.normalize(reply_id)
+  def fix_type(object, options \\ [])
+
+  def fix_type(%{"inReplyTo" => reply_id} = object, options) when is_binary(reply_id) do
+    reply =
+      if Federator.allowed_incoming_reply_depth?(options[:depth]) do
+        Object.normalize(reply_id, true)
+      end
 
     if reply && (reply.data["type"] == "Question" and object["name"]) do
       Map.put(object, "type", "Answer")

@@ -346,7 +360,7 @@ def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
     end
   end
 
-  def fix_type(object), do: object
+  def fix_type(object, _), do: object
 
   defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
     with true <- id =~ "follows",

@@ -374,9 +388,11 @@ defp get_follow_activity(follow_object, followed) do
     end
   end
 
+  def handle_incoming(data, options \\ [])
+
   # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
   # with nil ID.
-  def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data) do
+  def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
     with context <- data["context"] || Utils.generate_context_id(),
          content <- data["content"] || "",
         %User{} = actor <- User.get_cached_by_ap_id(actor),

@@ -409,15 +425,19 @@ def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} =
   end
 
   # disallow objects with bogus IDs
-  def handle_incoming(%{"id" => nil}), do: :error
-  def handle_incoming(%{"id" => ""}), do: :error
+  def handle_incoming(%{"id" => nil}, _options), do: :error
+  def handle_incoming(%{"id" => ""}, _options), do: :error
   # length of https:// = 8, should validate better, but good enough for now.
-  def handle_incoming(%{"id" => id}) when not (is_binary(id) and length(id) > 8), do: :error
+  def handle_incoming(%{"id" => id}, _options) when not (is_binary(id) and length(id) > 8),
+    do: :error
 
   # TODO: validate those with a Ecto scheme
   # - tags
   # - emoji
-  def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
+  def handle_incoming(
+        %{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
+        options
+      )
       when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do
     actor = Containment.get_actor(data)
 
@ -427,7 +447,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
|
||||||
|
|
||||||
with nil <- Activity.get_create_by_object_ap_id(object["id"]),
|
with nil <- Activity.get_create_by_object_ap_id(object["id"]),
|
||||||
{:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
|
{:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
|
||||||
object = fix_object(data["object"])
|
options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
|
||||||
|
object = fix_object(data["object"], options)
|
||||||
|
|
||||||
params = %{
|
params = %{
|
||||||
to: data["to"],
|
to: data["to"],
|
||||||
|
@ -452,7 +473,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data
|
%{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
||||||
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
||||||
|
@ -503,7 +525,8 @@ def handle_incoming(
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data
|
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with actor <- Containment.get_actor(data),
|
with actor <- Containment.get_actor(data),
|
||||||
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
|
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
|
||||||
|
@ -524,7 +547,8 @@ def handle_incoming(
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data
|
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with actor <- Containment.get_actor(data),
|
with actor <- Containment.get_actor(data),
|
||||||
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
|
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
|
||||||
|
@ -548,7 +572,8 @@ def handle_incoming(
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data
|
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with actor <- Containment.get_actor(data),
|
with actor <- Containment.get_actor(data),
|
||||||
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
||||||
|
@ -561,7 +586,8 @@ def handle_incoming(
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data
|
%{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with actor <- Containment.get_actor(data),
|
with actor <- Containment.get_actor(data),
|
||||||
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
||||||
|
@ -576,7 +602,8 @@ def handle_incoming(
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
|
%{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
|
||||||
data
|
data,
|
||||||
|
_options
|
||||||
)
|
)
|
||||||
when object_type in ["Person", "Application", "Service", "Organization"] do
|
when object_type in ["Person", "Application", "Service", "Organization"] do
|
||||||
with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
|
with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
|
||||||
|
@ -614,7 +641,8 @@ def handle_incoming(
|
||||||
# an error or a tombstone. This would allow us to verify that a deletion actually took
|
# an error or a tombstone. This would allow us to verify that a deletion actually took
|
||||||
# place.
|
# place.
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data
|
%{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
object_id = Utils.get_ap_id(object_id)
|
object_id = Utils.get_ap_id(object_id)
|
||||||
|
|
||||||
|
@ -635,7 +663,8 @@ def handle_incoming(
|
||||||
"object" => %{"type" => "Announce", "object" => object_id},
|
"object" => %{"type" => "Announce", "object" => object_id},
|
||||||
"actor" => _actor,
|
"actor" => _actor,
|
||||||
"id" => id
|
"id" => id
|
||||||
} = data
|
} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with actor <- Containment.get_actor(data),
|
with actor <- Containment.get_actor(data),
|
||||||
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
||||||
|
@ -653,7 +682,8 @@ def handle_incoming(
|
||||||
"object" => %{"type" => "Follow", "object" => followed},
|
"object" => %{"type" => "Follow", "object" => followed},
|
||||||
"actor" => follower,
|
"actor" => follower,
|
||||||
"id" => id
|
"id" => id
|
||||||
} = _data
|
} = _data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
|
||||||
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
|
||||||
|
@ -671,7 +701,8 @@ def handle_incoming(
|
||||||
"object" => %{"type" => "Block", "object" => blocked},
|
"object" => %{"type" => "Block", "object" => blocked},
|
||||||
"actor" => blocker,
|
"actor" => blocker,
|
||||||
"id" => id
|
"id" => id
|
||||||
} = _data
|
} = _data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
|
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
|
||||||
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
|
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
|
||||||
|
@ -685,7 +716,8 @@ def handle_incoming(
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(
|
||||||
%{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data
|
%{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
|
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
|
||||||
%User{local: true} = blocked = User.get_cached_by_ap_id(blocked),
|
%User{local: true} = blocked = User.get_cached_by_ap_id(blocked),
|
||||||
|
@ -705,7 +737,8 @@ def handle_incoming(
|
||||||
"object" => %{"type" => "Like", "object" => object_id},
|
"object" => %{"type" => "Like", "object" => object_id},
|
||||||
"actor" => _actor,
|
"actor" => _actor,
|
||||||
"id" => id
|
"id" => id
|
||||||
} = data
|
} = data,
|
||||||
|
_options
|
||||||
) do
|
) do
|
||||||
with actor <- Containment.get_actor(data),
|
with actor <- Containment.get_actor(data),
|
||||||
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
||||||
|
@ -717,10 +750,10 @@ def handle_incoming(
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(_), do: :error
|
def handle_incoming(_, _), do: :error
|
||||||
|
|
||||||
def get_obj_helper(id) do
|
def get_obj_helper(id, options \\ []) do
|
||||||
if object = Object.normalize(id), do: {:ok, object}, else: nil
|
if object = Object.normalize(id, true, options), do: {:ok, object}, else: nil
|
||||||
end
|
end
|
||||||
|
|
||||||
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
|
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
|
||||||
|
|
|
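The hunks above give every handle_incoming clause a second `_options` argument and make get_obj_helper/2 forward those options into Object.normalize/3, so callers can thread fetch options (notably a reply depth) through the pipeline. A minimal standalone sketch of that threading pattern; the module name and @max_depth value are illustrative, not Pleroma code:

defmodule DepthThreadingSketch do
  @max_depth 5

  def handle(object, options \\ [])

  def handle(%{"inReplyTo" => parent}, options) when is_map(parent) do
    depth = Keyword.get(options, :depth, 0)

    if depth < @max_depth do
      # Recurse into the parent with an incremented :depth so long reply chains stop here.
      handle(parent, Keyword.put(options, :depth, depth + 1))
    else
      {:error, :too_deep}
    end
  end

  def handle(object, _options), do: {:ok, object}
end

# DepthThreadingSketch.handle(%{"inReplyTo" => %{"id" => "parent-note"}})
# #=> {:ok, %{"id" => "parent-note"}}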
@ -170,14 +170,17 @@ def create_context(context) do
|
||||||
Enqueues an activity for federation if it's local
|
Enqueues an activity for federation if it's local
|
||||||
"""
|
"""
|
||||||
def maybe_federate(%Activity{local: true} = activity) do
|
def maybe_federate(%Activity{local: true} = activity) do
|
||||||
priority =
|
if Pleroma.Config.get!([:instance, :federating]) do
|
||||||
case activity.data["type"] do
|
priority =
|
||||||
"Delete" -> 10
|
case activity.data["type"] do
|
||||||
"Create" -> 1
|
"Delete" -> 10
|
||||||
_ -> 5
|
"Create" -> 1
|
||||||
end
|
_ -> 5
|
||||||
|
end
|
||||||
|
|
||||||
|
Pleroma.Web.Federator.publish(activity, priority)
|
||||||
|
end
|
||||||
|
|
||||||
Pleroma.Web.Federator.publish(activity, priority)
|
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
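The maybe_federate/1 hunk above now checks `Pleroma.Config.get!([:instance, :federating])` before computing a priority and publishing. A small pure sketch of the type-to-priority mapping it uses; the module name is invented for illustration:

defmodule FederationPrioritySketch do
  # Same mapping as the rewritten maybe_federate/1: "Delete" -> 10, "Create" -> 1, _ -> 5.
  def priority_for(%{"type" => "Delete"}), do: 10
  def priority_for(%{"type" => "Create"}), do: 1
  def priority_for(%{"type" => _other}), do: 5
end

# FederationPrioritySketch.priority_for(%{"type" => "Create"}) #=> 1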
@ -74,7 +74,7 @@ def user_create(
|
||||||
end
|
end
|
||||||
|
|
||||||
def user_show(conn, %{"nickname" => nickname}) do
|
def user_show(conn, %{"nickname" => nickname}) do
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname_or_id(nickname) do
|
||||||
conn
|
conn
|
||||||
|> json(AccountView.render("show.json", %{user: user}))
|
|> json(AccountView.render("show.json", %{user: user}))
|
||||||
else
|
else
|
||||||
|
|
|
@ -46,8 +46,10 @@ def render("show.json", %{report: report}) do
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp merge_account_views(user) do
|
defp merge_account_views(%User{} = user) do
|
||||||
Pleroma.Web.MastodonAPI.AccountView.render("account.json", %{user: user})
|
Pleroma.Web.MastodonAPI.AccountView.render("account.json", %{user: user})
|
||||||
|> Map.merge(Pleroma.Web.AdminAPI.AccountView.render("show.json", %{user: user}))
|
|> Map.merge(Pleroma.Web.AdminAPI.AccountView.render("show.json", %{user: user}))
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp merge_account_views(_), do: %{}
|
||||||
end
|
end
|
||||||
|
|
|
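merge_account_views/1 now pattern-matches on %User{} and gains a catch-all returning %{}, so reports that reference a missing account no longer crash the admin view. A self-contained sketch of the same guard, with a stand-in User struct:

defmodule AccountMergeSketch do
  defmodule User do
    defstruct [:nickname, :display_name]
  end

  # Real user structs are rendered; anything else (e.g. nil for a deleted account)
  # collapses to an empty map instead of raising.
  def merge_account_views(%User{} = user) do
    %{"nickname" => user.nickname, "display_name" => user.display_name}
  end

  def merge_account_views(_), do: %{}
end

# AccountMergeSketch.merge_account_views(nil) #=> %{}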
@ -22,6 +22,18 @@ def init do
|
||||||
refresh_subscriptions()
|
refresh_subscriptions()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
|
||||||
|
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
|
||||||
|
def allowed_incoming_reply_depth?(depth) do
|
||||||
|
max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])
|
||||||
|
|
||||||
|
if max_replies_depth do
|
||||||
|
(depth || 1) <= max_replies_depth
|
||||||
|
else
|
||||||
|
true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
# Client API
|
# Client API
|
||||||
|
|
||||||
def incoming_doc(doc) do
|
def incoming_doc(doc) do
|
||||||
|
|
|
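allowed_incoming_reply_depth?/1 gates recursive reply fetching on the new `federation_incoming_replies_max_depth` option. The check itself, extracted into a standalone sketch where the maximum is passed in instead of read from config:

defmodule ReplyDepthSketch do
  # `max_depth` stands in for the federation_incoming_replies_max_depth setting;
  # nil means "no limit", and an unset depth counts as the first reply level.
  def allowed?(depth, max_depth) do
    if max_depth do
      (depth || 1) <= max_depth
    else
      true
    end
  end
end

# ReplyDepthSketch.allowed?(nil, 2) #=> true
# ReplyDepthSketch.allowed?(3, 2)   #=> false
# ReplyDepthSketch.allowed?(7, nil) #=> true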
@ -167,6 +167,69 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def update_avatar(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
|
||||||
|
change = Changeset.change(user, %{avatar: nil})
|
||||||
|
{:ok, user} = User.update_and_set_cache(change)
|
||||||
|
CommonAPI.update(user)
|
||||||
|
|
||||||
|
json(conn, %{url: nil})
|
||||||
|
end
|
||||||
|
|
||||||
|
def update_avatar(%{assigns: %{user: user}} = conn, params) do
|
||||||
|
{:ok, object} = ActivityPub.upload(params, type: :avatar)
|
||||||
|
change = Changeset.change(user, %{avatar: object.data})
|
||||||
|
{:ok, user} = User.update_and_set_cache(change)
|
||||||
|
CommonAPI.update(user)
|
||||||
|
%{"url" => [%{"href" => href} | _]} = object.data
|
||||||
|
|
||||||
|
json(conn, %{url: href})
|
||||||
|
end
|
||||||
|
|
||||||
|
def update_banner(%{assigns: %{user: user}} = conn, %{"banner" => ""}) do
|
||||||
|
with new_info <- %{"banner" => %{}},
|
||||||
|
info_cng <- User.Info.profile_update(user.info, new_info),
|
||||||
|
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
|
||||||
|
{:ok, user} <- User.update_and_set_cache(changeset) do
|
||||||
|
CommonAPI.update(user)
|
||||||
|
|
||||||
|
json(conn, %{url: nil})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def update_banner(%{assigns: %{user: user}} = conn, params) do
|
||||||
|
with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner),
|
||||||
|
new_info <- %{"banner" => object.data},
|
||||||
|
info_cng <- User.Info.profile_update(user.info, new_info),
|
||||||
|
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
|
||||||
|
{:ok, user} <- User.update_and_set_cache(changeset) do
|
||||||
|
CommonAPI.update(user)
|
||||||
|
%{"url" => [%{"href" => href} | _]} = object.data
|
||||||
|
|
||||||
|
json(conn, %{url: href})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def update_background(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
|
||||||
|
with new_info <- %{"background" => %{}},
|
||||||
|
info_cng <- User.Info.profile_update(user.info, new_info),
|
||||||
|
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
|
||||||
|
{:ok, _user} <- User.update_and_set_cache(changeset) do
|
||||||
|
json(conn, %{url: nil})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def update_background(%{assigns: %{user: user}} = conn, params) do
|
||||||
|
with {:ok, object} <- ActivityPub.upload(params, type: :background),
|
||||||
|
new_info <- %{"background" => object.data},
|
||||||
|
info_cng <- User.Info.profile_update(user.info, new_info),
|
||||||
|
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
|
||||||
|
{:ok, _user} <- User.update_and_set_cache(changeset) do
|
||||||
|
%{"url" => [%{"href" => href} | _]} = object.data
|
||||||
|
|
||||||
|
json(conn, %{url: href})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def verify_credentials(%{assigns: %{user: user}} = conn, _) do
|
def verify_credentials(%{assigns: %{user: user}} = conn, _) do
|
||||||
chat_token = Phoenix.Token.sign(conn, "user socket", user.id)
|
chat_token = Phoenix.Token.sign(conn, "user socket", user.id)
|
||||||
|
|
||||||
|
|
|
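The new update_avatar/update_banner/update_background clauses treat an empty string as "reset this image" and answer with `url: nil`. A hedged sketch of that convention using plain maps instead of the real changeset and upload plumbing:

defmodule ImageResetSketch do
  # An empty "img" clears the stored image and reports url: nil back to the client.
  def update_avatar(user, %{"img" => ""}) do
    {%{user | avatar: nil}, %{url: nil}}
  end

  # Any other value stands in for an uploaded image reference.
  def update_avatar(user, %{"img" => href}) do
    {%{user | avatar: href}, %{url: href}}
  end
end

# ImageResetSketch.update_avatar(%{avatar: "old.png"}, %{"img" => ""})
# #=> {%{avatar: nil}, %{url: nil}}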
@ -17,8 +17,8 @@ defmodule Pleroma.Web.MastodonAPI.SearchController do
|
||||||
plug(Pleroma.Plugs.RateLimiter, :search when action in [:search, :search2, :account_search])
|
plug(Pleroma.Plugs.RateLimiter, :search when action in [:search, :search2, :account_search])
|
||||||
|
|
||||||
def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
|
def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
|
||||||
accounts = User.search(query, search_options(params, user))
|
accounts = with_fallback(fn -> User.search(query, search_options(params, user)) end, [])
|
||||||
statuses = Activity.search(user, query)
|
statuses = with_fallback(fn -> Activity.search(user, query) end, [])
|
||||||
tags_path = Web.base_url() <> "/tag/"
|
tags_path = Web.base_url() <> "/tag/"
|
||||||
|
|
||||||
tags =
|
tags =
|
||||||
|
@ -40,8 +40,8 @@ def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
|
def search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
|
||||||
accounts = User.search(query, search_options(params, user))
|
accounts = with_fallback(fn -> User.search(query, search_options(params, user)) end, [])
|
||||||
statuses = Activity.search(user, query)
|
statuses = with_fallback(fn -> Activity.search(user, query) end, [])
|
||||||
|
|
||||||
tags =
|
tags =
|
||||||
query
|
query
|
||||||
|
@ -76,4 +76,14 @@ defp search_options(params, user) do
|
||||||
for_user: user
|
for_user: user
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp with_fallback(f, fallback) do
|
||||||
|
try do
|
||||||
|
f.()
|
||||||
|
rescue
|
||||||
|
error ->
|
||||||
|
Logger.error("#{__MODULE__} search error: #{inspect(error)}")
|
||||||
|
fallback
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
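Both search actions now wrap User.search/2 and Activity.search/2 in with_fallback/2, so a raised error (for example a database statement timeout) degrades to empty results instead of a 500. The helper is small enough to reproduce as a standalone sketch:

defmodule SearchFallbackSketch do
  require Logger

  # Run the expensive query; if it raises, log the error and return the fallback
  # so one failing sub-search does not fail the whole request.
  def with_fallback(fun, fallback \\ []) do
    fun.()
  rescue
    error ->
      Logger.error("search error: #{inspect(error)}")
      fallback
  end
end

# SearchFallbackSketch.with_fallback(fn -> raise "db timeout" end) #=> []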
@ -104,7 +104,7 @@ def render(
|
||||||
id: to_string(activity.id),
|
id: to_string(activity.id),
|
||||||
uri: activity_object.data["id"],
|
uri: activity_object.data["id"],
|
||||||
url: activity_object.data["id"],
|
url: activity_object.data["id"],
|
||||||
account: AccountView.render("account.json", %{user: user}),
|
account: AccountView.render("account.json", %{user: user, for: opts[:for]}),
|
||||||
in_reply_to_id: nil,
|
in_reply_to_id: nil,
|
||||||
in_reply_to_account_id: nil,
|
in_reply_to_account_id: nil,
|
||||||
reblog: reblogged,
|
reblog: reblogged,
|
||||||
|
@ -221,7 +221,7 @@ def render("status.json", %{activity: %{data: %{"object" => _object}} = activity
|
||||||
id: to_string(activity.id),
|
id: to_string(activity.id),
|
||||||
uri: object.data["id"],
|
uri: object.data["id"],
|
||||||
url: url,
|
url: url,
|
||||||
account: AccountView.render("account.json", %{user: user}),
|
account: AccountView.render("account.json", %{user: user, for: opts[:for]}),
|
||||||
in_reply_to_id: reply_to && to_string(reply_to.id),
|
in_reply_to_id: reply_to && to_string(reply_to.id),
|
||||||
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
|
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
|
||||||
reblog: nil,
|
reblog: nil,
|
||||||
|
|
|
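Passing `for: opts[:for]` into the account renderer is what lets relationship data in the embedded Account entity be computed against the viewing user. An illustrative sketch of the idea with plain maps; the field names are made up:

defmodule RelationshipRenderSketch do
  # `opts[:for]` is the viewing user; without it the relationship field is simply omitted.
  def render_account(user, opts \\ []) do
    base = %{id: user.id, acct: user.nickname}

    case Keyword.get(opts, :for) do
      nil -> base
      viewer -> Map.put(base, :following, user.id in viewer.following_ids)
    end
  end
end

# RelationshipRenderSketch.render_account(
#   %{id: 1, nickname: "alice"},
#   for: %{following_ids: [1]}
# )
# #=> %{id: 1, acct: "alice", following: true}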
@ -29,9 +29,10 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
|
||||||
|
|
||||||
def init(%{qs: qs} = req, state) do
|
def init(%{qs: qs} = req, state) do
|
||||||
with params <- :cow_qs.parse_qs(qs),
|
with params <- :cow_qs.parse_qs(qs),
|
||||||
|
sec_websocket <- :cowboy_req.header("sec-websocket-protocol", req, nil),
|
||||||
access_token <- List.keyfind(params, "access_token", 0),
|
access_token <- List.keyfind(params, "access_token", 0),
|
||||||
{_, stream} <- List.keyfind(params, "stream", 0),
|
{_, stream} <- List.keyfind(params, "stream", 0),
|
||||||
{:ok, user} <- allow_request(stream, access_token),
|
{:ok, user} <- allow_request(stream, [access_token, sec_websocket]),
|
||||||
topic when is_binary(topic) <- expand_topic(stream, params) do
|
topic when is_binary(topic) <- expand_topic(stream, params) do
|
||||||
{:cowboy_websocket, req, %{user: user, topic: topic}, %{idle_timeout: @timeout}}
|
{:cowboy_websocket, req, %{user: user, topic: topic}, %{idle_timeout: @timeout}}
|
||||||
else
|
else
|
||||||
|
@ -84,13 +85,21 @@ def terminate(reason, _req, state) do
|
||||||
end
|
end
|
||||||
|
|
||||||
# Public streams without authentication.
|
# Public streams without authentication.
|
||||||
defp allow_request(stream, nil) when stream in @anonymous_streams do
|
defp allow_request(stream, [nil, nil]) when stream in @anonymous_streams do
|
||||||
{:ok, nil}
|
{:ok, nil}
|
||||||
end
|
end
|
||||||
|
|
||||||
# Authenticated streams.
|
# Authenticated streams.
|
||||||
defp allow_request(stream, {"access_token", access_token}) when stream in @streams do
|
defp allow_request(stream, [access_token, sec_websocket]) when stream in @streams do
|
||||||
with %Token{user_id: user_id} <- Repo.get_by(Token, token: access_token),
|
token =
|
||||||
|
with {"access_token", token} <- access_token do
|
||||||
|
token
|
||||||
|
else
|
||||||
|
_ -> sec_websocket
|
||||||
|
end
|
||||||
|
|
||||||
|
with true <- is_bitstring(token),
|
||||||
|
%Token{user_id: user_id} <- Repo.get_by(Token, token: token),
|
||||||
user = %User{} <- User.get_cached_by_id(user_id) do
|
user = %User{} <- User.get_cached_by_id(user_id) do
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
else
|
else
|
||||||
|
|
|
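The websocket handler now accepts the token either as the `access_token` query parameter or via the `Sec-WebSocket-Protocol` header, and only proceeds when one of them is a binary. A standalone sketch of that selection logic:

defmodule WsTokenSketch do
  # Prefer the query parameter (a {"access_token", value} tuple when present),
  # fall back to the Sec-WebSocket-Protocol header value, and require a binary result.
  def pick_token(access_token, sec_websocket) do
    token =
      case access_token do
        {"access_token", token} -> token
        _ -> sec_websocket
      end

    if is_bitstring(token), do: {:ok, token}, else: :error
  end
end

# WsTokenSketch.pick_token({"access_token", "abc"}, nil) #=> {:ok, "abc"}
# WsTokenSketch.pick_token(nil, "def")                   #=> {:ok, "def"}
# WsTokenSketch.pick_token(nil, nil)                     #=> :error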
@ -33,20 +33,7 @@ defp whitelisted?(url) do
|
||||||
|
|
||||||
def encode_url(url) do
|
def encode_url(url) do
|
||||||
secret = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base])
|
secret = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base])
|
||||||
|
base64 = Base.url_encode64(url, @base64_opts)
|
||||||
# Must preserve `%2F` for compatibility with S3
|
|
||||||
# https://git.pleroma.social/pleroma/pleroma/issues/580
|
|
||||||
replacement = get_replacement(url, ":2F:")
|
|
||||||
|
|
||||||
# The URL is url-decoded and encoded again to ensure it is correctly encoded and not twice.
|
|
||||||
base64 =
|
|
||||||
url
|
|
||||||
|> String.replace("%2F", replacement)
|
|
||||||
|> URI.decode()
|
|
||||||
|> URI.encode()
|
|
||||||
|> String.replace(replacement, "%2F")
|
|
||||||
|> Base.url_encode64(@base64_opts)
|
|
||||||
|
|
||||||
sig = :crypto.hmac(:sha, secret, base64)
|
sig = :crypto.hmac(:sha, secret, base64)
|
||||||
sig64 = sig |> Base.url_encode64(@base64_opts)
|
sig64 = sig |> Base.url_encode64(@base64_opts)
|
||||||
|
|
||||||
|
@ -80,12 +67,4 @@ def build_url(sig_base64, url_base64, filename \\ nil) do
|
||||||
|> Enum.filter(fn value -> value end)
|
|> Enum.filter(fn value -> value end)
|
||||||
|> Path.join()
|
|> Path.join()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp get_replacement(url, replacement) do
|
|
||||||
if String.contains?(url, replacement) do
|
|
||||||
get_replacement(url, replacement <> replacement)
|
|
||||||
else
|
|
||||||
replacement
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
|
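encode_url/1 drops the decode/re-encode round trip and simply signs the base64url-encoded URL with HMAC-SHA1. A sketch of the signing step under stated assumptions: `padding: false` is assumed to match the module's @base64_opts, and :crypto.mac/4 is used here in place of the older :crypto.hmac/3 the diff calls:

defmodule ProxySignSketch do
  @base64_opts [padding: false]

  def sign(url, secret) do
    payload = Base.url_encode64(url, @base64_opts)

    signature =
      :crypto.mac(:hmac, :sha, secret, payload)
      |> Base.url_encode64(@base64_opts)

    {signature, payload}
  end
end

# ProxySignSketch.sign("https://example.com/media/a%2Fb.png", "secret_key_base")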
@ -121,4 +121,6 @@ defp build_attachments(%{data: %{"attachment" => attachments}}) do
|
||||||
acc ++ rendered_tags
|
acc ++ rendered_tags
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp build_attachments(_), do: []
|
||||||
end
|
end
|
||||||
|
|
|
@ -117,6 +117,8 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp build_attachments(_id, _object), do: []
|
||||||
|
|
||||||
defp player_url(id) do
|
defp player_url(id) do
|
||||||
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice_player, id)
|
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice_player, id)
|
||||||
end
|
end
|
||||||
|
|
|
@ -162,7 +162,8 @@ def raw_nodeinfo do
|
||||||
accountActivationRequired: Config.get([:instance, :account_activation_required], false),
|
accountActivationRequired: Config.get([:instance, :account_activation_required], false),
|
||||||
invitesEnabled: Config.get([:instance, :invites_enabled], false),
|
invitesEnabled: Config.get([:instance, :invites_enabled], false),
|
||||||
features: features,
|
features: features,
|
||||||
restrictedNicknames: Config.get([Pleroma.User, :restricted_nicknames])
|
restrictedNicknames: Config.get([Pleroma.User, :restricted_nicknames]),
|
||||||
|
skipThreadContainment: Config.get([:instance, :skip_thread_containment], false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
|
@ -10,6 +10,7 @@ defmodule Pleroma.Web.OStatus.NoteHandler do
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
|
alias Pleroma.Web.Federator
|
||||||
alias Pleroma.Web.OStatus
|
alias Pleroma.Web.OStatus
|
||||||
alias Pleroma.Web.XML
|
alias Pleroma.Web.XML
|
||||||
|
|
||||||
|
@ -88,14 +89,15 @@ def add_external_url(note, entry) do
|
||||||
Map.put(note, "external_url", url)
|
Map.put(note, "external_url", url)
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_replied_to_activity(entry, in_reply_to) do
|
def fetch_replied_to_activity(entry, in_reply_to, options \\ []) do
|
||||||
with %Activity{} = activity <- Activity.get_create_by_object_ap_id(in_reply_to) do
|
with %Activity{} = activity <- Activity.get_create_by_object_ap_id(in_reply_to) do
|
||||||
activity
|
activity
|
||||||
else
|
else
|
||||||
_e ->
|
_e ->
|
||||||
with in_reply_to_href when not is_nil(in_reply_to_href) <-
|
with true <- Federator.allowed_incoming_reply_depth?(options[:depth]),
|
||||||
|
in_reply_to_href when not is_nil(in_reply_to_href) <-
|
||||||
XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry),
|
XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry),
|
||||||
{:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href) do
|
{:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href, options) do
|
||||||
activity
|
activity
|
||||||
else
|
else
|
||||||
_e -> nil
|
_e -> nil
|
||||||
|
@ -104,7 +106,7 @@ def fetch_replied_to_activity(entry, in_reply_to) do
|
||||||
end
|
end
|
||||||
|
|
||||||
# TODO: Clean this up a bit.
|
# TODO: Clean this up a bit.
|
||||||
def handle_note(entry, doc \\ nil) do
|
def handle_note(entry, doc \\ nil, options \\ []) do
|
||||||
with id <- XML.string_from_xpath("//id", entry),
|
with id <- XML.string_from_xpath("//id", entry),
|
||||||
activity when is_nil(activity) <- Activity.get_create_by_object_ap_id_with_object(id),
|
activity when is_nil(activity) <- Activity.get_create_by_object_ap_id_with_object(id),
|
||||||
[author] <- :xmerl_xpath.string('//author[1]', doc),
|
[author] <- :xmerl_xpath.string('//author[1]', doc),
|
||||||
|
@ -112,7 +114,8 @@ def handle_note(entry, doc \\ nil) do
|
||||||
content_html <- OStatus.get_content(entry),
|
content_html <- OStatus.get_content(entry),
|
||||||
cw <- OStatus.get_cw(entry),
|
cw <- OStatus.get_cw(entry),
|
||||||
in_reply_to <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry),
|
in_reply_to <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry),
|
||||||
in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to),
|
options <- Keyword.put(options, :depth, (options[:depth] || 0) + 1),
|
||||||
|
in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to, options),
|
||||||
in_reply_to_object <-
|
in_reply_to_object <-
|
||||||
(in_reply_to_activity && Object.normalize(in_reply_to_activity)) || nil,
|
(in_reply_to_activity && Object.normalize(in_reply_to_activity)) || nil,
|
||||||
in_reply_to <- (in_reply_to_object && in_reply_to_object.data["id"]) || in_reply_to,
|
in_reply_to <- (in_reply_to_object && in_reply_to_object.data["id"]) || in_reply_to,
|
||||||
|
|
|
@ -54,7 +54,7 @@ def remote_follow_path do
|
||||||
"#{Web.base_url()}/ostatus_subscribe?acct={uri}"
|
"#{Web.base_url()}/ostatus_subscribe?acct={uri}"
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(xml_string) do
|
def handle_incoming(xml_string, options \\ []) do
|
||||||
with doc when doc != :error <- parse_document(xml_string) do
|
with doc when doc != :error <- parse_document(xml_string) do
|
||||||
with {:ok, actor_user} <- find_make_or_update_user(doc),
|
with {:ok, actor_user} <- find_make_or_update_user(doc),
|
||||||
do: Pleroma.Instances.set_reachable(actor_user.ap_id)
|
do: Pleroma.Instances.set_reachable(actor_user.ap_id)
|
||||||
|
@ -91,10 +91,12 @@ def handle_incoming(xml_string) do
|
||||||
_ ->
|
_ ->
|
||||||
case object_type do
|
case object_type do
|
||||||
'http://activitystrea.ms/schema/1.0/note' ->
|
'http://activitystrea.ms/schema/1.0/note' ->
|
||||||
with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity
|
with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
|
||||||
|
do: activity
|
||||||
|
|
||||||
'http://activitystrea.ms/schema/1.0/comment' ->
|
'http://activitystrea.ms/schema/1.0/comment' ->
|
||||||
with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity
|
with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
|
||||||
|
do: activity
|
||||||
|
|
||||||
_ ->
|
_ ->
|
||||||
Logger.error("Couldn't parse incoming document")
|
Logger.error("Couldn't parse incoming document")
|
||||||
|
@ -359,7 +361,7 @@ def get_atom_url(body) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_activity_from_atom_url(url) do
|
def fetch_activity_from_atom_url(url, options \\ []) do
|
||||||
with true <- String.starts_with?(url, "http"),
|
with true <- String.starts_with?(url, "http"),
|
||||||
{:ok, %{body: body, status: code}} when code in 200..299 <-
|
{:ok, %{body: body, status: code}} when code in 200..299 <-
|
||||||
HTTP.get(
|
HTTP.get(
|
||||||
|
@ -367,7 +369,7 @@ def fetch_activity_from_atom_url(url) do
|
||||||
[{:Accept, "application/atom+xml"}]
|
[{:Accept, "application/atom+xml"}]
|
||||||
) do
|
) do
|
||||||
Logger.debug("Got document from #{url}, handling...")
|
Logger.debug("Got document from #{url}, handling...")
|
||||||
handle_incoming(body)
|
handle_incoming(body, options)
|
||||||
else
|
else
|
||||||
e ->
|
e ->
|
||||||
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
|
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
|
||||||
|
@ -375,13 +377,13 @@ def fetch_activity_from_atom_url(url) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_activity_from_html_url(url) do
|
def fetch_activity_from_html_url(url, options \\ []) do
|
||||||
Logger.debug("Trying to fetch #{url}")
|
Logger.debug("Trying to fetch #{url}")
|
||||||
|
|
||||||
with true <- String.starts_with?(url, "http"),
|
with true <- String.starts_with?(url, "http"),
|
||||||
{:ok, %{body: body}} <- HTTP.get(url, []),
|
{:ok, %{body: body}} <- HTTP.get(url, []),
|
||||||
{:ok, atom_url} <- get_atom_url(body) do
|
{:ok, atom_url} <- get_atom_url(body) do
|
||||||
fetch_activity_from_atom_url(atom_url)
|
fetch_activity_from_atom_url(atom_url, options)
|
||||||
else
|
else
|
||||||
e ->
|
e ->
|
||||||
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
|
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
|
||||||
|
@ -389,11 +391,11 @@ def fetch_activity_from_html_url(url) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_activity_from_url(url) do
|
def fetch_activity_from_url(url, options \\ []) do
|
||||||
with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do
|
with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do
|
||||||
{:ok, activities}
|
{:ok, activities}
|
||||||
else
|
else
|
||||||
_e -> fetch_activity_from_html_url(url)
|
_e -> fetch_activity_from_html_url(url, options)
|
||||||
end
|
end
|
||||||
rescue
|
rescue
|
||||||
e ->
|
e ->
|
||||||
|
|
|
@ -322,6 +322,10 @@ defmodule Pleroma.Web.Router do
|
||||||
|
|
||||||
patch("/accounts/update_credentials", MastodonAPIController, :update_credentials)
|
patch("/accounts/update_credentials", MastodonAPIController, :update_credentials)
|
||||||
|
|
||||||
|
patch("/accounts/update_avatar", MastodonAPIController, :update_avatar)
|
||||||
|
patch("/accounts/update_banner", MastodonAPIController, :update_banner)
|
||||||
|
patch("/accounts/update_background", MastodonAPIController, :update_background)
|
||||||
|
|
||||||
post("/statuses", MastodonAPIController, :post_status)
|
post("/statuses", MastodonAPIController, :post_status)
|
||||||
delete("/statuses/:id", MastodonAPIController, :delete_status)
|
delete("/statuses/:id", MastodonAPIController, :delete_status)
|
||||||
|
|
||||||
|
@ -726,6 +730,7 @@ defmodule Pleroma.Web.Router do
|
||||||
|
|
||||||
defmodule Fallback.RedirectController do
|
defmodule Fallback.RedirectController do
|
||||||
use Pleroma.Web, :controller
|
use Pleroma.Web, :controller
|
||||||
|
require Logger
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.Metadata
|
alias Pleroma.Web.Metadata
|
||||||
|
|
||||||
|
@ -752,7 +757,20 @@ def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id}
|
||||||
|
|
||||||
def redirector_with_meta(conn, params) do
|
def redirector_with_meta(conn, params) do
|
||||||
{:ok, index_content} = File.read(index_file_path())
|
{:ok, index_content} = File.read(index_file_path())
|
||||||
tags = Metadata.build_tags(params)
|
|
||||||
|
tags =
|
||||||
|
try do
|
||||||
|
Metadata.build_tags(params)
|
||||||
|
rescue
|
||||||
|
e ->
|
||||||
|
Logger.error(
|
||||||
|
"Metadata rendering for #{conn.request_path} failed.\n" <>
|
||||||
|
Exception.format(:error, e, __STACKTRACE__)
|
||||||
|
)
|
||||||
|
|
||||||
|
""
|
||||||
|
end
|
||||||
|
|
||||||
response = String.replace(index_content, "<!--server-generated-meta-->", tags)
|
response = String.replace(index_content, "<!--server-generated-meta-->", tags)
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|
|
|
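redirector_with_meta/2 now rescues failures from Metadata.build_tags/1, logs the formatted exception, and falls back to an empty tag string so the page still renders (the "metadata rendering errors" fix in the changelog). A sketch of that guard around an arbitrary builder function:

defmodule MetaFallbackSketch do
  require Logger

  # If the tag builder raises, log the formatted exception with its stacktrace and
  # return an empty string so the surrounding page can still be served.
  def safe_tags(build_fun) do
    build_fun.()
  rescue
    e ->
      Logger.error("Metadata rendering failed.\n" <> Exception.format(:error, e, __STACKTRACE__))
      ""
  end
end

# MetaFallbackSketch.safe_tags(fn -> raise "bad opengraph data" end) #=> ""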
@ -456,6 +456,16 @@ def resend_confirmation_email(conn, params) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def update_avatar(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
|
||||||
|
change = Changeset.change(user, %{avatar: nil})
|
||||||
|
{:ok, user} = User.update_and_set_cache(change)
|
||||||
|
CommonAPI.update(user)
|
||||||
|
|
||||||
|
conn
|
||||||
|
|> put_view(UserView)
|
||||||
|
|> render("show.json", %{user: user, for: user})
|
||||||
|
end
|
||||||
|
|
||||||
def update_avatar(%{assigns: %{user: user}} = conn, params) do
|
def update_avatar(%{assigns: %{user: user}} = conn, params) do
|
||||||
{:ok, object} = ActivityPub.upload(params, type: :avatar)
|
{:ok, object} = ActivityPub.upload(params, type: :avatar)
|
||||||
change = Changeset.change(user, %{avatar: object.data})
|
change = Changeset.change(user, %{avatar: object.data})
|
||||||
|
@ -467,6 +477,19 @@ def update_avatar(%{assigns: %{user: user}} = conn, params) do
|
||||||
|> render("show.json", %{user: user, for: user})
|
|> render("show.json", %{user: user, for: user})
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def update_banner(%{assigns: %{user: user}} = conn, %{"banner" => ""}) do
|
||||||
|
with new_info <- %{"banner" => %{}},
|
||||||
|
info_cng <- User.Info.profile_update(user.info, new_info),
|
||||||
|
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
|
||||||
|
{:ok, user} <- User.update_and_set_cache(changeset) do
|
||||||
|
CommonAPI.update(user)
|
||||||
|
response = %{url: nil} |> Jason.encode!()
|
||||||
|
|
||||||
|
conn
|
||||||
|
|> json_reply(200, response)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def update_banner(%{assigns: %{user: user}} = conn, params) do
|
def update_banner(%{assigns: %{user: user}} = conn, params) do
|
||||||
with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner),
|
with {:ok, object} <- ActivityPub.upload(%{"img" => params["banner"]}, type: :banner),
|
||||||
new_info <- %{"banner" => object.data},
|
new_info <- %{"banner" => object.data},
|
||||||
|
@ -482,6 +505,18 @@ def update_banner(%{assigns: %{user: user}} = conn, params) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def update_background(%{assigns: %{user: user}} = conn, %{"img" => ""}) do
|
||||||
|
with new_info <- %{"background" => %{}},
|
||||||
|
info_cng <- User.Info.profile_update(user.info, new_info),
|
||||||
|
changeset <- Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_cng),
|
||||||
|
{:ok, _user} <- User.update_and_set_cache(changeset) do
|
||||||
|
response = %{url: nil} |> Jason.encode!()
|
||||||
|
|
||||||
|
conn
|
||||||
|
|> json_reply(200, response)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def update_background(%{assigns: %{user: user}} = conn, params) do
|
def update_background(%{assigns: %{user: user}} = conn, params) do
|
||||||
with {:ok, object} <- ActivityPub.upload(params, type: :background),
|
with {:ok, object} <- ActivityPub.upload(params, type: :background),
|
||||||
new_info <- %{"background" => object.data},
|
new_info <- %{"background" => object.data},
|
||||||
|
|
59
mix.exs
|
@ -109,7 +109,6 @@ defp deps do
|
||||||
{:phoenix_html, "~> 2.10"},
|
{:phoenix_html, "~> 2.10"},
|
||||||
{:calendar, "~> 0.17.4"},
|
{:calendar, "~> 0.17.4"},
|
||||||
{:cachex, "~> 3.0.2"},
|
{:cachex, "~> 3.0.2"},
|
||||||
{:httpoison, "~> 1.2.0"},
|
|
||||||
{:poison, "~> 3.0", override: true},
|
{:poison, "~> 3.0", override: true},
|
||||||
{:tesla, "~> 1.2"},
|
{:tesla, "~> 1.2"},
|
||||||
{:jason, "~> 1.0"},
|
{:jason, "~> 1.0"},
|
||||||
|
@ -154,7 +153,8 @@ defp deps do
|
||||||
{:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
|
{:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
|
||||||
{:ex_rated, "~> 1.3"},
|
{:ex_rated, "~> 1.3"},
|
||||||
{:plug_static_index_html, "~> 1.0.0"},
|
{:plug_static_index_html, "~> 1.0.0"},
|
||||||
{:excoveralls, "~> 0.11.1", only: :test}
|
{:excoveralls, "~> 0.11.1", only: :test},
|
||||||
|
{:mox, "~> 0.5", only: :test}
|
||||||
] ++ oauth_deps()
|
] ++ oauth_deps()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -177,10 +177,14 @@ defp aliases do
|
||||||
# Builds a version string made of:
|
# Builds a version string made of:
|
||||||
# * the application version
|
# * the application version
|
||||||
# * a pre-release if ahead of the tag: the describe string (-count-commithash)
|
# * a pre-release if ahead of the tag: the describe string (-count-commithash)
|
||||||
# * build info:
|
# * branch name
|
||||||
|
# * build metadata:
|
||||||
# * a build name if `PLEROMA_BUILD_NAME` or `:pleroma, :build_name` is defined
|
# * a build name if `PLEROMA_BUILD_NAME` or `:pleroma, :build_name` is defined
|
||||||
# * the mix environment if different than prod
|
# * the mix environment if different than prod
|
||||||
defp version(version) do
|
defp version(version) do
|
||||||
|
identifier_filter = ~r/[^0-9a-z\-]+/i
|
||||||
|
|
||||||
|
# Pre-release version, denoted from patch version with a hyphen
|
||||||
{git_tag, git_pre_release} =
|
{git_tag, git_pre_release} =
|
||||||
with {tag, 0} <-
|
with {tag, 0} <-
|
||||||
System.cmd("git", ["describe", "--tags", "--abbrev=0"], stderr_to_stdout: true),
|
System.cmd("git", ["describe", "--tags", "--abbrev=0"], stderr_to_stdout: true),
|
||||||
|
@ -201,6 +205,19 @@ defp version(version) do
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Branch name as pre-release version component, denoted with a dot
|
||||||
|
branch_name =
|
||||||
|
with {branch_name, 0} <- System.cmd("git", ["rev-parse", "--abbrev-ref", "HEAD"]),
|
||||||
|
branch_name <- System.get_env("PLEROMA_BUILD_BRANCH") || branch_name,
|
||||||
|
true <- branch_name != "master" do
|
||||||
|
branch_name =
|
||||||
|
branch_name
|
||||||
|
|> String.trim()
|
||||||
|
|> String.replace(identifier_filter, "-")
|
||||||
|
|
||||||
|
"." <> branch_name
|
||||||
|
end
|
||||||
|
|
||||||
build_name =
|
build_name =
|
||||||
cond do
|
cond do
|
||||||
name = Application.get_env(:pleroma, :build_name) -> name
|
name = Application.get_env(:pleroma, :build_name) -> name
|
||||||
|
@ -209,28 +226,26 @@ defp version(version) do
|
||||||
end
|
end
|
||||||
|
|
||||||
env_name = if Mix.env() != :prod, do: to_string(Mix.env())
|
env_name = if Mix.env() != :prod, do: to_string(Mix.env())
|
||||||
|
env_override = System.get_env("PLEROMA_BUILD_ENV")
|
||||||
|
|
||||||
build =
|
env_name =
|
||||||
[build_name, env_name]
|
case env_override do
|
||||||
|> Enum.filter(fn string -> string && string != "" end)
|
nil -> env_name
|
||||||
|> Enum.join("-")
|
env_override when env_override in ["", "prod"] -> nil
|
||||||
|> (fn
|
env_override -> env_override
|
||||||
"" -> nil
|
|
||||||
string -> "+" <> string
|
|
||||||
end).()
|
|
||||||
|
|
||||||
branch_name =
|
|
||||||
with {branch_name, 0} <- System.cmd("git", ["rev-parse", "--abbrev-ref", "HEAD"]),
|
|
||||||
branch_name <- System.get_env("PLEROMA_BUILD_BRANCH") || branch_name,
|
|
||||||
true <- branch_name != "master" do
|
|
||||||
branch_name =
|
|
||||||
String.trim(branch_name)
|
|
||||||
|> String.replace(~r/[^0-9a-z\-\.]+/i, "-")
|
|
||||||
|
|
||||||
"-" <> branch_name
|
|
||||||
end
|
end
|
||||||
|
|
||||||
[version, git_pre_release, branch_name, build]
|
# Build metadata, denoted with a plus sign
|
||||||
|
build_metadata =
|
||||||
|
[build_name, env_name]
|
||||||
|
|> Enum.filter(fn string -> string && string != "" end)
|
||||||
|
|> Enum.join(".")
|
||||||
|
|> (fn
|
||||||
|
"" -> nil
|
||||||
|
string -> "+" <> String.replace(string, identifier_filter, "-")
|
||||||
|
end).()
|
||||||
|
|
||||||
|
[version, git_pre_release, branch_name, build_metadata]
|
||||||
|> Enum.filter(fn string -> string && string != "" end)
|
|> Enum.filter(fn string -> string && string != "" end)
|
||||||
|> Enum.join()
|
|> Enum.join()
|
||||||
end
|
end
|
||||||
|
|
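The mix.exs change above moves the branch name into the pre-release portion (prefixed with a dot) and folds build name and environment into SemVer build metadata after a plus sign, both run through the identifier filter. A standalone sketch of the assembly; the sample inputs in the trailing comment are illustrative only:

defmodule VersionStringSketch do
  @identifier_filter ~r/[^0-9a-z\-]+/i

  # `pre_release` stands in for the git-describe suffix; branch, build_name and env may be nil.
  def build(version, pre_release, branch, build_name, env) do
    branch_part =
      if branch in [nil, "", "master"], do: nil, else: "." <> sanitize(branch)

    metadata =
      [build_name, env]
      |> Enum.reject(&(&1 in [nil, ""]))
      |> Enum.join(".")
      |> case do
        "" -> nil
        joined -> "+" <> sanitize(joined)
      end

    [version, pre_release, branch_part, metadata]
    |> Enum.reject(&(&1 in [nil, ""]))
    |> Enum.join()
  end

  defp sanitize(string), do: String.replace(string, @identifier_filter, "-")
end

# VersionStringSketch.build("1.0.0", "-12-gabc1234", "develop", nil, "dev")
# #=> "1.0.0-12-gabc1234.develop+dev"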
3
mix.lock
|
@ -57,6 +57,7 @@
|
||||||
"mochiweb": {:hex, :mochiweb, "2.18.0", "eb55f1db3e6e960fac4e6db4e2db9ec3602cc9f30b86cd1481d56545c3145d2e", [:rebar3], [], "hexpm"},
|
"mochiweb": {:hex, :mochiweb, "2.18.0", "eb55f1db3e6e960fac4e6db4e2db9ec3602cc9f30b86cd1481d56545c3145d2e", [:rebar3], [], "hexpm"},
|
||||||
"mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
|
"mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
|
||||||
"mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
|
"mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
|
||||||
|
"mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
|
||||||
"nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
|
"nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
|
||||||
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
|
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
|
||||||
"pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
|
"pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
|
||||||
|
@ -71,14 +72,12 @@
|
||||||
"plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
|
"plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
|
||||||
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
|
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
|
||||||
"poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
|
"poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
|
||||||
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
|
|
||||||
"postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
|
"postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
|
||||||
"prometheus": {:hex, :prometheus, "4.2.2", "a830e77b79dc6d28183f4db050a7cac926a6c58f1872f9ef94a35cd989aceef8", [:mix, :rebar3], [], "hexpm"},
|
"prometheus": {:hex, :prometheus, "4.2.2", "a830e77b79dc6d28183f4db050a7cac926a6c58f1872f9ef94a35cd989aceef8", [:mix, :rebar3], [], "hexpm"},
|
||||||
"prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
|
"prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
|
||||||
"prometheus_ex": {:hex, :prometheus_ex, "3.0.5", "fa58cfd983487fc5ead331e9a3e0aa622c67232b3ec71710ced122c4c453a02f", [:mix], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
|
"prometheus_ex": {:hex, :prometheus_ex, "3.0.5", "fa58cfd983487fc5ead331e9a3e0aa622c67232b3ec71710ced122c4c453a02f", [:mix], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
|
||||||
"prometheus_phoenix": {:hex, :prometheus_phoenix, "1.2.1", "964a74dfbc055f781d3a75631e06ce3816a2913976d1df7830283aa3118a797a", [:mix], [{:phoenix, "~> 1.3", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
|
"prometheus_phoenix": {:hex, :prometheus_phoenix, "1.2.1", "964a74dfbc055f781d3a75631e06ce3816a2913976d1df7830283aa3118a797a", [:mix], [{:phoenix, "~> 1.3", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
|
||||||
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
|
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
|
||||||
"prometheus_process_collector": {:hex, :prometheus_process_collector, "1.4.3", "657386e8f142fc817347d95c1f3a05ab08710f7df9e7f86db6facaed107ed929", [:rebar3], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
|
|
||||||
"quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
|
"quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
|
||||||
"quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
|
"quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
|
||||||
"ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
|
"ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreatePleroma.User do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:users) do
|
create_if_not_exists table(:users) do
|
||||||
add :email, :string
|
add :email, :string
|
||||||
add :password_hash, :string
|
add :password_hash, :string
|
||||||
add :name, :string
|
add :name, :string
|
||||||
|
|
|
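This and the following migration hunks systematically switch to create_if_not_exists / drop_if_exists, so old migrations can be re-run against an already-populated schema without erroring. A minimal example migration in the same style; table and module names are invented:

defmodule ExampleApp.Repo.Migrations.CreateWidgetsIdempotent do
  use Ecto.Migration

  def change do
    # Re-running this against a database that already has the table and index is a no-op.
    create_if_not_exists table(:widgets) do
      add :name, :string
      timestamps()
    end

    create_if_not_exists index(:widgets, [:name])
  end
end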
@ -2,13 +2,13 @@ defmodule Pleroma.Repo.Migrations.CreatePleroma.Activity do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:activities) do
|
create_if_not_exists table(:activities) do
|
||||||
add :data, :map
|
add :data, :map
|
||||||
|
|
||||||
timestamps()
|
timestamps()
|
||||||
end
|
end
|
||||||
|
|
||||||
create index(:activities, [:data], using: :gin)
|
create_if_not_exists index(:activities, [:data], using: :gin)
|
||||||
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreatePleroma.Object do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:objects) do
|
create_if_not_exists table(:objects) do
|
||||||
add :data, :map
|
add :data, :map
|
||||||
|
|
||||||
timestamps()
|
timestamps()
|
||||||
|
|
|
@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddIndexToObjects do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create index(:objects, [:data], using: :gin)
|
create_if_not_exists index(:objects, [:data], using: :gin)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.AddUniqueIndexToEmailAndNickname do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create unique_index(:users, [:email])
|
create_if_not_exists unique_index(:users, [:email])
|
||||||
create unique_index(:users, [:nickname])
|
create_if_not_exists unique_index(:users, [:nickname])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateWebsubServerSubscription do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:websub_server_subscriptions) do
|
create_if_not_exists table(:websub_server_subscriptions) do
|
||||||
add :topic, :string
|
add :topic, :string
|
||||||
add :callback, :string
|
add :callback, :string
|
||||||
add :secret, :string
|
add :secret, :string
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateWebsubClientSubscription do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:websub_client_subscriptions) do
|
create_if_not_exists table(:websub_client_subscriptions) do
|
||||||
add :topic, :string
|
add :topic, :string
|
||||||
add :secret, :string
|
add :secret, :string
|
||||||
add :valid_until, :naive_datetime_usec
|
add :valid_until, :naive_datetime_usec
|
||||||
|
|
|
@ -1,10 +1,12 @@
|
||||||
defmodule Pleroma.Repo.Migrations.AddIdContraintsToActivitiesAndObjectsPartTwo do
|
defmodule Pleroma.Repo.Migrations.AddIdContraintsToActivitiesAndObjectsPartTwo do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def up do
|
||||||
drop_if_exists index(:objects, ["(data->>\"id\")"], name: :objects_unique_apid_index)
|
drop_if_exists index(:objects, ["(data->>\"id\")"], name: :objects_unique_apid_index)
|
||||||
drop_if_exists index(:activities, ["(data->>\"id\")"], name: :activities_unique_apid_index)
|
drop_if_exists index(:activities, ["(data->>\"id\")"], name: :activities_unique_apid_index)
|
||||||
create unique_index(:objects, ["(data->>'id')"], name: :objects_unique_apid_index)
|
create_if_not_exists unique_index(:objects, ["(data->>'id')"], name: :objects_unique_apid_index)
|
||||||
create unique_index(:activities, ["(data->>'id')"], name: :activities_unique_apid_index)
|
create_if_not_exists unique_index(:activities, ["(data->>'id')"], name: :activities_unique_apid_index)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def down, do: :ok
|
||||||
end
|
end
|
||||||
|
|
|
@ -6,6 +6,6 @@ def change do
|
||||||
add :local, :boolean, default: true
|
add :local, :boolean, default: true
|
||||||
end
|
end
|
||||||
|
|
||||||
create index(:activities, [:local])
|
create_if_not_exists index(:activities, [:local])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddUniqueIndexToAPID do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create unique_index(:users, [:ap_id])
|
create_if_not_exists unique_index(:users, [:ap_id])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,19 +1,31 @@
|
||||||
defmodule Pleroma.Repo.Migrations.CaseInsensivtivity do
|
defmodule Pleroma.Repo.Migrations.CaseInsensivtivity do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
|
# Two-steps alters are intentional.
|
||||||
|
# When alter of 2 columns is done in a single operation,
|
||||||
|
# inconsistent failures happen because of index on `email` column.
|
||||||
|
|
||||||
def up do
|
def up do
|
||||||
execute ("create extension if not exists citext")
|
execute("create extension if not exists citext")
|
||||||
|
|
||||||
alter table(:users) do
|
alter table(:users) do
|
||||||
modify :email, :citext
|
modify(:email, :citext)
|
||||||
modify :nickname, :citext
|
end
|
||||||
|
|
||||||
|
alter table(:users) do
|
||||||
|
modify(:nickname, :citext)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def down do
|
def down do
|
||||||
alter table(:users) do
|
alter table(:users) do
|
||||||
modify :email, :string
|
modify(:email, :string)
|
||||||
modify :nickname, :string
|
|
||||||
end
|
end
|
||||||
execute ("drop extension if exists citext")
|
|
||||||
|
alter table(:users) do
|
||||||
|
modify(:nickname, :string)
|
||||||
|
end
|
||||||
|
|
||||||
|
execute("drop extension if exists citext")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
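The citext migration above splits the email and nickname type changes into two separate alter blocks; per the added comment, altering both indexed columns in one operation caused intermittent failures. A sketch of the same two-step shape with invented table and column names:

defmodule ExampleApp.Repo.Migrations.CitextColumnsTwoStep do
  use Ecto.Migration

  def up do
    execute("create extension if not exists citext")

    # Two separate alter blocks on purpose: changing both indexed columns in a single
    # operation is what the original comment identifies as the source of failures.
    alter table(:accounts) do
      modify(:email, :citext)
    end

    alter table(:accounts) do
      modify(:nickname, :citext)
    end
  end

  def down do
    alter table(:accounts) do
      modify(:email, :string)
    end

    alter table(:accounts) do
      modify(:nickname, :string)
    end

    execute("drop extension if exists citext")
  end
end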
@ -1,9 +1,16 @@
|
||||||
defmodule Pleroma.Repo.Migrations.LongerBios do
|
defmodule Pleroma.Repo.Migrations.LongerBios do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def up do
|
||||||
alter table(:users) do
|
alter table(:users) do
|
||||||
modify :bio, :text
|
modify :bio, :text
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def down do
|
||||||
|
alter table(:users) do
|
||||||
|
modify :bio, :string
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
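LongerBios (and several data migrations further down) replace change/0 with an explicit up/down pair, either restoring the previous column type or declaring the rollback a deliberate no-op via `def down, do: :ok`. A small reversible example in that style; names are invented:

defmodule ExampleApp.Repo.Migrations.WidenNotesColumn do
  use Ecto.Migration

  # Ecto cannot infer how to roll back modify/2, so up and down are written out explicitly.
  def up do
    alter table(:widgets) do
      modify :notes, :text
    end
  end

  def down do
    alter table(:widgets) do
      modify :notes, :string
    end
  end
end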
@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.RemoveActivitiesIndex do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
drop index(:activities, [:data])
|
drop_if_exists index(:activities, [:data])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -3,6 +3,6 @@ defmodule Pleroma.Repo.Migrations.AddObjectActivityIndex do
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
# This was wrong, now a noop
|
# This was wrong, now a noop
|
||||||
# create index(:objects, ["(data->'object'->>'id')", "(data->>'type')"], name: :activities_create_objects_index)
|
# create_if_not_exists index(:objects, ["(data->'object'->>'id')", "(data->>'type')"], name: :activities_create_objects_index)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -3,6 +3,6 @@ defmodule Pleroma.Repo.Migrations.AddObjectActivityIndexPartTwo do
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
drop_if_exists index(:objects, ["(data->'object'->>'id')", "(data->>'type')"], name: :activities_create_objects_index)
|
drop_if_exists index(:objects, ["(data->'object'->>'id')", "(data->>'type')"], name: :activities_create_objects_index)
|
||||||
create index(:activities, ["(data->'object'->>'id')", "(data->>'type')"], name: :activities_create_objects_index)
|
create_if_not_exists index(:activities, ["(data->'object'->>'id')", "(data->>'type')"], name: :activities_create_objects_index)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddActorIndexToActivity do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create index(:activities, ["(data->>'actor')", "inserted_at desc"], name: :activities_actor_index)
|
create_if_not_exists index(:activities, ["(data->>'actor')", "inserted_at desc"], name: :activities_actor_index)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.AddMastodonApps do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:apps) do
|
create_if_not_exists table(:apps) do
|
||||||
add :client_name, :string
|
add :client_name, :string
|
||||||
add :redirect_uris, :string
|
add :redirect_uris, :string
|
||||||
add :scopes, :string
|
add :scopes, :string
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateOAuthAuthorizations do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:oauth_authorizations) do
|
create_if_not_exists table(:oauth_authorizations) do
|
||||||
add :app_id, references(:apps)
|
add :app_id, references(:apps)
|
||||||
add :user_id, references(:users)
|
add :user_id, references(:users)
|
||||||
add :token, :string
|
add :token, :string
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateOAuthToken do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:oauth_tokens) do
|
create_if_not_exists table(:oauth_tokens) do
|
||||||
add :app_id, references(:apps)
|
add :app_id, references(:apps)
|
||||||
add :user_id, references(:users)
|
add :user_id, references(:users)
|
||||||
add :token, :string
|
add :token, :string
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateNotifications do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:notifications) do
|
create_if_not_exists table(:notifications) do
|
||||||
add :user_id, references(:users, on_delete: :delete_all)
|
add :user_id, references(:users, on_delete: :delete_all)
|
||||||
add :activity_id, references(:activities, on_delete: :delete_all)
|
add :activity_id, references(:activities, on_delete: :delete_all)
|
||||||
add :seen, :boolean, default: false
|
add :seen, :boolean, default: false
|
||||||
|
@ -10,6 +10,6 @@ def change do
|
||||||
timestamps()
|
timestamps()
|
||||||
end
|
end
|
||||||
|
|
||||||
create index(:notifications, [:user_id])
|
create_if_not_exists index(:notifications, [:user_id])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreatePasswordResetTokens do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:password_reset_tokens) do
|
create_if_not_exists table(:password_reset_tokens) do
|
||||||
add :token, :string
|
add :token, :string
|
||||||
add :user_id, references(:users)
|
add :user_id, references(:users)
|
||||||
add :used, :boolean, default: false
|
add :used, :boolean, default: false
|
||||||
|
|
|
@ -12,7 +12,7 @@ def up do
|
||||||
end
|
end
|
||||||
|
|
||||||
def down do
|
def down do
|
||||||
drop index(:activities, [:actor, "id DESC NULLS LAST"])
|
drop_if_exists index(:activities, [:actor, "id DESC NULLS LAST"])
|
||||||
alter table(:activities) do
|
alter table(:activities) do
|
||||||
remove :actor
|
remove :actor
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddLocalIndexToUser do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create index(:users, [:local])
|
create_if_not_exists index(:users, [:local])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -6,6 +6,6 @@ def change do
|
||||||
add :recipients, {:array, :string}
|
add :recipients, {:array, :string}
|
||||||
end
|
end
|
||||||
|
|
||||||
create index(:activities, [:recipients], using: :gin)
|
create_if_not_exists index(:activities, [:recipients], using: :gin)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -18,4 +18,6 @@ def up do
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def down, do: :ok
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
defmodule Pleroma.Repo.Migrations.MakeFollowingPostgresArray do
|
defmodule Pleroma.Repo.Migrations.MakeFollowingPostgresArray do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def up do
|
||||||
alter table(:users) do
|
alter table(:users) do
|
||||||
add :following_temp, {:array, :string}
|
add :following_temp, {:array, :string}
|
||||||
end
|
end
|
||||||
|
@ -15,4 +15,6 @@ def change do
|
||||||
end
|
end
|
||||||
rename table(:users), :following_temp, to: :following
|
rename table(:users), :following_temp, to: :following
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def down, do: :ok
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.DropLocalIndexOnActivities do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
drop index(:users, [:local])
|
drop_if_exists index(:users, [:local])
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.ActuallyDropLocalIndex do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create index(:users, [:local])
|
create_if_not_exists index(:users, [:local])
|
||||||
drop_if_exists index("activities", :local)
|
drop_if_exists index("activities", :local)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateLists do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
def change do
|
def change do
|
||||||
create table(:lists) do
|
create_if_not_exists table(:lists) do
|
||||||
add :user_id, references(:users, on_delete: :delete_all)
|
add :user_id, references(:users, on_delete: :delete_all)
|
||||||
add :title, :string
|
add :title, :string
|
||||||
add :following, {:array, :string}
|
add :following, {:array, :string}
|
||||||
|
@@ -10,6 +10,6 @@ def change do
       timestamps()
     end

-    create index(:lists, [:user_id])
+    create_if_not_exists index(:lists, [:user_id])
   end
 end

@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.CreateUserTrigramIndex do
   use Ecto.Migration

   def change do
-    create index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist)
+    create_if_not_exists index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist)
   end
 end

@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddListFollowIndex do
   use Ecto.Migration

   def change do
-    create index(:lists, [:following])
+    create_if_not_exists index(:lists, [:following])
   end
 end

@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateUserInviteTokens do
   use Ecto.Migration

   def change do
-    create table(:user_invite_tokens) do
+    create_if_not_exists table(:user_invite_tokens) do
       add :token, :string
       add :used, :boolean, default: false

@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateFilters do
   use Ecto.Migration

   def change do
-    create table(:filters) do
+    create_if_not_exists table(:filters) do
       add :user_id, references(:users, on_delete: :delete_all)
       add :filter_id, :integer
       add :hide, :boolean

@@ -14,7 +14,7 @@ def change do
       timestamps()
     end

-    create index(:filters, [:user_id])
-    create index(:filters, [:phrase], where: "hide = true", name: :hided_phrases_index)
+    create_if_not_exists index(:filters, [:user_id])
+    create_if_not_exists index(:filters, [:phrase], where: "hide = true", name: :hided_phrases_index)
   end
 end

@@ -7,7 +7,7 @@ def change do
       add :recipients_cc, {:array, :string}
     end

-    create index(:activities, [:recipients_to], using: :gin)
-    create index(:activities, [:recipients_cc], using: :gin)
+    create_if_not_exists index(:activities, [:recipients_to], using: :gin)
+    create_if_not_exists index(:activities, [:recipients_cc], using: :gin)
   end
 end

@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.ActivitiesAddToCcIndices do
   use Ecto.Migration

   def change do
-    create index(:activities, ["(data->'to')"], name: :activities_to_index, using: :gin)
-    create index(:activities, ["(data->'cc')"], name: :activities_cc_index, using: :gin)
+    create_if_not_exists index(:activities, ["(data->'to')"], name: :activities_to_index, using: :gin)
+    create_if_not_exists index(:activities, ["(data->'cc')"], name: :activities_cc_index, using: :gin)
   end
 end

@@ -1,10 +1,17 @@
 defmodule Pleroma.Repo.Migrations.RemoveRecipientsToAndCcFieldsFromActivities do
   use Ecto.Migration

-  def change do
+  def up do
     alter table(:activities) do
       remove :recipients_to
       remove :recipients_cc
     end
   end
+
+  def down do
+    alter table(:activities) do
+      add :recipients_to, {:array, :string}
+      add :recipients_cc, {:array, :string}
+    end
+  end
 end

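Where the old state can actually be reconstructed, the commit writes a real `down/0` instead of a no-op, restating the column types that a bare `remove/1` cannot recover. As an aside, and assuming Ecto 3.0 or later, passing the type to `remove` is an alternative that keeps a single reversible `change/0` (the columns come back on rollback, though of course not their data):

def change do
  alter table(:activities) do
    # Supplying the type makes `remove` reversible, so Ecto can
    # re-add the columns automatically on rollback.
    remove :recipients_to, {:array, :string}
    remove :recipients_cc, {:array, :string}
  end
end
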
@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.UsersAddIsModeratorIndex do
   use Ecto.Migration

   def change do
-    create index(:users, ["(info->'is_moderator')"], name: :users_is_moderator_index, using: :gin)
+    create_if_not_exists index(:users, ["(info->'is_moderator')"], name: :users_is_moderator_index, using: :gin)
   end
 end

@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreatePushSubscriptions do
   use Ecto.Migration

   def change do
-    create table("push_subscriptions") do
+    create_if_not_exists table("push_subscriptions") do
       add :user_id, references("users", on_delete: :delete_all)
       add :token_id, references("oauth_tokens", on_delete: :delete_all)
       add :endpoint, :string

@@ -13,6 +13,6 @@ def change do
       timestamps()
     end

-    create index("push_subscriptions", [:user_id, :token_id], unique: true)
+    create_if_not_exists index("push_subscriptions", [:user_id, :token_id], unique: true)
   end
 end

@@ -1,7 +1,9 @@
 defmodule Pleroma.Repo.Migrations.AddUUIDExtension do
   use Ecto.Migration

-  def change do
+  def up do
     execute("create extension if not exists \"uuid-ossp\"")
   end
+
+  def down, do: :ok
 end

@@ -1,7 +1,9 @@
 defmodule Pleroma.Repo.Migrations.AddUUIDsToUserInfo do
   use Ecto.Migration

-  def change do
+  def up do
     execute("update users set info = jsonb_set(info, '{\"id\"}', to_jsonb(uuid_generate_v4()))")
   end
+
+  def down, do: :ok
 end

@@ -6,6 +6,6 @@ def change do
       add :tags, {:array, :string}
     end

-    create index(:users, [:tags], using: :gin)
+    create_if_not_exists index(:users, [:tags], using: :gin)
   end
 end

@@ -12,7 +12,7 @@ defmodule Pleroma.Repo.Migrations.UsersAndActivitiesFlakeId do
   # 4- update relation pkeys with the new ids
   # 5- rename the temporary column to id
   # 6- re-create the constraints
-  def change do
+  def up do
     # Old serial int ids are transformed to 128bits with extra padding.
     # The application (in `Pleroma.FlakeId`) handles theses IDs properly as integers; to keep compatibility
     # with previously issued ids.

@@ -75,6 +75,8 @@ def change do
     stop_clippy_heartbeats(clippy)
   end

+  def down, do: :ok
+
   defp start_clippy_heartbeats() do
     count = from(a in "activities", select: count(a.id)) |> Repo.one!

@@ -37,12 +37,12 @@ def up do
   end

   def down do
-    drop(
+    drop_if_exists(
       index(:activities, ["activity_visibility(actor, recipients, data)"],
         name: :activities_visibility_index
       )
     )

-    execute("drop function activity_visibility(actor varchar, recipients varchar[], data jsonb)")
+    execute("drop function if exists activity_visibility(actor varchar, recipients varchar[], data jsonb)")
   end
 end

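Raw SQL statements get the equivalent guard written into the SQL itself (`drop function if exists ...`). For raw SQL that lives in a `change/0` callback, `execute/2` can carry a guarded command for each direction; a sketch with a hypothetical helper function, not part of this commit:

defmodule MyApp.Repo.Migrations.AddExampleFunction do
  use Ecto.Migration

  def change do
    execute(
      # up: `or replace` keeps a re-run from failing
      "create or replace function example_one() returns integer as $$ select 1 $$ language sql",
      # down: `if exists` keeps a repeated rollback from failing
      "drop function if exists example_one()"
    )
  end
end
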
@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateUserFtsIndex do
   use Ecto.Migration

   def change do
-    create index(
+    create_if_not_exists index(
       :users,
       [
         """

@@ -4,7 +4,7 @@ defmodule Pleroma.Repo.Migrations.FixUserTrigramIndex do
   def up do
     drop_if_exists(index(:users, [], name: :users_trigram_index))

-    create(
+    create_if_not_exists(
       index(:users, ["(trim(nickname || ' ' || coalesce(name, ''))) gist_trgm_ops"],
         name: :users_trigram_index,
         using: :gist

@@ -15,7 +15,7 @@ def up do
   def down do
     drop_if_exists(index(:users, [], name: :users_trigram_index))

-    create(
+    create_if_not_exists(
       index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist)
     )
   end

@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.UsersAddIsAdminIndex do
   use Ecto.Migration

   def change do
-    create(index(:users, ["(info->'is_admin')"], name: :users_is_admin_index, using: :gin))
+    create_if_not_exists(index(:users, ["(info->'is_admin')"], name: :users_is_admin_index, using: :gin))
   end
 end

@@ -2,14 +2,14 @@ defmodule Pleroma.Repo.Migrations.CreateInstances do
   use Ecto.Migration

   def change do
-    create table(:instances) do
+    create_if_not_exists table(:instances) do
       add :host, :string
       add :unreachable_since, :naive_datetime_usec

       timestamps()
     end

-    create unique_index(:instances, [:host])
-    create index(:instances, [:unreachable_since])
+    create_if_not_exists unique_index(:instances, [:host])
+    create_if_not_exists index(:instances, [:unreachable_since])
   end
 end

@@ -1,9 +1,11 @@
 defmodule Pleroma.Repo.Migrations.FixInfoIds do
   use Ecto.Migration

-  def change do
+  def up do
     execute(
       "update users set info = jsonb_set(info, '{id}', to_jsonb(uuid_generate_v4())) where info->'id' is null;"
     )
   end
+
+  def down, do: :ok
 end

@@ -1,9 +1,15 @@
 defmodule Pleroma.Repo.Migrations.ChangePushSubscriptionsVarchar do
   use Ecto.Migration

-  def change do
+  def up do
     alter table(:push_subscriptions) do
       modify(:endpoint, :varchar)
     end
   end
+
+  def down do
+    alter table(:push_subscriptions) do
+      modify(:endpoint, :string)
+    end
+  end
 end

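Because `modify/2` on its own cannot be reversed automatically, this migration spells out both directions by hand. Assuming Ecto 3.0 or later, the `:from` option is an equivalent, shorter way to keep a single reversible `change/0`; a sketch only, not how the commit does it:

def change do
  alter table(:push_subscriptions) do
    # `:from` tells Ecto which type to restore on rollback,
    # so no explicit down/0 is needed.
    modify(:endpoint, :varchar, from: :string)
  end
end
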
@@ -19,7 +19,7 @@ def up do
   end

   def down do
-    drop(
+    drop_if_exists(
       index(:activities, ["activity_visibility(actor, recipients, data)", "id DESC"],
         name: :activities_visibility_index,
         concurrently: true,

@@ -2,11 +2,11 @@ defmodule Pleroma.Repo.Migrations.CreateThreadMutes do
   use Ecto.Migration

   def change do
-    create table(:thread_mutes) do
+    create_if_not_exists table(:thread_mutes) do
       add :user_id, references(:users, type: :uuid, on_delete: :delete_all)
       add :context, :string
     end

-    create unique_index(:thread_mutes, [:user_id, :context], name: :unique_index)
+    create_if_not_exists unique_index(:thread_mutes, [:user_id, :context], name: :unique_index)
   end
 end

@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateRegistrations do
   use Ecto.Migration

   def change do
-    create table(:registrations, primary_key: false) do
+    create_if_not_exists table(:registrations, primary_key: false) do
       add :id, :uuid, primary_key: true
       add :user_id, references(:users, type: :uuid, on_delete: :delete_all)
       add :provider, :string

@@ -12,7 +12,7 @@ def change do
       timestamps()
     end

-    create unique_index(:registrations, [:provider, :uid])
-    create unique_index(:registrations, [:user_id, :provider, :uid])
+    create_if_not_exists unique_index(:registrations, [:provider, :uid])
+    create_if_not_exists unique_index(:registrations, [:user_id, :provider, :uid])
   end
 end

@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.CreateNotificationIdIndex do
   use Ecto.Migration

   def change do
-    create index(:notifications, ["id desc nulls last"])
+    create_if_not_exists index(:notifications, ["id desc nulls last"])
   end
 end

@@ -2,7 +2,7 @@ defmodule Pleroma.Repo.Migrations.CreateScheduledActivities do
   use Ecto.Migration

   def change do
-    create table(:scheduled_activities) do
+    create_if_not_exists table(:scheduled_activities) do
       add(:user_id, references(:users, type: :uuid, on_delete: :delete_all))
       add(:scheduled_at, :naive_datetime, null: false)
       add(:params, :map, null: false)

@@ -10,7 +10,7 @@ def change do
       timestamps()
     end

-    create(index(:scheduled_activities, [:scheduled_at]))
-    create(index(:scheduled_activities, [:user_id]))
+    create_if_not_exists(index(:scheduled_activities, [:scheduled_at]))
+    create_if_not_exists(index(:scheduled_activities, [:user_id]))
   end
 end

@@ -2,8 +2,8 @@ defmodule Pleroma.Repo.Migrations.AddOauthTokenIndexes do
   use Ecto.Migration

   def change do
-    create(unique_index(:oauth_tokens, [:token]))
-    create(index(:oauth_tokens, [:app_id]))
-    create(index(:oauth_tokens, [:user_id]))
+    create_if_not_exists(unique_index(:oauth_tokens, [:token]))
+    create_if_not_exists(index(:oauth_tokens, [:app_id]))
+    create_if_not_exists(index(:oauth_tokens, [:user_id]))
   end
 end

@@ -6,12 +6,12 @@ defmodule Pleroma.Repo.Migrations.CreateConversations do
   use Ecto.Migration

   def change do
-    create table(:conversations) do
+    create_if_not_exists table(:conversations) do
       add(:ap_id, :string, null: false)
       timestamps()
     end

-    create table(:conversation_participations) do
+    create_if_not_exists table(:conversation_participations) do
       add(:user_id, references(:users, type: :uuid, on_delete: :delete_all))
       add(:conversation_id, references(:conversations, on_delete: :delete_all))
       add(:read, :boolean, default: false)

@@ -19,8 +19,8 @@ def change do
       timestamps()
     end

-    create index(:conversation_participations, [:conversation_id])
-    create unique_index(:conversation_participations, [:user_id, :conversation_id])
-    create unique_index(:conversations, [:ap_id])
+    create_if_not_exists index(:conversation_participations, [:conversation_id])
+    create_if_not_exists unique_index(:conversation_participations, [:user_id, :conversation_id])
+    create_if_not_exists unique_index(:conversations, [:ap_id])
   end
 end

@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddParticipationUpdatedAtIndex do
   use Ecto.Migration

   def change do
-    create index(:conversation_participations, ["updated_at desc"])
+    create_if_not_exists index(:conversation_participations, ["updated_at desc"])
   end
 end

@@ -2,6 +2,6 @@ defmodule Pleroma.Repo.Migrations.AddIndexOnUserInfoDeactivated do
   use Ecto.Migration

   def change do
-    create(index(:users, ["(info->'deactivated')"], name: :users_deactivated_index, using: :gin))
+    create_if_not_exists(index(:users, ["(info->'deactivated')"], name: :users_deactivated_index, using: :gin))
   end
 end

@@ -2,13 +2,13 @@ defmodule Pleroma.Repo.Migrations.CreateBookmarks do
   use Ecto.Migration

   def change do
-    create table(:bookmarks) do
+    create_if_not_exists table(:bookmarks) do
       add(:user_id, references(:users, type: :uuid, on_delete: :delete_all))
       add(:activity_id, references(:activities, type: :uuid, on_delete: :delete_all))

       timestamps()
     end

-    create(unique_index(:bookmarks, [:user_id, :activity_id]))
+    create_if_not_exists(unique_index(:bookmarks, [:user_id, :activity_id]))
   end
 end

Some files were not shown because too many files have changed in this diff.