Merge branch 'develop' into feature/1893-remote-emoji-packs-pagination

Commit: 346cc3ac24
@@ -59,7 +59,7 @@ unit-testing:
      alias: postgres
    command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
  script:
-    - apt-get update && apt-get install -y libimage-exiftool-perl
+    - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg
    - mix deps.get
    - mix ecto.create
    - mix ecto.migrate

@@ -93,7 +93,7 @@ unit-testing-rum:
    <<: *global_variables
    RUM_ENABLED: "true"
  script:
-    - apt-get update && apt-get install -y libimage-exiftool-perl
+    - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg
    - mix deps.get
    - mix ecto.create
    - mix ecto.migrate
CHANGELOG.md

@@ -5,18 +5,25 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## Unreleased

### Added
- Mix tasks for controlling user account confirmation status in bulk (`mix pleroma.user confirm_all` and `mix pleroma.user unconfirm_all`)
- Mix task for sending confirmation emails to all unconfirmed users (`mix pleroma.email send_confirmation_mails`)
- Mix task option for force-unfollowing relays

### Changed

- **Breaking:** Pleroma API: packs and files routes changed.
- **Breaking:** Pleroma Admin API: emoji packs and files routes changed.
- Search: Users are now findable by their URLs.
- Renamed `:await_up_timeout` in `:connections_pool` namespace to `:connect_timeout`, old name is deprecated.
- Renamed `:timeout` in `pools` namespace to `:recv_timeout`, old name is deprecated.
- The `discoverable` field in the `User` struct will now add a NOINDEX metatag to profile pages when false.
- Users with the `discoverable` field set to false will not show up in searches.
- Minimum lifetime for ephemeral activities changed to 10 minutes and made configurable (`:min_lifetime` option).
- Introduced optional dependencies on `ffmpeg`, `ImageMagick`, `exiftool` software packages. Please refer to `docs/installation/optional/media_graphics_packages.md`.

### Added

-- Media preview proxy (requires media proxy be enabled; see `:media_preview_proxy` config for more details).
+- Media preview proxy (requires `ffmpeg` and `ImageMagick` to be installed and media proxy to be enabled; see `:media_preview_proxy` config for more details).
- Pleroma API: Importing the muted users from CSV files.
- Experimental websocket-based federation between Pleroma instances.

<details>

@@ -41,6 +48,11 @@ switched to a new configuration mechanism, however it was not officially removed
- Add documented-but-missing chat pagination.
- Allow sending out emails again.

## Unreleased (Patch)

### Changed
- API: Empty parameter values for integer parameters are now ignored in non-strict validation mode.

## [2.1.2] - 2020-09-17

### Security
@@ -59,8 +59,6 @@
    "BLH1qVhJItRGCfxgTtONfsOKDc9VRAraXw-3NsmjMngWSh7NxOizN6bkuRA7iLTMPS82PjwJAr3UoK9EC1IFrz4",
  private_key: "_-XZ0iebPrRfZ_o0-IatTdszYa8VCH1yLN-JauK7HHA"

-config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock

config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 2,
  total_user_limit: 3,

@@ -809,7 +809,7 @@
config :ex_aws, http_client: Pleroma.HTTP.ExAws

-config :web_push_encryption, http_client: Pleroma.HTTP
+config :web_push_encryption, http_client: Pleroma.HTTP.WebPush

config :pleroma, :instances_favicons, enabled: false

@@ -83,8 +83,6 @@
    "BLH1qVhJItRGCfxgTtONfsOKDc9VRAraXw-3NsmjMngWSh7NxOizN6bkuRA7iLTMPS82PjwJAr3UoK9EC1IFrz4",
  private_key: "_-XZ0iebPrRfZ_o0-IatTdszYa8VCH1yLN-JauK7HHA"

-config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock

config :pleroma, Oban,
  queues: false,
  crontab: false,
@@ -349,9 +349,9 @@ Response:

### Unfollow a Relay

-Params:
-
-* `relay_url`
+- Params:
+    - `relay_url`
+    - *optional* `force`: forcefully unfollow a relay even when the relay is not available. (default is `false`)

Response:
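For reference, a minimal sketch of a force-unfollow request against this section's endpoint. The `DELETE /api/pleroma/admin/relay` route and the bearer-token header are assumptions based on the surrounding Admin API docs, not part of this diff:

```sh
# Hypothetical example: force-unfollow an unreachable relay via the Admin API
curl -X DELETE https://your.instance/api/pleroma/admin/relay \
  -H "Authorization: Bearer <admin-token>" \
  -H "Content-Type: application/json" \
  -d '{"relay_url": "https://relay.example.org/actor", "force": true}'
```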
@@ -1,4 +1,4 @@
-# Managing emails
+# EMail administration tasks

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

@@ -30,3 +30,17 @@ Example:

```sh
mix pleroma.email test --to root@example.org
```

## Send confirmation emails to all unconfirmed user accounts

=== "OTP"

    ```sh
    ./bin/pleroma_ctl email send_confirmation_mails
    ```

=== "From Source"

    ```sh
    mix pleroma.email send_confirmation_mails
    ```
@@ -224,9 +224,10 @@
```

### Options
- `--admin`/`--no-admin` - whether the user should be an admin
- `--confirmed`/`--no-confirmed` - whether the user account is confirmed
- `--locked`/`--no-locked` - whether the user should be locked
- `--moderator`/`--no-moderator` - whether the user should be a moderator
-- `--admin`/`--no-admin` - whether the user should be an admin

## Add tags to a user

@@ -271,3 +272,33 @@

```sh
mix pleroma.user toggle_confirmed <nickname>
```

## Set confirmation status for all regular active users
*Admins and moderators are excluded*

=== "OTP"

    ```sh
    ./bin/pleroma_ctl user confirm_all
    ```

=== "From Source"

    ```sh
    mix pleroma.user confirm_all
    ```

## Revoke confirmation status for all regular active users
*Admins and moderators are excluded*

=== "OTP"

    ```sh
    ./bin/pleroma_ctl user unconfirm_all
    ```

=== "From Source"

    ```sh
    mix pleroma.user unconfirm_all
    ```
@@ -20,6 +20,9 @@ It assumes that you have administrative rights, either as root or a user with [s

* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
* `ImageMagick`
* `ffmpeg`
* `exiftool`

### Prepare the system

@@ -29,7 +32,6 @@ It assumes that you have administrative rights, either as root or a user with [s
awk 'NR==2' /etc/apk/repositories | sed 's/main/community/' | tee -a /etc/apk/repositories
```

* Then update the system, if not already done:

```shell

@@ -56,6 +58,7 @@ sudo apk add erlang erlang-runtime-tools erlang-xmerl elixir
```shell
sudo apk add erlang-eldap
```

### Install PostgreSQL

* Install Postgresql server:

@@ -76,6 +79,12 @@ sudo /etc/init.d/postgresql start
sudo rc-update add postgresql
```

### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md))

```shell
sudo apk add ffmpeg imagemagick exiftool
```

### Install PleromaBE

* Add a new system user for the Pleroma service:
@@ -15,6 +15,9 @@ This guide will assume that you have administrative rights, either as root or a

* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
* `ImageMagick`
* `ffmpeg`
* `exiftool`

### Prepare the system

@@ -52,6 +55,12 @@ sudo -iu postgres initdb -D /var/lib/postgres/data
sudo systemctl enable --now postgresql.service
```

### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md))

```shell
sudo pacman -S ffmpeg imagemagick perl-image-exiftool
```

### Install PleromaBE

* Add a new system user for the Pleroma service:
@@ -18,6 +18,9 @@ This guide will assume you are on Debian Stretch. This guide should also work wi

* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
* `ImageMagick`
* `ffmpeg`
* `exiftool`

### Prepare the system

@@ -50,6 +53,12 @@ sudo apt update
sudo apt install elixir erlang-dev erlang-nox
```

### Optional packages: [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md)

```shell
sudo apt install imagemagick ffmpeg libimage-exiftool-perl
```

### Install PleromaBE

* Add a new system user for the Pleroma service:
@@ -22,6 +22,9 @@

- `nginx` (recommended; if you use another reverse proxy, look for example configs in this repository)
- `certbot` (or any other ACME client for Let's Encrypt)
- `ImageMagick`
- `ffmpeg`
- `exiftool`

### Prepare the system

@@ -33,10 +36,9 @@ sudo apt full-upgrade

* Install the packages listed above:
```
-sudo apt install git build-essential postgresql postgresql-contrib cmake
+sudo apt install git build-essential postgresql postgresql-contrib cmake ffmpeg imagemagick
```

### Install Elixir and Erlang

* Download and install the Erlang repository:

@@ -51,6 +53,12 @@ sudo apt update
sudo apt install elixir erlang-dev erlang-nox
```

### Optional packages: [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md)

```shell
sudo apt install imagemagick ffmpeg libimage-exiftool-perl
```

### Install Pleroma BE (backend)

* Create a new user for Pleroma:
@@ -26,6 +26,12 @@ Setup the required services to automatically start at boot, using `sysrc(8)`.
# service postgresql start
```

### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md))

```shell
# pkg install imagemagick ffmpeg p5-Image-ExifTool
```

## Configuring Pleroma

Create a user for Pleroma:
@@ -35,6 +35,9 @@ Gentoo quite pointedly does not come with a cron daemon installed, and as such i
* `www-servers/nginx` (preferred, example configs for other reverse proxies can be found in the repo)
* `app-crypt/certbot` (or any other ACME client for Let’s Encrypt certificates)
* `app-crypt/certbot-nginx` (nginx certbot plugin that allows use of the all-powerful `--nginx` flag on certbot)
* `media-gfx/imagemagick`
* `media-video/ffmpeg`
* `media-libs/exiftool`

### Prepare the system

@@ -87,6 +90,12 @@ If you do not plan to make any modifications to your Pleroma instance, cloning d

Not only does this make it much easier to deploy changes you make, as you can commit and pull from upstream and all that good stuff from the comfort of your local machine then simply `git pull` on your instance server when you're ready to deploy, it also ensures you are compliant with the Affero General Public Licence that Pleroma is licenced under, which stipulates that all network services provided with modified AGPL code must publish their changes on a publicly available internet service and for free. It also makes it much easier to ask for help from and provide help to your fellow Pleroma admins if your public repo always reflects what you are running because it is part of your deployment procedure.

### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md))

```shell
# emerge --ask media-video/ffmpeg media-gfx/imagemagick media-libs/exiftool
```

### Install PleromaBE

* Add a new system user for the Pleroma service and set up default directories:
@@ -10,7 +10,7 @@ Pleroma uses.

The `mksh` shell is needed to run the Elixir `mix` script.

-`# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo`
+`# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo ffmpeg4 ImageMagick`

You can also build these packages using pkgsrc:
```

@@ -44,6 +44,10 @@ pgsql=YES

First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.

### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md))

`# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`

## Configuring Pleroma

Create a user for Pleroma:
@@ -10,20 +10,34 @@ The following packages need to be installed:

* elixir
* gmake
-* ImageMagick
* git
* postgresql-server
* postgresql-contrib
* cmake
+* ffmpeg
+* ImageMagick

To install them, run the following command (with doas or as root):

```
-pkg_add elixir gmake ImageMagick git postgresql-server postgresql-contrib cmake
+pkg_add elixir gmake git postgresql-server postgresql-contrib cmake ffmpeg ImageMagick
```

Pleroma requires a reverse proxy, OpenBSD has relayd in base (and is used in this guide) and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd). Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt.

#### Optional software

Per [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md):
* ImageMagick
* ffmpeg
* exiftool

To install the above:

```
pkg_add ImageMagick ffmpeg p5-Image-ExifTool
```

#### Creating the pleroma user
Pleroma will be run by a dedicated user, \_pleroma. Before creating it, insert the following lines in login.conf:
```
@@ -16,7 +16,18 @@ Matrix-kanava #freenode_#pleroma:matrix.org ovat hyviä paikkoja löytää apua

Install the required software:

-`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3 cmake`
+`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3 cmake ffmpeg ImageMagick`

#### Optional software

[`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md):
* ImageMagick
* ffmpeg
* exiftool

Install the required software:

`# pkg_add ImageMagick ffmpeg p5-Image-ExifTool`

Create the PostgreSQL database:
docs/installation/optional/media_graphics_packages.md (new file)

@@ -0,0 +1,32 @@
# Optional software packages needed for specific functionality

For specific Pleroma functionality (which is disabled by default) some or all of the below packages are required:
* `ImageMagick`
* `ffmpeg`
* `exiftool`

Please refer to the documentation in `docs/installation` on how to install them on your specific OS.

Note: the packages are not required with the current default settings of Pleroma.

## `ImageMagick`

`ImageMagick` is a set of tools to create, edit, compose, or convert bitmap images.

It is required for the following Pleroma features:
* `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Pleroma.Upload/filters` in `config/config.exs`)
* Media preview proxy for still images (related config: `media_preview_proxy/enabled` in `config/config.exs`)

## `ffmpeg`

`ffmpeg` is software to record, convert and stream audio and video.

It is required for the following Pleroma features:
* Media preview proxy for videos (related config: `media_preview_proxy/enabled` in `config/config.exs`)

## `exiftool`

`exiftool` is a metadata reader/writer for media files.

It is required for the following Pleroma features:
* `Pleroma.Upload.Filters.Exiftool` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
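As a usage illustration (not part of this change set), a minimal sketch of wiring these packages into the related settings; the exact filter list and option values are assumptions to adapt to your own secret config:

```elixir
# Enable upload filters that shell out to exiftool / mogrify (ImageMagick),
# and the media preview proxy that needs ffmpeg and ImageMagick.
config :pleroma, Pleroma.Upload,
  filters: [Pleroma.Upload.Filters.Exiftool, Pleroma.Upload.Filters.Mogrifun]

# The preview proxy also requires the media proxy itself to be enabled.
config :pleroma, :media_preview_proxy, enabled: true
```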
@@ -40,6 +40,25 @@ Other than things bundled in the OTP release Pleroma depends on:
apt install curl unzip libncurses5 postgresql postgresql-contrib nginx certbot
```

### Installing optional packages

Per [`docs/installation/optional/media_graphics_packages.md`](docs/installation/optional/media_graphics_packages.md):
* ImageMagick
* ffmpeg
* exiftool

=== "Alpine"
    ```
    echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories
    apk update
    apk add imagemagick ffmpeg exiftool
    ```

=== "Debian/Ubuntu"
    ```
    apt install imagemagick ffmpeg libimage-exiftool-perl
    ```

## Setup
### Configuring PostgreSQL
#### (Optional) Installing RUM indexes

@@ -82,6 +101,8 @@ It is encouraged to check [Optimizing your PostgreSQL performance](../configurat
If you are using PostgreSQL 12 or higher, add this to your Ecto database configuration

```elixir
#
config :pleroma, Pleroma.Repo,
  prepare: :named,
  parameters: [
    plan_cache_mode: "force_custom_plan"
@@ -1,3 +1,4 @@
# Recommended varnishncsa logging format: '%h %l %u %t "%m %{X-Forwarded-Proto}i://%{Host}i%U%q %H" %s %b "%{Referer}i" "%{User-agent}i"'
vcl 4.1;
import std;

@@ -14,8 +15,11 @@ acl purge {
sub vcl_recv {
    # Redirect HTTP to HTTPS
    if (std.port(server.ip) != 443) {
        set req.http.X-Forwarded-Proto = "http";
        set req.http.x-redir = "https://" + req.http.host + req.url;
        return (synth(750, ""));
    } else {
        set req.http.X-Forwarded-Proto = "https";
    }

    # CHUNKED SUPPORT

@@ -105,7 +109,7 @@ sub vcl_hash {

sub vcl_backend_fetch {
    # Be more lenient for slow servers on the fediverse
-    if bereq.url ~ "^/proxy/" {
+    if (bereq.url ~ "^/proxy/") {
        set bereq.first_byte_timeout = 300s;
    }
@@ -2,11 +2,11 @@ defmodule Mix.Tasks.Pleroma.Email do
  use Mix.Task
  import Mix.Pleroma

-  @shortdoc "Simple Email test"
+  @shortdoc "Email administrative tasks"
  @moduledoc File.read!("docs/administration/CLI_tasks/email.md")

  def run(["test" | args]) do
-    Mix.Pleroma.start_pleroma()
+    start_pleroma()

    {options, [], []} =
      OptionParser.parse(

@@ -21,4 +21,20 @@ def run(["test" | args]) do
    shell_info("Test email has been sent to #{inspect(email.to)} from #{inspect(email.from)}")
  end

  def run(["resend_confirmation_emails"]) do
    start_pleroma()

    shell_info("Sending emails to all unconfirmed users")

    Pleroma.User.Query.build(%{
      local: true,
      deactivated: false,
      confirmation_pending: true,
      invisible: false
    })
    |> Pleroma.Repo.chunk_stream(500)
    |> Stream.each(&Pleroma.User.try_send_confirmation_email(&1))
    |> Stream.run()
  end
end
@@ -21,10 +21,19 @@ def run(["follow", target]) do
    end
  end

-  def run(["unfollow", target]) do
+  def run(["unfollow", target | rest]) do
    start_pleroma()

-    with {:ok, _activity} <- Relay.unfollow(target) do
+    {options, [], []} =
+      OptionParser.parse(
+        rest,
+        strict: [force: :boolean],
+        aliases: [f: :force]
+      )
+
+    force = Keyword.get(options, :force, false)
+
+    with {:ok, _activity} <- Relay.unfollow(target, %{force: force}) do
      # put this task to sleep to allow the genserver to push out the messages
      :timer.sleep(500)
    else
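A quick usage sketch of the flag added above (the relay URL is a placeholder):

```sh
# Unfollow a dead relay even though it can no longer be reached
mix pleroma.relay unfollow https://relay.example.org/actor --force
# or, on OTP releases:
./bin/pleroma_ctl relay unfollow https://relay.example.org/actor --force
```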
@@ -196,17 +196,24 @@ def run(["set", nickname | rest]) do
      OptionParser.parse(
        rest,
        strict: [
-          moderator: :boolean,
          admin: :boolean,
-          locked: :boolean
+          confirmed: :boolean,
+          locked: :boolean,
+          moderator: :boolean
        ]
      )

    with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
      user =
-        case Keyword.get(options, :moderator) do
+        case Keyword.get(options, :admin) do
          nil -> user
-          value -> set_moderator(user, value)
+          value -> set_admin(user, value)
        end

      user =
        case Keyword.get(options, :confirmed) do
          nil -> user
          value -> set_confirmed(user, value)
        end

      user =

@@ -216,9 +223,9 @@ def run(["set", nickname | rest]) do
        end

      _user =
-        case Keyword.get(options, :admin) do
+        case Keyword.get(options, :moderator) do
          nil -> user
-          value -> set_admin(user, value)
+          value -> set_moderator(user, value)
        end
    else
      _ ->

@@ -353,6 +360,42 @@ def run(["toggle_confirmed", nickname]) do
    end
  end

  def run(["confirm_all"]) do
    start_pleroma()

    Pleroma.User.Query.build(%{
      local: true,
      deactivated: false,
      is_moderator: false,
      is_admin: false,
      invisible: false
    })
    |> Pleroma.Repo.chunk_stream(500, :batches)
    |> Stream.each(fn users ->
      users
      |> Enum.each(fn user -> User.need_confirmation(user, false) end)
    end)
    |> Stream.run()
  end

  def run(["unconfirm_all"]) do
    start_pleroma()

    Pleroma.User.Query.build(%{
      local: true,
      deactivated: false,
      is_moderator: false,
      is_admin: false,
      invisible: false
    })
    |> Pleroma.Repo.chunk_stream(500, :batches)
    |> Stream.each(fn users ->
      users
      |> Enum.each(fn user -> User.need_confirmation(user, true) end)
    end)
    |> Stream.run()
  end

  def run(["sign_out", nickname]) do
    start_pleroma()

@@ -410,4 +453,11 @@ defp set_locked(user, value) do
    shell_info("Locked status of #{user.nickname}: #{user.locked}")
    user
  end

  defp set_confirmed(user, value) do
    {:ok, user} = User.need_confirmation(user, !value)

    shell_info("Confirmation pending status of #{user.nickname}: #{user.confirmation_pending}")
    user
  end
end
@@ -56,7 +56,6 @@ def start(_type, _args) do
    Pleroma.ApplicationRequirements.verify!()
    setup_instrumenters()
    load_custom_modules()
-    check_system_commands()
    Pleroma.Docs.JSON.compile()

    adapter = Application.get_env(:tesla, :adapter)

@@ -260,21 +259,4 @@ defp http_children(Tesla.Adapter.Gun, _) do
  end

  defp http_children(_, _), do: []
-
-  defp check_system_commands do
-    filters = Config.get([Pleroma.Upload, :filters])
-
-    check_filter = fn filter, command_required ->
-      with true <- filter in filters,
-           false <- Pleroma.Utils.command_available?(command_required) do
-        Logger.error(
-          "#{filter} is specified in list of Pleroma.Upload filters, but the #{command_required} command is not found"
-        )
-      end
-    end
-
-    check_filter.(Pleroma.Upload.Filters.Exiftool, "exiftool")
-    check_filter.(Pleroma.Upload.Filters.Mogrify, "mogrify")
-    check_filter.(Pleroma.Upload.Filters.Mogrifun, "mogrify")
-  end
end
@@ -9,6 +9,9 @@ defmodule Pleroma.ApplicationRequirements do

  defmodule VerifyError, do: defexception([:message])

  alias Pleroma.Config
  alias Pleroma.Helpers.MediaHelper

  import Ecto.Query

  require Logger

@@ -16,7 +19,8 @@ defmodule VerifyError, do: defexception([:message])
  @spec verify!() :: :ok | VerifyError.t()
  def verify! do
    :ok
-    |> check_confirmation_accounts!
+    |> check_system_commands!()
+    |> check_confirmation_accounts!()
    |> check_migrations_applied!()
    |> check_welcome_message_config!()
    |> check_rum!()

@@ -48,7 +52,9 @@ def check_confirmation_accounts!(:ok) do
    if Pleroma.Config.get([:instance, :account_activation_required]) &&
         not Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do
      Logger.error(
-        "Account activation enabled, but no Mailer settings enabled.\nPlease set config :pleroma, :instance, account_activation_required: false\nOtherwise setup and enable Mailer."
+        "Account activation enabled, but no Mailer settings enabled.\n" <>
+          "Please set config :pleroma, :instance, account_activation_required: false\n" <>
+          "Otherwise setup and enable Mailer."
      )

      {:error,

@@ -81,7 +87,9 @@ def check_migrations_applied!(:ok) do
        Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)

      Logger.error(
-        "The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
+        "The following migrations were not applied:\n#{down_migrations_text}" <>
+          "If you want to start Pleroma anyway, set\n" <>
+          "config :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
      )

      {:error, "Unapplied Migrations detected"}

@@ -124,14 +132,22 @@ defp do_check_rum!(setting, migrate) do
    case {setting, migrate} do
      {true, false} ->
        Logger.error(
-          "Use `RUM` index is enabled, but were not applied migrations for it.\nIf you want to start Pleroma anyway, set\nconfig :pleroma, :database, rum_enabled: false\nOtherwise apply the following migrations:\n`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`"
+          "Use `RUM` index is enabled, but were not applied migrations for it.\n" <>
+            "If you want to start Pleroma anyway, set\n" <>
+            "config :pleroma, :database, rum_enabled: false\n" <>
+            "Otherwise apply the following migrations:\n" <>
+            "`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`"
        )

        {:error, "Unapplied RUM Migrations detected"}

      {false, true} ->
        Logger.error(
-          "Detected applied migrations to use `RUM` index, but `RUM` isn't enable in settings.\nIf you want to use `RUM`, set\nconfig :pleroma, :database, rum_enabled: true\nOtherwise roll `RUM` migrations back.\n`mix ecto.rollback --migrations-path priv/repo/optional_migrations/rum_indexing/`"
+          "Detected applied migrations to use `RUM` index, but `RUM` isn't enable in settings.\n" <>
+            "If you want to use `RUM`, set\n" <>
+            "config :pleroma, :database, rum_enabled: true\n" <>
+            "Otherwise roll `RUM` migrations back.\n" <>
+            "`mix ecto.rollback --migrations-path priv/repo/optional_migrations/rum_indexing/`"
        )

        {:error, "RUM Migrations detected"}

@@ -140,4 +156,50 @@ defp do_check_rum!(setting, migrate) do
      :ok
    end
  end

  defp check_system_commands!(:ok) do
    filter_commands_statuses = [
      check_filter(Pleroma.Upload.Filters.Exiftool, "exiftool"),
      check_filter(Pleroma.Upload.Filters.Mogrify, "mogrify"),
      check_filter(Pleroma.Upload.Filters.Mogrifun, "mogrify")
    ]

    preview_proxy_commands_status =
      if !Config.get([:media_preview_proxy, :enabled]) or
           MediaHelper.missing_dependencies() == [] do
        true
      else
        Logger.error(
          "The following dependencies required by Media preview proxy " <>
            "(which is currently enabled) are not installed: " <>
            inspect(MediaHelper.missing_dependencies())
        )

        false
      end

    if Enum.all?([preview_proxy_commands_status | filter_commands_statuses], & &1) do
      :ok
    else
      {:error,
       "System commands missing. Check logs and see `docs/installation` for more details."}
    end
  end

  defp check_system_commands!(result), do: result

  defp check_filter(filter, command_required) do
    filters = Config.get([Pleroma.Upload, :filters])

    if filter in filters and not Pleroma.Utils.command_available?(command_required) do
      Logger.error(
        "#{filter} is specified in list of Pleroma.Upload filters, but the " <>
          "#{command_required} command is not found"
      )

      false
    else
      true
    end
  end
end
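`check_filter/2` and the preview-proxy check above both lean on `Pleroma.Utils.command_available?/1`, which is not shown in this diff. A minimal sketch of the idea (an assumption about its behaviour, not the actual implementation) is simply a `$PATH` lookup:

```elixir
# Sketch: a command is "available" when its executable can be found on $PATH.
defmodule CommandCheck do
  @spec available?(String.t()) :: boolean()
  def available?(command), do: System.find_executable(command) != nil
end

CommandCheck.available?("exiftool")
#=> true or false, depending on the host system
```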
@@ -33,34 +33,8 @@ def check_hellthread_threshold do
    end
  end

-  def mrf_user_allowlist do
-    config = Config.get(:mrf_user_allowlist)
-
-    if config && Enum.any?(config, fn {k, _} -> is_atom(k) end) do
-      rewritten =
-        Enum.reduce(Config.get(:mrf_user_allowlist), Map.new(), fn {k, v}, acc ->
-          Map.put(acc, to_string(k), v)
-        end)
-
-      Config.put(:mrf_user_allowlist, rewritten)
-
-      Logger.error("""
-      !!!DEPRECATION WARNING!!!
-      As of Pleroma 2.0.7, the `mrf_user_allowlist` setting changed of format.
-      Pleroma 2.1 will remove support for the old format. Please change your configuration to match this:
-
-      config :pleroma, :mrf_user_allowlist, #{inspect(rewritten, pretty: true)}
-      """)
-
-      :error
-    else
-      :ok
-    end
-  end
-
  def warn do
    with :ok <- check_hellthread_threshold(),
-         :ok <- mrf_user_allowlist(),
         :ok <- check_old_mrf_config(),
         :ok <- check_media_proxy_whitelist_config(),
         :ok <- check_welcome_message_config(),

@@ -83,9 +57,9 @@ def check_welcome_message_config do
    if use_old_config do
      Logger.error("""
      !!!DEPRECATION WARNING!!!
-      Your config is using the old namespace for Welcome messages configuration. You need to change to the new namespace:
-      \n* `config :pleroma, :instance, welcome_user_nickname` is now `config :pleroma, :welcome, :direct_message, :sender_nickname`
-      \n* `config :pleroma, :instance, welcome_message` is now `config :pleroma, :welcome, :direct_message, :message`
+      Your config is using the old namespace for Welcome messages configuration. You need to convert to the new namespace. e.g.,
+      \n* `config :pleroma, :instance, welcome_user_nickname` and `config :pleroma, :instance, welcome_message` are now equal to:
+      \n* `config :pleroma, :welcome, direct_message: [enabled: true, sender_nickname: "NICKNAME", message: "Your welcome message"]`"
      """)

      :error

@@ -148,7 +122,7 @@ def check_gun_pool_options do
    if timeout = pool_config[:await_up_timeout] do
      Logger.warn("""
      !!!DEPRECATION WARNING!!!
-      Your config is using old setting name `await_up_timeout` instead of `connect_timeout`. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
+      Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`. Please change to `config :pleroma, :connections_pool, connect_timeout` to ensure compatibility with future releases.
      """)

      Config.put(:connections_pool, Keyword.put_new(pool_config, :connect_timeout, timeout))
@@ -9,6 +9,18 @@ defmodule Pleroma.Helpers.MediaHelper do

  alias Pleroma.HTTP

  require Logger

  def missing_dependencies do
    Enum.reduce([imagemagick: "convert", ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
      if Pleroma.Utils.command_available?(executable) do
        acc
      else
        [sym | acc]
      end
    end)
  end

  def image_resize(url, options) do
    with executable when is_binary(executable) <- System.find_executable("convert"),
         {:ok, args} <- prepare_image_resize_args(options),
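Given the reducer above, `missing_dependencies/0` returns the subset of `[:imagemagick, :ffmpeg]` whose executables (`convert`, `ffmpeg`) cannot be found, so callers can simply branch on an empty list. A quick illustrative call (the output shown is an assumption about one possible host):

```elixir
iex> Pleroma.Helpers.MediaHelper.missing_dependencies()
[:ffmpeg]  # e.g. ImageMagick's `convert` was found, ffmpeg was not
```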
lib/pleroma/http/web_push.ex (new file)

@@ -0,0 +1,12 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.HTTP.WebPush do
  @moduledoc false

  def post(url, payload, headers) do
    list_headers = Map.to_list(headers)
    Pleroma.HTTP.post(url, payload, list_headers)
  end
end
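This small adapter lets the `web_push_encryption` library send through Pleroma's own HTTP client instead of HTTPoison, converting the headers map into the keyword-style list `Pleroma.HTTP.post/3` expects. It is wired up by the config change earlier in this commit:

```elixir
# config/config.exs (as changed in this commit)
config :web_push_encryption, http_client: Pleroma.HTTP.WebPush
```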
@@ -813,7 +813,8 @@ def send_welcome_email(%User{email: email} = user) when is_binary(email) do
  def send_welcome_email(_), do: {:ok, :noop}

  @spec try_send_confirmation_email(User.t()) :: {:ok, :enqueued | :noop}
-  def try_send_confirmation_email(%User{confirmation_pending: true} = user) do
+  def try_send_confirmation_email(%User{confirmation_pending: true, email: email} = user)
+      when is_binary(email) do
    if Config.get([:instance, :account_activation_required]) do
      send_confirmation_email(user)
      {:ok, :enqueued}

@@ -914,9 +915,7 @@ defp do_unfollow(%User{} = follower, %User{} = followed) do
      FollowingRelationship.unfollow(follower, followed)
      {:ok, followed} = update_follower_count(followed)

-      {:ok, follower} =
-        follower
-        |> update_following_count()
+      {:ok, follower} = update_following_count(follower)

      {:ok, follower, followed}

@@ -2071,6 +2070,13 @@ def toggle_confirmation(users) do
    Enum.map(users, &toggle_confirmation/1)
  end

  @spec need_confirmation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
  def need_confirmation(%User{} = user, bool) do
    user
    |> confirmation_changeset(need_confirmation: bool)
    |> update_and_set_cache()
  end

  def get_mascot(%{mascot: %{} = mascot}) when not is_nil(mascot) do
    mascot
  end

@@ -2285,7 +2291,9 @@ def remove_pinnned_activity(user, %Pleroma.Activity{id: id, data: data}) do
    # if pinned activity was scheduled for deletion, we reschedule it for deletion
    if data["expires_at"] do
-      {:ok, expires_at, _} = DateTime.from_iso8601(data["expires_at"])
+      # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation
+      {:ok, expires_at} =
+        data["expires_at"] |> Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast()

      Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
        activity_id: id,
@@ -110,12 +110,12 @@ defp compose_query({:tags, tags}, query) when is_list(tags) and length(tags) > 0
    where(query, [u], fragment("? && ?", u.tags, ^tags))
  end

-  defp compose_query({:is_admin, _}, query) do
-    where(query, [u], u.is_admin)
+  defp compose_query({:is_admin, bool}, query) do
+    where(query, [u], u.is_admin == ^bool)
  end

-  defp compose_query({:is_moderator, _}, query) do
-    where(query, [u], u.is_moderator)
+  defp compose_query({:is_moderator, bool}, query) do
+    where(query, [u], u.is_moderator == ^bool)
  end

  defp compose_query({:super_users, _}, query) do

@@ -148,6 +148,10 @@ defp compose_query({:deactivated, true}, query) do
    where(query, [u], u.deactivated == ^true)
  end

  defp compose_query({:confirmation_pending, bool}, query) do
    where(query, [u], u.confirmation_pending == ^bool)
  end

  defp compose_query({:need_approval, _}, query) do
    where(query, [u], u.approval_pending)
  end
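With `:confirmation_pending` now a first-class criterion, callers can compose it with the existing flags; a small sketch (the mix task added in this commit does essentially this):

```elixir
# All local, active, unconfirmed users, streamed in chunks of 500
Pleroma.User.Query.build(%{local: true, deactivated: false, confirmation_pending: true})
|> Pleroma.Repo.chunk_stream(500)
|> Stream.each(&Pleroma.User.try_send_confirmation_email/1)
|> Stream.run()
```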
@ -3,8 +3,10 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.User.Search do
|
||||
alias Pleroma.EctoType.ActivityPub.ObjectValidators.Uri, as: UriType
|
||||
alias Pleroma.Pagination
|
||||
alias Pleroma.User
|
||||
|
||||
import Ecto.Query
|
||||
|
||||
@limit 20
|
||||
|
@ -19,16 +21,47 @@ def search(query_string, opts \\ []) do
|
|||
|
||||
query_string = format_query(query_string)
|
||||
|
||||
maybe_resolve(resolve, for_user, query_string)
|
||||
# If this returns anything, it should bounce to the top
|
||||
maybe_resolved = maybe_resolve(resolve, for_user, query_string)
|
||||
|
||||
top_user_ids =
|
||||
[]
|
||||
|> maybe_add_resolved(maybe_resolved)
|
||||
|> maybe_add_ap_id_match(query_string)
|
||||
|> maybe_add_uri_match(query_string)
|
||||
|
||||
results =
|
||||
query_string
|
||||
|> search_query(for_user, following)
|
||||
|> search_query(for_user, following, top_user_ids)
|
||||
|> Pagination.fetch_paginated(%{"offset" => offset, "limit" => result_limit}, :offset)
|
||||
|
||||
results
|
||||
end
|
||||
|
||||
defp maybe_add_resolved(list, {:ok, %User{} = user}) do
|
||||
[user.id | list]
|
||||
end
|
||||
|
||||
defp maybe_add_resolved(list, _), do: list
|
||||
|
||||
defp maybe_add_ap_id_match(list, query) do
|
||||
if user = User.get_cached_by_ap_id(query) do
|
||||
[user.id | list]
|
||||
else
|
||||
list
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_add_uri_match(list, query) do
|
||||
with {:ok, query} <- UriType.cast(query),
|
||||
q = from(u in User, where: u.uri == ^query, select: u.id),
|
||||
users = Pleroma.Repo.all(q) do
|
||||
users ++ list
|
||||
else
|
||||
_ -> list
|
||||
end
|
||||
end
|
||||
|
||||
defp format_query(query_string) do
|
||||
# Strip the beginning @ off if there is a query
|
||||
query_string = String.trim_leading(query_string, "@")
|
||||
|
@ -47,7 +80,7 @@ defp format_query(query_string) do
|
|||
end
|
||||
end
|
||||
|
||||
defp search_query(query_string, for_user, following) do
|
||||
defp search_query(query_string, for_user, following, top_user_ids) do
|
||||
for_user
|
||||
|> base_query(following)
|
||||
|> filter_blocked_user(for_user)
|
||||
|
@ -56,13 +89,20 @@ defp search_query(query_string, for_user, following) do
|
|||
|> filter_internal_users()
|
||||
|> filter_blocked_domains(for_user)
|
||||
|> fts_search(query_string)
|
||||
|> select_top_users(top_user_ids)
|
||||
|> trigram_rank(query_string)
|
||||
|> boost_search_rank(for_user)
|
||||
|> boost_search_rank(for_user, top_user_ids)
|
||||
|> subquery()
|
||||
|> order_by(desc: :search_rank)
|
||||
|> maybe_restrict_local(for_user)
|
||||
end
|
||||
|
||||
defp select_top_users(query, top_user_ids) do
|
||||
from(u in query,
|
||||
or_where: u.id in ^top_user_ids
|
||||
)
|
||||
end
|
||||
|
||||
defp fts_search(query, query_string) do
|
||||
query_string = to_tsquery(query_string)
|
||||
|
||||
|
@ -180,7 +220,7 @@ defp restrict_local(q), do: where(q, [u], u.local == true)
|
|||
|
||||
defp local_domain, do: Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host])
|
||||
|
||||
defp boost_search_rank(query, %User{} = for_user) do
|
||||
defp boost_search_rank(query, %User{} = for_user, top_user_ids) do
|
||||
friends_ids = User.get_friends_ids(for_user)
|
||||
followers_ids = User.get_followers_ids(for_user)
|
||||
|
||||
|
@ -192,6 +232,7 @@ defp boost_search_rank(query, %User{} = for_user) do
|
|||
CASE WHEN (?) THEN (?) * 1.5
|
||||
WHEN (?) THEN (?) * 1.3
|
||||
WHEN (?) THEN (?) * 1.1
|
||||
WHEN (?) THEN 9001
|
||||
ELSE (?) END
|
||||
""",
|
||||
u.id in ^friends_ids and u.id in ^followers_ids,
|
||||
|
@ -200,11 +241,26 @@ defp boost_search_rank(query, %User{} = for_user) do
|
|||
u.search_rank,
|
||||
u.id in ^followers_ids,
|
||||
u.search_rank,
|
||||
u.id in ^top_user_ids,
|
||||
u.search_rank
|
||||
)
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
defp boost_search_rank(query, _for_user), do: query
|
||||
defp boost_search_rank(query, _for_user, top_user_ids) do
|
||||
from(u in subquery(query),
|
||||
select_merge: %{
|
||||
search_rank:
|
||||
fragment(
|
||||
"""
|
||||
CASE WHEN (?) THEN 9001
|
||||
ELSE (?) END
|
||||
""",
|
||||
u.id in ^top_user_ids,
|
||||
u.search_rank
|
||||
)
|
||||
}
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -30,12 +30,16 @@ def follow(target_instance) do
    end
  end

-  @spec unfollow(String.t()) :: {:ok, Activity.t()} | {:error, any()}
-  def unfollow(target_instance) do
+  @spec unfollow(String.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
+  def unfollow(target_instance, opts \\ %{}) do
    with %User{} = local_user <- get_actor(),
-         {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_instance),
+         {:ok, target_user} <- fetch_target_user(target_instance, opts),
         {:ok, activity} <- ActivityPub.unfollow(local_user, target_user) do
-      User.unfollow(local_user, target_user)
+      case target_user.id do
+        nil -> User.update_following_count(local_user)
+        _ -> User.unfollow(local_user, target_user)
+      end

      Logger.info("relay: unfollowed instance: #{target_instance}: id=#{activity.data["id"]}")
      {:ok, activity}
    else

@@ -43,6 +47,14 @@ def unfollow(target_instance) do
    end
  end

  defp fetch_target_user(ap_id, opts) do
    case {opts[:force], User.get_or_fetch_by_ap_id(ap_id)} do
      {_, {:ok, %User{} = user}} -> {:ok, user}
      {true, _} -> {:ok, %User{ap_id: ap_id}}
      {_, error} -> error
    end
  end

  @spec publish(any()) :: {:ok, Activity.t()} | {:error, any()}
  def publish(%Activity{data: %{"type" => "Create"}} = activity) do
    with %User{} = user <- get_actor(),
|
@ -515,15 +515,19 @@ def handle_incoming(
|
|||
end
|
||||
|
||||
def handle_incoming(
|
||||
%{"type" => "Create", "object" => %{"type" => objtype}} = data,
|
||||
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
|
||||
_options
|
||||
)
|
||||
when objtype in ~w{Question Answer ChatMessage Audio Video Event Article} do
|
||||
data = Map.put(data, "object", strip_internal_fields(data["object"]))
|
||||
|
||||
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
|
||||
nil <- Activity.get_create_by_object_ap_id(obj_id),
|
||||
{:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
|
||||
{:ok, activity}
|
||||
else
|
||||
%Activity{} = activity -> {:ok, activity}
|
||||
e -> e
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -33,11 +33,7 @@ def index(conn, _params) do
|
|||
|
||||
def follow(%{assigns: %{user: admin}, body_params: %{relay_url: target}} = conn, _) do
|
||||
with {:ok, _message} <- Relay.follow(target) do
|
||||
ModerationLog.insert_log(%{
|
||||
action: "relay_follow",
|
||||
actor: admin,
|
||||
target: target
|
||||
})
|
||||
ModerationLog.insert_log(%{action: "relay_follow", actor: admin, target: target})
|
||||
|
||||
json(conn, %{actor: target, followed_back: target in Relay.following()})
|
||||
else
|
||||
|
@ -48,13 +44,9 @@ def follow(%{assigns: %{user: admin}, body_params: %{relay_url: target}} = conn,
|
|||
end
|
||||
end
|
||||
|
||||
def unfollow(%{assigns: %{user: admin}, body_params: %{relay_url: target}} = conn, _) do
|
||||
with {:ok, _message} <- Relay.unfollow(target) do
|
||||
ModerationLog.insert_log(%{
|
||||
action: "relay_unfollow",
|
||||
actor: admin,
|
||||
target: target
|
||||
})
|
||||
def unfollow(%{assigns: %{user: admin}, body_params: %{relay_url: target} = params} = conn, _) do
|
||||
with {:ok, _message} <- Relay.unfollow(target, %{force: params[:force]}) do
|
||||
ModerationLog.insert_log(%{action: "relay_unfollow", actor: admin, target: target})
|
||||
|
||||
json(conn, target)
|
||||
else
|
||||
|
|
|
@@ -115,6 +115,10 @@ defp cast_and_validate(spec, operation, conn, content_type, false = _strict) do
        %{reason: :unexpected_field, name: name, path: [name]}, params ->
          Map.delete(params, name)

        # Filter out empty params
        %{reason: :invalid_type, path: [name_atom], value: ""}, params ->
          Map.delete(params, to_string(name_atom))

        %{reason: :invalid_enum, name: nil, path: path, value: value}, params ->
          path = path |> Enum.reverse() |> tl() |> Enum.reverse() |> list_items_to_string()
          update_in(params, path, &List.delete(&1, value))
@@ -56,7 +56,7 @@ def unfollow_operation do
      operationId: "AdminAPI.RelayController.unfollow",
      security: [%{"oAuth" => ["write:follows"]}],
      parameters: admin_api_params(),
-      requestBody: request_body("Parameters", relay_url()),
+      requestBody: request_body("Parameters", relay_unfollow()),
      responses: %{
        200 =>
          Operation.response("Status", "application/json", %Schema{

@@ -91,4 +91,14 @@ defp relay_url do
      }
    }
  end

  defp relay_unfollow do
    %Schema{
      type: :object,
      properties: %{
        relay_url: %Schema{type: :string, format: :uri},
        force: %Schema{type: :boolean, default: false}
      }
    }
  end
end
@@ -19,7 +19,7 @@ defmodule Pleroma.Web.Push.Impl do
  @types ["Create", "Follow", "Announce", "Like", "Move"]

  @doc "Performs sending notifications for user subscriptions"
-  @spec perform(Notification.t()) :: list(any) | :error
+  @spec perform(Notification.t()) :: list(any) | :error | {:error, :unknown_type}
  def perform(
        %{
          activity: %{data: %{"type" => activity_type}} = activity,

@@ -64,20 +64,20 @@ def perform(_) do
  @doc "Push message to web"
  def push_message(body, sub, api_key, subscription) do
    case WebPushEncryption.send_web_push(body, sub, api_key) do
-      {:ok, %{status_code: code}} when 400 <= code and code < 500 ->
+      {:ok, %{status: code}} when code in 400..499 ->
        Logger.debug("Removing subscription record")
        Repo.delete!(subscription)
        :ok

-      {:ok, %{status_code: code}} when 200 <= code and code < 300 ->
+      {:ok, %{status: code}} when code in 200..299 ->
        :ok

-      {:ok, %{status_code: code}} ->
+      {:ok, %{status: code}} ->
        Logger.error("Web Push Notification failed with code: #{code}")
        :error

-      _ ->
-        Logger.error("Web Push Notification failed with unknown error")
+      error ->
+        Logger.error("Web Push Notification failed with #{inspect(error)}")
        :error
    end
  end
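The switch from `status_code` to `status` follows from the client swap above: HTTPoison responses carry `:status_code`, while the Tesla-based responses returned through `Pleroma.HTTP` carry `:status`. A minimal sketch of the shape now being matched (illustrative values only, plain map instead of the real response struct):

```elixir
# Old shape (HTTPoison):   {:ok, %HTTPoison.Response{status_code: 201, ...}}
# New shape (Tesla-based): {:ok, %Tesla.Env{status: 201, ...}}
resp = {:ok, %{status: 201}}

case resp do
  {:ok, %{status: code}} when code in 200..299 -> :ok
  _ -> :error
end
```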
mix.exs

@@ -122,7 +122,7 @@ defp deps do
      {:ecto_enum, "~> 1.4"},
      {:ecto_sql, "~> 3.4.4"},
      {:postgrex, ">= 0.15.5"},
-      {:oban, "~> 2.0.0"},
+      {:oban, "~> 2.1.0"},
      {:gettext, "~> 0.18"},
      {:pbkdf2_elixir, "~> 1.2"},
      {:bcrypt_elixir, "~> 2.2"},
mix.lock
@ -24,11 +24,11 @@
|
|||
"crypt": {:git, "https://github.com/msantos/crypt.git", "f63a705f92c26955977ee62a313012e309a4d77a", [ref: "f63a705f92c26955977ee62a313012e309a4d77a"]},
|
||||
"custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
|
||||
"db_connection": {:hex, :db_connection, "2.2.2", "3bbca41b199e1598245b716248964926303b5d4609ff065125ce98bcd368939e", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "642af240d8a8affb93b4ba5a6fcd2bbcbdc327e1a524b825d383711536f8070c"},
|
||||
"decimal": {:hex, :decimal, "1.8.1", "a4ef3f5f3428bdbc0d35374029ffcf4ede8533536fa79896dd450168d9acdf3c", [:mix], [], "hexpm", "3cb154b00225ac687f6cbd4acc4b7960027c757a5152b369923ead9ddbca7aec"},
|
||||
"decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},
|
||||
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
|
||||
"earmark": {:hex, :earmark, "1.4.3", "364ca2e9710f6bff494117dbbd53880d84bebb692dafc3a78eb50aa3183f2bfd", [:mix], [], "hexpm", "8cf8a291ebf1c7b9539e3cddb19e9cef066c2441b1640f13c34c1d3cfc825fec"},
|
||||
"earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"},
|
||||
"ecto": {:hex, :ecto, "3.4.5", "2bcd262f57b2c888b0bd7f7a28c8a48aa11dc1a2c6a858e45dd8f8426d504265", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "8c6d1d4d524559e9b7a062f0498e2c206122552d63eacff0a6567ffe7a8e8691"},
|
||||
"ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"},
|
||||
"ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
|
||||
"ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"},
|
||||
"eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"},
|
||||
|
@ -59,7 +59,7 @@
|
|||
"httpoison": {:hex, :httpoison, "1.6.2", "ace7c8d3a361cebccbed19c283c349b3d26991eff73a1eaaa8abae2e3c8089b6", [:mix], [{:hackney, "~> 1.15 and >= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "aa2c74bd271af34239a3948779612f87df2422c2fdcfdbcec28d9c105f0773fe"},
|
||||
"idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "4bdd305eb64e18b0273864920695cb18d7a2021f31a11b9c5fbcd9a253f936e2"},
|
||||
"inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm", "64a2d30189704ae41ca7dbdd587f5291db5d1dda1414e0774c29ffc81088c1bc"},
|
||||
"jason": {:hex, :jason, "1.2.1", "12b22825e22f468c02eb3e4b9985f3d0cb8dc40b9bd704730efa11abd2708c44", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "b659b8571deedf60f79c5a608e15414085fa141344e2716fbd6988a084b5f993"},
|
||||
"jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"},
|
||||
"joken": {:hex, :joken, "2.2.0", "2daa1b12be05184aff7b5ace1d43ca1f81345962285fff3f88db74927c954d3a", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "b4f92e30388206f869dd25d1af628a1d99d7586e5cf0672f64d4df84c4d2f5e9"},
|
||||
"jose": {:hex, :jose, "1.10.1", "16d8e460dae7203c6d1efa3f277e25b5af8b659febfc2f2eb4bacf87f128b80a", [:mix, :rebar3], [], "hexpm", "3c7ddc8a9394b92891db7c2771da94bf819834a1a4c92e30857b7d582e2f8257"},
|
||||
"jumper": {:hex, :jumper, "1.0.1", "3c00542ef1a83532b72269fab9f0f0c82bf23a35e27d278bfd9ed0865cecabff", [:mix], [], "hexpm", "318c59078ac220e966d27af3646026db9b5a5e6703cb2aa3e26bcfaba65b7433"},
|
||||
|
@ -79,7 +79,7 @@
|
|||
"nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", [:mix], [], "hexpm", "27eac315a94909d4dc68bc07a4a83e06c8379237c5ea528a9acff4ca1c873c52"},
|
||||
"nimble_pool": {:hex, :nimble_pool, "0.1.0", "ffa9d5be27eee2b00b0c634eb649aa27f97b39186fec3c493716c2a33e784ec6", [:mix], [], "hexpm", "343a1eaa620ddcf3430a83f39f2af499fe2370390d4f785cd475b4df5acaf3f9"},
|
||||
"nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]},
|
||||
"oban": {:hex, :oban, "2.0.0", "e6ce70d94dd46815ec0882a1ffb7356df9a9d5b8a40a64ce5c2536617a447379", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cf574813bd048b98a698aa587c21367d2e06842d4e1b1993dcd6a696e9e633bd"},
|
||||
"oban": {:hex, :oban, "2.1.0", "034144686f7e76a102b5d67731f098d98a9e4a52b07c25ad580a01f83a7f1cf5", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c6f067fa3b308ed9e0e6beb2b34277c9c4e48bf95338edabd8f4a757a26e04c2"},
|
||||
"open_api_spex": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", "f296ac0924ba3cf79c7a588c4c252889df4c2edd", [ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"]},
|
||||
"p1_utils": {:hex, :p1_utils, "1.0.18", "3fe224de5b2e190d730a3c5da9d6e8540c96484cf4b4692921d1e28f0c32b01c", [:rebar3], [], "hexpm", "1fc8773a71a15553b179c986b22fbeead19b28fe486c332d4929700ffeb71f88"},
|
||||
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm", "17ef63abde837ad30680ea7f857dd9e7ced9476cdd7b0394432af4bfc241b960"},
|
||||
|
@ -95,7 +95,7 @@
|
|||
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "79fd4fcf34d110605c26560cbae8f23c603ec4158c08298bd4360fdea90bb5cf"},
|
||||
"poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm", "fec8660eb7733ee4117b85f55799fd3833eb769a6df71ccf8903e8dc5447cfce"},
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
"postgrex": {:hex, :postgrex, "0.15.5", "aec40306a622d459b01bff890fa42f1430dac61593b122754144ad9033a2152f", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "ed90c81e1525f65a2ba2279dbcebf030d6d13328daa2f8088b9661eb9143af7f"},
"postgrex": {:hex, :postgrex, "0.15.6", "a464c72010a56e3214fe2b99c1a76faab4c2bb0255cabdef30dea763a3569aa2", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "f99268325ac8f66ffd6c4964faab9e70fbf721234ab2ad238c00f9530b8cdd55"},
"pot": {:hex, :pot, "0.11.0", "61bad869a94534739dd4614a25a619bc5c47b9970e9a0ea5bef4628036fc7a16", [:rebar3], [], "hexpm", "57ee6ee6bdeb639661ffafb9acefe3c8f966e45394de6a766813bb9e1be4e54b"},
"prometheus": {:hex, :prometheus, "4.6.0", "20510f381db1ccab818b4cf2fac5fa6ab5cc91bc364a154399901c001465f46f", [:mix, :rebar3], [], "hexpm", "4905fd2992f8038eccd7aa0cd22f40637ed618c0bed1f75c05aacec15b7545de"},
"prometheus_ecto": {:hex, :prometheus_ecto, "1.4.3", "3dd4da1812b8e0dbee81ea58bb3b62ed7588f2eae0c9e97e434c46807ff82311", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm", "8d66289f77f913b37eda81fd287340c17e61a447549deb28efc254532b2bed82"},
@@ -120,5 +120,5 @@
"unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm", "1d1848c40487cdb0b30e8ed975e34e025860c02e419cb615d255849f3427439d"},
"unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"},
"web_push_encryption": {:hex, :web_push_encryption, "0.3.0", "598b5135e696fd1404dc8d0d7c0fa2c027244a4e5d5e5a98ba267f14fdeaabc8", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.8", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "f10bdd1afe527ede694749fb77a2f22f146a51b054c7fa541c9fd920fba7c875"},
"websocket_client": {:git, "https://github.com/jeremyong/websocket_client.git", "9a6f65d05ebf2725d62fb19262b21f1805a59fbf", []}
"websocket_client": {:git, "https://github.com/jeremyong/websocket_client.git", "9a6f65d05ebf2725d62fb19262b21f1805a59fbf", []},
}

9
priv/repo/migrations/20200925065249_make_user_ids_ci.exs
Normal file

@@ -0,0 +1,9 @@
defmodule Pleroma.Repo.Migrations.MakeUserIdsCI do
  use Ecto.Migration

  def change do
    # Migration retired, see
    # https://git.pleroma.social/pleroma/pleroma/-/issues/2188
    :noop
  end
end

11
priv/repo/migrations/20200928145912_revert_citext_change.exs
Normal file

@@ -0,0 +1,11 @@
defmodule Pleroma.Repo.Migrations.RevertCitextChange do
  use Ecto.Migration

  def change do
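    # Move users.uri back to plain text; the unique index is deliberately not
    # recreated here (see the commented line below and the follow-up migration).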
    alter table(:users) do
      modify(:uri, :text)
    end

    # create_if_not_exists(unique_index(:users, :uri))
  end
end

@@ -0,0 +1,8 @@
defmodule Pleroma.Repo.Migrations.UserURIsIndexPartThree do
  use Ecto.Migration

  def change do
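    # More than one user can share the same uri (covered by the
    # "doesn't die if two users have the same uri" test in this changeset),
    # so swap the unique index for a plain one.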
    drop_if_exists(unique_index(:users, :uri))
    create_if_not_exists(index(:users, :uri))
  end
end

@@ -1,5 +1,5 @@
defmodule Pleroma.Config.DeprecationWarningsTest do
  use ExUnit.Case, async: true
  use ExUnit.Case
  use Pleroma.Tests.Helpers

  import ExUnit.CaptureLog

@@ -66,6 +66,30 @@ test "check_media_proxy_whitelist_config/0" do
           end) =~ "Your config is using old format (only domain) for MediaProxy whitelist option"
  end

  test "check_welcome_message_config/0" do
    clear_config([:instance, :welcome_user_nickname], "LainChan")

    assert capture_log(fn ->
             DeprecationWarnings.check_welcome_message_config()
           end) =~ "Your config is using the old namespace for Welcome messages configuration."
  end

  test "check_hellthread_threshold/0" do
    clear_config([:mrf_hellthread, :threshold], 16)

    assert capture_log(fn ->
             DeprecationWarnings.check_hellthread_threshold()
           end) =~ "You are using the old configuration mechanism for the hellthread filter."
  end

  test "check_activity_expiration_config/0" do
    clear_config([Pleroma.ActivityExpiration, :enabled], true)

    assert capture_log(fn ->
             DeprecationWarnings.check_activity_expiration_config()
           end) =~ "Your config is using old namespace for activity expiration configuration."
  end

  describe "check_gun_pool_options/0" do
    test "await_up_timeout" do
      config = Config.get(:connections_pool)

@@ -74,7 +98,7 @@ test "await_up_timeout" do
      assert capture_log(fn ->
               DeprecationWarnings.check_gun_pool_options()
             end) =~
               "Your config is using old setting name `await_up_timeout` instead of `connect_timeout`"
               "Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`."
    end

    test "pool timeout" do

@@ -1,23 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.WebPushHttpClientMock do
  def get(url, headers \\ [], options \\ []) do
    {
      res,
      %Tesla.Env{status: status}
    } = Pleroma.HTTP.request(:get, url, "", headers, options)

    {res, %{status_code: status}}
  end

  def post(url, body, headers \\ [], options \\ []) do
    {
      res,
      %Tesla.Env{status: status}
    } = Pleroma.HTTP.request(:post, url, body, headers, options)

    {res, %{status_code: status}}
  end
end

@@ -6,6 +6,8 @@ defmodule Mix.Tasks.Pleroma.EmailTest do
  alias Pleroma.Config
  alias Pleroma.Tests.ObanHelpers

  import Pleroma.Factory

  setup_all do
    Mix.shell(Mix.Shell.Process)

@@ -17,6 +19,7 @@ defmodule Mix.Tasks.Pleroma.EmailTest do
  end

  setup do: clear_config([Pleroma.Emails.Mailer, :enabled], true)
  setup do: clear_config([:instance, :account_activation_required], true)

  describe "pleroma.email test" do
    test "Sends test email with no given address" do

@@ -50,5 +53,71 @@ test "Sends test email with given address" do
        html_body: ~r/a test email was requested./i
      )
    end

    test "Sends confirmation emails" do
      local_user1 =
        insert(:user, %{
          confirmation_pending: true,
          confirmation_token: "mytoken",
          deactivated: false,
          email: "local1@pleroma.com",
          local: true
        })

      local_user2 =
        insert(:user, %{
          confirmation_pending: true,
          confirmation_token: "mytoken",
          deactivated: false,
          email: "local2@pleroma.com",
          local: true
        })

      :ok = Mix.Tasks.Pleroma.Email.run(["resend_confirmation_emails"])

      ObanHelpers.perform_all()

      assert_email_sent(to: {local_user1.name, local_user1.email})
      assert_email_sent(to: {local_user2.name, local_user2.email})
    end

    test "Does not send confirmation email to inappropriate users" do
      # confirmed user
      insert(:user, %{
        confirmation_pending: false,
        confirmation_token: "mytoken",
        deactivated: false,
        email: "confirmed@pleroma.com",
        local: true
      })

      # remote user
      insert(:user, %{
        deactivated: false,
        email: "remote@not-pleroma.com",
        local: false
      })

      # deactivated user =
      insert(:user, %{
        deactivated: true,
        email: "deactivated@pleroma.com",
        local: false
      })

      # invisible user
      insert(:user, %{
        deactivated: false,
        email: "invisible@pleroma.com",
        local: true,
        invisible: true
      })

      :ok = Mix.Tasks.Pleroma.Email.run(["resend_confirmation_emails"])

      ObanHelpers.perform_all()

      refute_email_sent()
    end
  end
end

@@ -81,6 +81,80 @@ test "relay is unfollowed" do
      assert undo_activity.data["object"]["id"] == cancelled_activity.data["id"]
      refute "#{target_instance}/followers" in User.following(local_user)
    end

    test "unfollow when relay is dead" do
      user = insert(:user)
      target_instance = user.ap_id

      Mix.Tasks.Pleroma.Relay.run(["follow", target_instance])

      %User{ap_id: follower_id} = local_user = Relay.get_actor()
      target_user = User.get_cached_by_ap_id(target_instance)
      follow_activity = Utils.fetch_latest_follow(local_user, target_user)
      User.follow(local_user, target_user)

      assert "#{target_instance}/followers" in User.following(local_user)

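      # Simulate a dead relay: its actor URL now returns 404 and the local record is removed below.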
      Tesla.Mock.mock(fn %{method: :get, url: ^target_instance} ->
        %Tesla.Env{status: 404}
      end)

      Pleroma.Repo.delete(user)
      Cachex.clear(:user_cache)

      Mix.Tasks.Pleroma.Relay.run(["unfollow", target_instance])

      cancelled_activity = Activity.get_by_ap_id(follow_activity.data["id"])
      assert cancelled_activity.data["state"] == "accept"

      assert [] ==
               ActivityPub.fetch_activities(
                 [],
                 %{
                   type: "Undo",
                   actor_id: follower_id,
                   skip_preload: true,
                   invisible_actors: true
                 }
               )
    end

    test "force unfollow when relay is dead" do
      user = insert(:user)
      target_instance = user.ap_id

      Mix.Tasks.Pleroma.Relay.run(["follow", target_instance])

      %User{ap_id: follower_id} = local_user = Relay.get_actor()
      target_user = User.get_cached_by_ap_id(target_instance)
      follow_activity = Utils.fetch_latest_follow(local_user, target_user)
      User.follow(local_user, target_user)

      assert "#{target_instance}/followers" in User.following(local_user)

      Tesla.Mock.mock(fn %{method: :get, url: ^target_instance} ->
        %Tesla.Env{status: 404}
      end)

      Pleroma.Repo.delete(user)
      Cachex.clear(:user_cache)

      Mix.Tasks.Pleroma.Relay.run(["unfollow", target_instance, "--force"])

      cancelled_activity = Activity.get_by_ap_id(follow_activity.data["id"])
      assert cancelled_activity.data["state"] == "cancelled"

      [undo_activity] =
        ActivityPub.fetch_activities(
          [],
          %{type: "Undo", actor_id: follower_id, skip_preload: true, invisible_actors: true}
        )

      assert undo_activity.data["type"] == "Undo"
      assert undo_activity.data["actor"] == local_user.ap_id
      assert undo_activity.data["object"]["id"] == cancelled_activity.data["id"]
      refute "#{target_instance}/followers" in User.following(local_user)
    end
  end

  describe "mix pleroma.relay list" do

@@ -225,47 +225,64 @@ test "no user to deactivate" do
    test "All statuses set" do
      user = insert(:user)

      Mix.Tasks.Pleroma.User.run(["set", user.nickname, "--moderator", "--admin", "--locked"])
      Mix.Tasks.Pleroma.User.run([
        "set",
        user.nickname,
        "--admin",
        "--confirmed",
        "--locked",
        "--moderator"
      ])

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Moderator status .* true/
      assert message =~ ~r/Admin status .* true/

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Confirmation pending .* false/

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Locked status .* true/

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Admin status .* true/
      assert message =~ ~r/Moderator status .* true/

      user = User.get_cached_by_nickname(user.nickname)
      assert user.is_moderator
      assert user.locked
      assert user.is_admin
      refute user.confirmation_pending
    end

    test "All statuses unset" do
      user = insert(:user, locked: true, is_moderator: true, is_admin: true)
      user =
        insert(:user, locked: true, is_moderator: true, is_admin: true, confirmation_pending: true)

      Mix.Tasks.Pleroma.User.run([
        "set",
        user.nickname,
        "--no-moderator",
        "--no-admin",
        "--no-locked"
        "--no-confirmed",
        "--no-locked",
        "--no-moderator"
      ])

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Moderator status .* false/
      assert message =~ ~r/Admin status .* false/

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Confirmation pending .* true/

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Locked status .* false/

      assert_received {:mix_shell, :info, [message]}
      assert message =~ ~r/Admin status .* false/
      assert message =~ ~r/Moderator status .* false/

      user = User.get_cached_by_nickname(user.nickname)
      refute user.is_moderator
      refute user.locked
      refute user.is_admin
      assert user.confirmation_pending
    end

    test "no user to set status" do

@@ -554,4 +571,44 @@ test "it prints an error message when user is not exist" do
      assert message =~ "Could not change user tags"
    end
  end

  describe "bulk confirm and unconfirm" do
    test "confirm all" do
      user1 = insert(:user, confirmation_pending: true)
      user2 = insert(:user, confirmation_pending: true)

      assert user1.confirmation_pending
      assert user2.confirmation_pending

      Mix.Tasks.Pleroma.User.run(["confirm_all"])

      user1 = User.get_cached_by_nickname(user1.nickname)
      user2 = User.get_cached_by_nickname(user2.nickname)

      refute user1.confirmation_pending
      refute user2.confirmation_pending
    end

    test "unconfirm all" do
      user1 = insert(:user, confirmation_pending: false)
      user2 = insert(:user, confirmation_pending: false)
      admin = insert(:user, is_admin: true, confirmation_pending: false)
      mod = insert(:user, is_moderator: true, confirmation_pending: false)

      refute user1.confirmation_pending
      refute user2.confirmation_pending

      Mix.Tasks.Pleroma.User.run(["unconfirm_all"])

      user1 = User.get_cached_by_nickname(user1.nickname)
      user2 = User.get_cached_by_nickname(user2.nickname)
      admin = User.get_cached_by_nickname(admin.nickname)
      mod = User.get_cached_by_nickname(mod.nickname)

      assert user1.confirmation_pending
      assert user2.confirmation_pending
      refute admin.confirmation_pending
      refute mod.confirmation_pending
    end
  end
end

@@ -17,6 +17,46 @@ defmodule Pleroma.UserSearchTest do
  describe "User.search" do
    setup do: clear_config([:instance, :limit_to_local_content])

    test "returns a resolved user as the first result" do
      Pleroma.Config.put([:instance, :limit_to_local_content], false)
      user = insert(:user, %{nickname: "no_relation", ap_id: "https://lain.com/users/lain"})
      _user = insert(:user, %{nickname: "com_user"})

      [first_user, _second_user] = User.search("https://lain.com/users/lain", resolve: true)

      assert first_user.id == user.id
    end

    test "returns a user with matching ap_id as the first result" do
      user = insert(:user, %{nickname: "no_relation", ap_id: "https://lain.com/users/lain"})
      _user = insert(:user, %{nickname: "com_user"})

      [first_user, _second_user] = User.search("https://lain.com/users/lain")

      assert first_user.id == user.id
    end

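    # Guards the switch to a non-unique users.uri index made earlier in this changeset.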
test "doesn't die if two users have the same uri" do
|
||||
insert(:user, %{uri: "https://gensokyo.2hu/@raymoo"})
|
||||
insert(:user, %{uri: "https://gensokyo.2hu/@raymoo"})
|
||||
assert [_first_user, _second_user] = User.search("https://gensokyo.2hu/@raymoo")
|
||||
end
|
||||
|
||||
test "returns a user with matching uri as the first result" do
|
||||
user =
|
||||
insert(:user, %{
|
||||
nickname: "no_relation",
|
||||
ap_id: "https://lain.com/users/lain",
|
||||
uri: "https://lain.com/@lain"
|
||||
})
|
||||
|
||||
_user = insert(:user, %{nickname: "com_user"})
|
||||
|
||||
[first_user, _second_user] = User.search("https://lain.com/@lain")
|
||||
|
||||
assert first_user.id == user.id
|
||||
end
|
||||
|
||||
test "excludes invisible users from results" do
|
||||
user = insert(:user, %{nickname: "john t1000"})
|
||||
insert(:user, %{invisible: true, nickname: "john t800"})
|
||||
|
|
|
@@ -61,6 +61,8 @@ test "matches are case-insensitive" do

  describe "describe/0" do
    test "it works as expected with noop policy" do
      clear_config([:mrf, :policies], [Pleroma.Web.ActivityPub.MRF.NoOpPolicy])

      expected = %{
        mrf_policies: ["NoOpPolicy"],
        exclusions: false

@@ -63,6 +63,46 @@ test "returns activity" do
      assert activity.data["to"] == [user.ap_id]
      refute "#{user.ap_id}/followers" in User.following(service_actor)
    end

    test "force unfollow when target service is dead" do
      user = insert(:user)
      user_ap_id = user.ap_id
      user_id = user.id

      Tesla.Mock.mock(fn %{method: :get, url: ^user_ap_id} ->
        %Tesla.Env{status: 404}
      end)

      service_actor = Relay.get_actor()
      CommonAPI.follow(service_actor, user)
      assert "#{user.ap_id}/followers" in User.following(service_actor)

      assert Pleroma.Repo.get_by(
               Pleroma.FollowingRelationship,
               follower_id: service_actor.id,
               following_id: user_id
             )

      Pleroma.Repo.delete(user)
      Cachex.clear(:user_cache)

      assert {:ok, %Activity{} = activity} = Relay.unfollow(user_ap_id, %{force: true})

      assert refresh_record(service_actor).following_count == 0

      refute Pleroma.Repo.get_by(
               Pleroma.FollowingRelationship,
               follower_id: service_actor.id,
               following_id: user_id
             )

      assert activity.actor == "#{Pleroma.Web.Endpoint.url()}/relay"
      assert user.ap_id in activity.recipients
      assert activity.data["type"] == "Undo"
      assert activity.data["actor"] == service_actor.ap_id
      assert activity.data["to"] == [user_ap_id]
      refute "#{user.ap_id}/followers" in User.following(service_actor)
    end
  end

  describe "publish/1" do

@@ -157,12 +157,12 @@ test "Mastodon Question activity with custom emojis" do
      }
    end

    test "returns an error if received a second time" do
    test "returns same activity if received a second time" do
      data = File.read!("test/fixtures/mastodon-question-activity.json") |> Poison.decode!()

      assert {:ok, %Activity{local: false} = activity} = Transmogrifier.handle_incoming(data)

      assert {:error, {:validate_object, {:error, _}}} = Transmogrifier.handle_incoming(data)
      assert {:ok, ^activity} = Transmogrifier.handle_incoming(data)
    end

    test "accepts a Question with no content" do

@@ -29,6 +29,23 @@ defmodule Pleroma.Web.CommonAPITest do
  setup do: clear_config([:instance, :limit])
  setup do: clear_config([:instance, :max_pinned_statuses])

  describe "posting polls" do
    test "it posts a poll" do
      user = insert(:user)

      {:ok, activity} =
        CommonAPI.post(user, %{
          status: "who is the best",
          poll: %{expires_in: 600, options: ["reimu", "marisa"]}
        })

      object = Object.normalize(activity)

      assert object.data["type"] == "Question"
      assert object.data["oneOf"] |> length() == 2
    end
  end

  describe "blocking" do
    setup do
      blocker = insert(:user)

@@ -81,6 +81,15 @@ test "it returns 404 when url is in banned_urls cache", %{conn: conn, url: url}
  end

  describe "Media Preview Proxy" do
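    # Fails early, pointing at the installation docs, when the optional media
    # packages required by the preview proxy are not installed.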
    def assert_dependencies_installed do
      missing_dependencies = Pleroma.Helpers.MediaHelper.missing_dependencies()

      assert missing_dependencies == [],
             "Error: missing dependencies (please refer to `docs/installation`): #{
               inspect(missing_dependencies)
             }"
    end

    setup do
      clear_config([:media_proxy, :enabled], true)
      clear_config([:media_preview_proxy, :enabled], true)

@@ -184,6 +193,8 @@ test "with `static=true` and GIF image preview requested, responds with JPEG ima
           url: url,
           media_proxy_url: media_proxy_url
         } do
      assert_dependencies_installed()

      # Setting a high :min_content_length to ensure this scenario is not affected by its logic
      clear_config([:media_preview_proxy, :min_content_length], 1_000_000_000)

@@ -270,6 +281,8 @@ test "thumbnails PNG images into PNG", %{
           url: url,
           media_proxy_url: media_proxy_url
         } do
      assert_dependencies_installed()

      Tesla.Mock.mock(fn
        %{method: "head", url: ^media_proxy_url} ->
          %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/png"}]}

@@ -290,6 +303,8 @@ test "thumbnails JPEG images into JPEG", %{
           url: url,
           media_proxy_url: media_proxy_url
         } do
      assert_dependencies_installed()

      Tesla.Mock.mock(fn
        %{method: "head", url: ^media_proxy_url} ->
          %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/jpeg"}]}

@@ -5,6 +5,8 @@
defmodule Pleroma.Web.Push.ImplTest do
  use Pleroma.DataCase

  import Pleroma.Factory

  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.User

@@ -12,10 +14,6 @@ defmodule Pleroma.Web.Push.ImplTest do
  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.Push.Impl
  alias Pleroma.Web.Push.Subscription
  alias Pleroma.Web.WebPushHttpClientMock

  import Mock
  import Pleroma.Factory

  setup do
    Tesla.Mock.mock(fn

@@ -80,22 +78,6 @@ test "successful message sending" do
    assert Impl.push_message(@message, @sub, @api_key, %Subscription{}) == :ok
  end

  test_with_mock "uses WebPushHttpClientMock as an HTTP client", WebPushHttpClientMock,
    post: fn _, _, _ -> {:ok, %{status_code: 200}} end do
    Impl.push_message(@message, @sub, @api_key, %Subscription{})
    assert_called(WebPushHttpClientMock.post("https://example.com/example/1234", :_, :_))
  end

  test_with_mock "uses Pleroma.HTTP as an HTTP client", Pleroma.HTTP,
    post: fn _, _, _ -> {:ok, %{status_code: 200}} end do
    client = Application.get_env(:web_push_encryption, :http_client)
    on_exit(fn -> Application.put_env(:web_push_encryption, :http_client, client) end)
    Application.put_env(:web_push_encryption, :http_client, Pleroma.HTTP)

    Impl.push_message(@message, @sub, @api_key, %Subscription{})
    assert_called(Pleroma.HTTP.post("https://example.com/example/1234", :_, :_))
  end

  @tag capture_log: true
  test "fail message sending" do
    assert Impl.push_message(