Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into remake-remodel-dms
commit 6ff079ca9f
@@ -44,6 +44,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Fix follower/blocks import when nicknames starts with @
- Filtering of push notifications on activities from blocked domains
- Resolving Peertube accounts with Webfinger
- `blob:` urls not being allowed by connect-src CSP

## [Unreleased (patch)]
@@ -6,10 +6,6 @@ A Pleroma instance can be identified by "<Mastodon version> (compatible; Pleroma
 
 Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However just like Mastodon's ids they are lexically sortable strings
 
-## Attachment cap
-
-Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting.
-
 ## Timelines
 
 Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users.
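Purely as an illustration of the `with_muted` parameter described above (this is not part of the diff; the instance URL, bearer token, and the HTTPoison/Jason dependencies are assumptions):

```elixir
# Hedged sketch: fetch the home timeline including activities from muted accounts.
# "example.social" and the bearer token are placeholders.
Mix.install([{:httpoison, "~> 1.8"}, {:jason, "~> 1.2"}])

url = "https://example.social/api/v1/timelines/home"
headers = [{"Authorization", "Bearer <token>"}]

%HTTPoison.Response{status_code: 200, body: body} =
  HTTPoison.get!(url, headers, params: [with_muted: true])

# Statuses by muted (but not blocked) accounts are now part of the result.
body |> Jason.decode!() |> length()
```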
@@ -32,12 +28,20 @@ Has these additional fields under the `pleroma` object:
 - `thread_muted`: true if the thread the post belongs to is muted
 - `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
 
-## Attachments
+## Media Attachments
 
 Has these additional fields under the `pleroma` object:
 
 - `mime_type`: mime type of the attachment.
 
+### Attachment cap
+
+Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting.
+
+### Limitations
+
+Pleroma does not process remote images and therefore cannot include fields such as `meta` and `blurhash`. It does not support focal points or aspect ratios. The frontend is expected to handle it.
+
 ## Accounts
 
 The `id` parameter can also be the `nickname` of the user. This only works in these endpoints, not the deeper nested ones for following etc.
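Likewise illustrative only (same assumed setup and placeholder instance as the sketch above): the account endpoint accepts a nickname in place of the id.

```elixir
# Hedged sketch: "lain" and "example.social" are placeholder values.
%HTTPoison.Response{status_code: 200, body: body} =
  HTTPoison.get!("https://example.social/api/v1/accounts/lain")

Jason.decode!(body)["acct"]
# => "lain"
```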
@@ -69,3 +69,32 @@ mix pleroma.database update_users_following_followers_counts
 ```sh tab="From Source"
 mix pleroma.database fix_likes_collections
 ```
+
+## Vacuum the database
+
+### Analyze
+
+Running an `analyze` vacuum job can improve performance by updating statistics used by the query planner. **It is safe to cancel this.**
+
+```sh tab="OTP"
+./bin/pleroma_ctl database vacuum analyze
+```
+
+```sh tab="From Source"
+mix pleroma.database vacuum analyze
+```
+
+### Full
+
+Running a `full` vacuum job rebuilds your entire database by reading all of the data and rewriting it into smaller
+and more compact files with an optimized layout. This process will take a long time and use additional disk space as
+it builds the files side-by-side the existing database files. It can make your database faster and use less disk space,
+but should only be run if necessary. **It is safe to cancel this.**
+
+```sh tab="OTP"
+./bin/pleroma_ctl database vacuum full
+```
+
+```sh tab="From Source"
+mix pleroma.database vacuum full
+```
@@ -4,6 +4,7 @@
 defmodule Mix.Tasks.Pleroma.Database do
   alias Pleroma.Conversation
+  alias Pleroma.Maintenance
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.User

@@ -34,13 +35,7 @@ def run(["remove_embedded_objects" | args]) do
     )
 
     if Keyword.get(options, :vacuum) do
-      Logger.info("Runnning VACUUM FULL")
-
-      Repo.query!(
-        "vacuum full;",
-        [],
-        timeout: :infinity
-      )
+      Maintenance.vacuum("full")
     end
   end

@@ -94,13 +89,7 @@ def run(["prune_objects" | args]) do
     |> Repo.delete_all(timeout: :infinity)
 
     if Keyword.get(options, :vacuum) do
-      Logger.info("Runnning VACUUM FULL")
-
-      Repo.query!(
-        "vacuum full;",
-        [],
-        timeout: :infinity
-      )
+      Maintenance.vacuum("full")
     end
   end

@@ -135,4 +124,10 @@ def run(["fix_likes_collections"]) do
     end)
     |> Stream.run()
   end
+
+  def run(["vacuum", args]) do
+    start_pleroma()
+
+    Maintenance.vacuum(args)
+  end
 end
lib/pleroma/maintenance.ex (new file, 37 lines)
@@ -0,0 +1,37 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Maintenance do
+  alias Pleroma.Repo
+  require Logger
+
+  def vacuum(args) do
+    case args do
+      "analyze" ->
+        Logger.info("Runnning VACUUM ANALYZE.")
+
+        Repo.query!(
+          "vacuum analyze;",
+          [],
+          timeout: :infinity
+        )
+
+      "full" ->
+        Logger.info("Runnning VACUUM FULL.")
+
+        Logger.warn(
+          "Re-packing your entire database may take a while and will consume extra disk space during the process."
+        )
+
+        Repo.query!(
+          "vacuum full;",
+          [],
+          timeout: :infinity
+        )
+
+      _ ->
+        Logger.error("Error: invalid vacuum argument.")
+    end
+  end
+end
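For orientation (not part of the diff): the new `Pleroma.Maintenance.vacuum/1` is what the mix task above and the documented `vacuum` CLI commands delegate to, so calling it directly from a console of a running instance behaves the same way.

```elixir
# Minimal sketch of invoking the helper directly (e.g. from an attached IEx shell).
Pleroma.Maintenance.vacuum("analyze")

# Any argument other than "analyze" or "full" only logs an error:
Pleroma.Maintenance.vacuum("bogus")
# logs "Error: invalid vacuum argument."
```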
@@ -75,10 +75,10 @@ defp csp_string do
        sources = get_proxy_and_attachment_sources()
        {[img_src, sources], [media_src, sources]}
      else
-       {img_src, media_src}
+       {[img_src, " https:"], [media_src, " https:"]}
      end
 
-    connect_src = ["connect-src 'self' ", static_url, ?\s, websocket_url]
+    connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
 
     connect_src =
       if Pleroma.Config.get(:env) == :dev do
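As a side note on the iodata being built here, a small self-contained sketch (placeholder URLs, not taken from the diff) of what the updated `connect_src` fragment flattens to:

```elixir
# Illustrative only: "example.social" stands in for the instance's static and websocket URLs.
static_url = "https://example.social"
websocket_url = "wss://example.social"

connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
IO.iodata_to_binary(connect_src)
# => "connect-src 'self' blob: https://example.social wss://example.social"
```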
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
   alias Pleroma.Emoji
   alias Pleroma.Object
   alias Pleroma.User
+  alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility

@@ -122,15 +123,20 @@ def like(actor, object) do
     end
   end
 
   @spec announce(User.t(), Object.t(), keyword()) :: {:ok, map(), keyword()}
   def announce(actor, object, options \\ []) do
     public? = Keyword.get(options, :public, false)
-    to = [actor.follower_address, object.data["actor"]]
 
     to =
-      if public? do
-        [Pleroma.Constants.as_public() | to]
-      else
-        to
+      cond do
+        actor.ap_id == Relay.relay_ap_id() ->
+          [actor.follower_address]
+
+        public? ->
+          [actor.follower_address, object.data["actor"], Pleroma.Constants.as_public()]
+
+        true ->
+          [actor.follower_address, object.data["actor"]]
       end
 
     {:ok,
@@ -49,11 +49,14 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
   # - Stream out the announce
   def handle(%{data: %{"type" => "Announce"}} = object, meta) do
     announced_object = Object.get_by_ap_id(object.data["object"])
+    user = User.get_cached_by_ap_id(object.data["actor"])
 
     Utils.add_announce_to_object(object, announced_object)
 
-    Notification.create_notifications(object)
-    ActivityPub.stream_out(object)
+    if !User.is_internal_user?(user) do
+      Notification.create_notifications(object)
+      ActivityPub.stream_out(object)
+    end
 
     {:ok, object, meta}
   end
@@ -21,6 +21,7 @@ defmodule Pleroma.Web.MastodonAPI.ConversationController do
 
   @doc "GET /api/v1/conversations"
   def index(%{assigns: %{user: user}} = conn, params) do
+    params = stringify_pagination_params(params)
     participations = Participation.for_user_with_last_activity_id(user, params)
 
     conn

@@ -36,4 +37,20 @@ def mark_as_read(%{assigns: %{user: user}} = conn, %{id: participation_id}) do
     render(conn, "participation.json", participation: participation, for: user)
   end
 
+  defp stringify_pagination_params(params) do
+    atom_keys =
+      Pleroma.Pagination.page_keys()
+      |> Enum.map(&String.to_atom(&1))
+
+    str_keys =
+      params
+      |> Map.take(atom_keys)
+      |> Enum.map(fn {key, value} -> {to_string(key), value} end)
+      |> Enum.into(%{})
+
+    params
+    |> Map.delete(atom_keys)
+    |> Map.merge(str_keys)
+  end
 end
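To make the intent of `stringify_pagination_params/1` concrete (an illustration, not part of the diff): validated params presumably arrive with atom keys such as `:limit`, while the pagination code expects string keys, so the helper re-keys them. A standalone sketch of the same idea, with a made-up key list standing in for `Pleroma.Pagination.page_keys/0`:

```elixir
# Standalone sketch of the re-keying idea; the key list below is a stand-in for
# Pleroma.Pagination.page_keys/0, not the real value.
page_keys = ["limit", "max_id", "min_id", "since_id", "offset"]
atom_keys = Enum.map(page_keys, &String.to_atom/1)

params = %{limit: 1, max_id: "9gR5dcDU9zW9mIbmNG"}

str_keys =
  params
  |> Map.take(atom_keys)
  |> Enum.map(fn {key, value} -> {to_string(key), value} end)
  |> Enum.into(%{})

params
|> Map.drop(atom_keys)
|> Map.merge(str_keys)
# => %{"limit" => 1, "max_id" => "9gR5dcDU9zW9mIbmNG"}
```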
@@ -137,7 +137,7 @@ def filtered_by_user?(%User{} = user, %Activity{} = item) do
          false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
          false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
          true <- thread_containment(item, user),
-         false <- CommonAPI.thread_muted?(user, item) do
+         false <- CommonAPI.thread_muted?(user, parent) do
       false
     else
       _ -> true
@@ -108,6 +108,7 @@ test "returns error when object is unknown" do
       assert {:ok, %Activity{} = activity} = Relay.publish(note)
       assert activity.data["type"] == "Announce"
       assert activity.data["actor"] == service_actor.ap_id
+      assert activity.data["to"] == [service_actor.follower_address]
       assert called(Pleroma.Web.Federator.publish(activity))
     end
 
|
@ -12,84 +12,88 @@ defmodule Pleroma.Web.MastodonAPI.ConversationControllerTest do
|
|||
|
||||
setup do: oauth_access(["read:statuses"])
|
||||
|
||||
test "returns a list of conversations", %{user: user_one, conn: conn} do
|
||||
user_two = insert(:user)
|
||||
user_three = insert(:user)
|
||||
describe "returns a list of conversations" do
|
||||
setup(%{user: user_one, conn: conn}) do
|
||||
user_two = insert(:user)
|
||||
user_three = insert(:user)
|
||||
|
||||
{:ok, user_two} = User.follow(user_two, user_one)
|
||||
{:ok, user_two} = User.follow(user_two, user_one)
|
||||
|
||||
assert User.get_cached_by_id(user_two.id).unread_conversation_count == 0
|
||||
{:ok, %{user: user_one, user_two: user_two, user_three: user_three, conn: conn}}
|
||||
end
|
||||
|
||||
{:ok, direct} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}, @#{user_three.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
test "returns correct conversations", %{
|
||||
user: user_one,
|
||||
user_two: user_two,
|
||||
user_three: user_three,
|
||||
conn: conn
|
||||
} do
|
||||
assert User.get_cached_by_id(user_two.id).unread_conversation_count == 0
|
||||
{:ok, direct} = create_direct_message(user_one, [user_two, user_three])
|
||||
|
||||
assert User.get_cached_by_id(user_two.id).unread_conversation_count == 1
|
||||
assert User.get_cached_by_id(user_two.id).unread_conversation_count == 1
|
||||
|
||||
{:ok, _follower_only} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}!",
|
||||
visibility: "private"
|
||||
})
|
||||
{:ok, _follower_only} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}!",
|
||||
visibility: "private"
|
||||
})
|
||||
|
||||
res_conn = get(conn, "/api/v1/conversations")
|
||||
res_conn = get(conn, "/api/v1/conversations")
|
||||
|
||||
assert response = json_response_and_validate_schema(res_conn, 200)
|
||||
assert response = json_response_and_validate_schema(res_conn, 200)
|
||||
|
||||
assert [
|
||||
%{
|
||||
"id" => res_id,
|
||||
"accounts" => res_accounts,
|
||||
"last_status" => res_last_status,
|
||||
"unread" => unread
|
||||
}
|
||||
] = response
|
||||
assert [
|
||||
%{
|
||||
"id" => res_id,
|
||||
"accounts" => res_accounts,
|
||||
"last_status" => res_last_status,
|
||||
"unread" => unread
|
||||
}
|
||||
] = response
|
||||
|
||||
account_ids = Enum.map(res_accounts, & &1["id"])
|
||||
assert length(res_accounts) == 2
|
||||
assert user_two.id in account_ids
|
||||
assert user_three.id in account_ids
|
||||
assert is_binary(res_id)
|
||||
assert unread == false
|
||||
assert res_last_status["id"] == direct.id
|
||||
assert User.get_cached_by_id(user_one.id).unread_conversation_count == 0
|
||||
account_ids = Enum.map(res_accounts, & &1["id"])
|
||||
assert length(res_accounts) == 2
|
||||
assert user_two.id in account_ids
|
||||
assert user_three.id in account_ids
|
||||
assert is_binary(res_id)
|
||||
assert unread == false
|
||||
assert res_last_status["id"] == direct.id
|
||||
assert User.get_cached_by_id(user_one.id).unread_conversation_count == 0
|
||||
end
|
||||
|
||||
test "observes limit params", %{
|
||||
user: user_one,
|
||||
user_two: user_two,
|
||||
user_three: user_three,
|
||||
conn: conn
|
||||
} do
|
||||
{:ok, _} = create_direct_message(user_one, [user_two, user_three])
|
||||
{:ok, _} = create_direct_message(user_two, [user_one, user_three])
|
||||
{:ok, _} = create_direct_message(user_three, [user_two, user_one])
|
||||
|
||||
res_conn = get(conn, "/api/v1/conversations?limit=1")
|
||||
|
||||
assert response = json_response_and_validate_schema(res_conn, 200)
|
||||
|
||||
assert Enum.count(response) == 1
|
||||
|
||||
res_conn = get(conn, "/api/v1/conversations?limit=2")
|
||||
|
||||
assert response = json_response_and_validate_schema(res_conn, 200)
|
||||
|
||||
assert Enum.count(response) == 2
|
||||
end
|
||||
end
|
||||
|
||||
test "filters conversations by recipients", %{user: user_one, conn: conn} do
|
||||
user_two = insert(:user)
|
||||
user_three = insert(:user)
|
||||
|
||||
{:ok, direct1} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
|
||||
{:ok, _direct2} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_three.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
|
||||
{:ok, direct3} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}, @#{user_three.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
|
||||
{:ok, _direct4} =
|
||||
CommonAPI.post(user_two, %{
|
||||
status: "Hi @#{user_three.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
|
||||
{:ok, direct5} =
|
||||
CommonAPI.post(user_two, %{
|
||||
status: "Hi @#{user_one.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
{:ok, direct1} = create_direct_message(user_one, [user_two])
|
||||
{:ok, _direct2} = create_direct_message(user_one, [user_three])
|
||||
{:ok, direct3} = create_direct_message(user_one, [user_two, user_three])
|
||||
{:ok, _direct4} = create_direct_message(user_two, [user_three])
|
||||
{:ok, direct5} = create_direct_message(user_two, [user_one])
|
||||
|
||||
assert [conversation1, conversation2] =
|
||||
conn
|
||||
|
@ -109,12 +113,7 @@ test "filters conversations by recipients", %{user: user_one, conn: conn} do
|
|||
|
||||
test "updates the last_status on reply", %{user: user_one, conn: conn} do
|
||||
user_two = insert(:user)
|
||||
|
||||
{:ok, direct} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}",
|
||||
visibility: "direct"
|
||||
})
|
||||
{:ok, direct} = create_direct_message(user_one, [user_two])
|
||||
|
||||
{:ok, direct_reply} =
|
||||
CommonAPI.post(user_two, %{
|
||||
|
@ -133,12 +132,7 @@ test "updates the last_status on reply", %{user: user_one, conn: conn} do
|
|||
|
||||
test "the user marks a conversation as read", %{user: user_one, conn: conn} do
|
||||
user_two = insert(:user)
|
||||
|
||||
{:ok, direct} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}",
|
||||
visibility: "direct"
|
||||
})
|
||||
{:ok, direct} = create_direct_message(user_one, [user_two])
|
||||
|
||||
assert User.get_cached_by_id(user_one.id).unread_conversation_count == 0
|
||||
assert User.get_cached_by_id(user_two.id).unread_conversation_count == 1
|
||||
|
@ -194,15 +188,22 @@ test "the user marks a conversation as read", %{user: user_one, conn: conn} do
|
|||
|
||||
test "(vanilla) Mastodon frontend behaviour", %{user: user_one, conn: conn} do
|
||||
user_two = insert(:user)
|
||||
|
||||
{:ok, direct} =
|
||||
CommonAPI.post(user_one, %{
|
||||
status: "Hi @#{user_two.nickname}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
{:ok, direct} = create_direct_message(user_one, [user_two])
|
||||
|
||||
res_conn = get(conn, "/api/v1/statuses/#{direct.id}/context")
|
||||
|
||||
assert %{"ancestors" => [], "descendants" => []} == json_response(res_conn, 200)
|
||||
end
|
||||
|
||||
defp create_direct_message(sender, recips) do
|
||||
hellos =
|
||||
recips
|
||||
|> Enum.map(fn s -> "@#{s.nickname}" end)
|
||||
|> Enum.join(", ")
|
||||
|
||||
CommonAPI.post(sender, %{
|
||||
status: "Hi #{hellos}!",
|
||||
visibility: "direct"
|
||||
})
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -114,6 +114,25 @@ test "it streams boosts of the user in the 'user' stream", %{user: user} do
       refute Streamer.filtered_by_user?(user, announce)
     end
 
+    test "it streams boosts of mastodon user in the 'user' stream", %{user: user} do
+      Streamer.get_topic_and_add_socket("user", user)
+
+      other_user = insert(:user)
+      {:ok, activity} = CommonAPI.post(other_user, %{status: "hey"})
+
+      data =
+        File.read!("test/fixtures/mastodon-announce.json")
+        |> Poison.decode!()
+        |> Map.put("object", activity.data["object"])
+        |> Map.put("actor", user.ap_id)
+
+      {:ok, %Pleroma.Activity{data: _data, local: false} = announce} =
+        Pleroma.Web.ActivityPub.Transmogrifier.handle_incoming(data)
+
+      assert_receive {:render_with_user, Pleroma.Web.StreamerView, "update.json", ^announce}
+      refute Streamer.filtered_by_user?(user, announce)
+    end
+
     test "it sends notify to in the 'user' stream", %{user: user, notify: notify} do
       Streamer.get_topic_and_add_socket("user", user)
       Streamer.stream("user", notify)