Merge branch 'auto_linker' into 'develop'

AutoLinker

Closes #609

See merge request pleroma/pleroma!839
kaniini 2019-02-26 23:32:26 +00:00
commit 1c265b3b19
10 changed files with 211 additions and 290 deletions

View file

@@ -344,6 +344,16 @@
federator_outgoing: [max_jobs: 50],
mailer: [max_jobs: 10]
config :auto_linker,
opts: [
scheme: true,
extra: true,
class: false,
strip_prefix: false,
new_window: false,
rel: false
]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

View file

@@ -107,7 +107,7 @@ config :pleroma, Pleroma.Mailer,
An example to enable ONLY ExSyslogger (f/ex in ``prod.secret.exs``) with info and debug suppressed:
```
config :logger,
backends: [{ExSyslogger, :ex_syslogger}]
config :logger, :ex_syslogger,
@@ -301,3 +301,28 @@ For each pool, the options are:
* `max_connections` - how many connections a pool can hold
* `timeout` - retention duration for connections
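For these two pool options, a configuration entry could look like the sketch below. This is illustrative only: the `:hackney_pools` key and the `federation` pool name are assumptions and not part of this merge request; only `max_connections` and `timeout` are documented here.
```exs
# Illustrative sketch - the :hackney_pools key and the :federation pool name
# are assumptions; max_connections and timeout are the options documented above.
config :pleroma, :hackney_pools,
  federation: [
    max_connections: 50,
    timeout: 150_000
  ]
```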
## :auto_linker
Configuration for the `auto_linker` library; a usage sketch follows the example below:
* `class: "auto-linker"` - the class added to generated links; set to `false` to omit it
* `rel: "noopener noreferrer"` - overrides the `rel` attribute; set to `false` to omit it
* `new_window: true` - set to `false` to omit the `target='_blank'` attribute
* `scheme: false` - set to `true` to also link URLs that include a scheme, e.g. `http://google.com`
* `truncate: false` - set to a number to truncate URLs longer than that length; truncated URLs end in `..`
* `strip_prefix: true` - strip the scheme prefix
* `extra: false` - set to `true` to link URLs with rarely used schemes (magnet, ipfs, irc, etc.)
Example:
```exs
config :auto_linker,
opts: [
scheme: true,
extra: true,
class: false,
strip_prefix: false,
new_window: false,
rel: false
]
```
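With options like those above, linking is performed by the new `Pleroma.Formatter.linkify/2` introduced further down in this merge request. A rough usage sketch (the nickname and URL are made up, and the HTML in the comments is abbreviated; exact markup depends on the configured options):
```elixir
# Sketch based on the @spec of Pleroma.Formatter.linkify/2 added below;
# the nickname and URL here are illustrative only.
{html, mentions, tags} = Pleroma.Formatter.linkify("@lain hello https://example.com #cofe")

# html     - the input with the URL wrapped in <a href="https://example.com">...</a>,
#            the mention wrapped in an h-card span, and the hashtag linked to /tag/cofe
# mentions - [{"@lain", %Pleroma.User{}}]  (only mentions that resolve to a known user)
# tags     - [{"#cofe", "cofe"}]
```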

View file

@@ -8,33 +8,51 @@ defmodule Pleroma.Formatter do
alias Pleroma.User
alias Pleroma.Web.MediaProxy
@tag_regex ~r/((?<=[^&])|\A)(\#)(\w+)/u
@markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/
@link_regex ~r{((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+}ui
# Modified from https://www.w3.org/TR/html5/forms.html#valid-e-mail-address @auto_linker_config hashtag: true,
@mentions_regex ~r/@[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]*@?[a-zA-Z0-9_-](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*/u hashtag_handler: &Pleroma.Formatter.hashtag_handler/4,
mention: true,
mention_handler: &Pleroma.Formatter.mention_handler/4
def parse_tags(text, data \\ %{}) do def mention_handler("@" <> nickname, buffer, opts, acc) do
Regex.scan(@tag_regex, text) case User.get_cached_by_nickname(nickname) do
|> Enum.map(fn ["#" <> tag = full_tag | _] -> {full_tag, String.downcase(tag)} end) %User{id: id} = user ->
|> (fn map -> ap_id = get_ap_id(user)
if data["sensitive"] in [true, "True", "true", "1"], nickname_text = get_nickname_text(nickname, opts) |> maybe_escape(opts)
do: [{"#nsfw", "nsfw"}] ++ map,
else: map link =
end).() "<span class='h-card'><a data-user='#{id}' class='u-url mention' href='#{ap_id}'>@<span>#{
nickname_text
}</span></a></span>"
{link, %{acc | mentions: MapSet.put(acc.mentions, {"@" <> nickname, user})}}
_ ->
{buffer, acc}
end
end
@doc "Parses mentions text and returns list {nickname, user}." def hashtag_handler("#" <> tag = tag_text, _buffer, _opts, acc) do
@spec parse_mentions(binary()) :: list({binary(), User.t()}) tag = String.downcase(tag)
def parse_mentions(text) do url = "#{Pleroma.Web.base_url()}/tag/#{tag}"
Regex.scan(@mentions_regex, text) link = "<a class='hashtag' data-tag='#{tag}' href='#{url}' rel='tag'>#{tag_text}</a>"
|> List.flatten()
|> Enum.uniq() {link, %{acc | tags: MapSet.put(acc.tags, {tag_text, tag})}}
|> Enum.map(fn nickname -> end
with nickname <- String.trim_leading(nickname, "@"),
do: {"@" <> nickname, User.get_cached_by_nickname(nickname)} @doc """
end) Parses a text and replace plain text links with HTML. Returns a tuple with a result text, mentions, and hashtags.
|> Enum.filter(fn {_match, user} -> user end) """
@spec linkify(String.t(), keyword()) ::
{String.t(), [{String.t(), User.t()}], [{String.t(), String.t()}]}
def linkify(text, options \\ []) do
options = options ++ @auto_linker_config
acc = %{mentions: MapSet.new(), tags: MapSet.new()}
{text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options)
{text, MapSet.to_list(mentions), MapSet.to_list(tags)}
end
def emojify(text) do
@@ -48,9 +66,7 @@ def emojify(text, emoji, strip \\ false) do
emoji = HTML.strip_tags(emoji)
file = HTML.strip_tags(file)
String.replace( html =
text,
":#{emoji}:",
if not strip do
"<img height='32px' width='32px' alt='#{emoji}' title='#{emoji}' src='#{
MediaProxy.url(file)
@@ -58,8 +74,8 @@ def emojify(text, emoji, strip \\ false) do
else
""
end
)
|> HTML.filter_tags() String.replace(text, ":#{emoji}:", html) |> HTML.filter_tags()
end)
end
@@ -75,12 +91,6 @@ def get_emoji(text) when is_binary(text) do
def get_emoji(_), do: []
@link_regex ~r/[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+/ui
@uri_schemes Application.get_env(:pleroma, :uri_schemes, [])
@valid_schemes Keyword.get(@uri_schemes, :valid_schemes, [])
# TODO: make it use something other than @link_regex
def html_escape(text, "text/html") do
HTML.filter_tags(text)
end
@@ -94,112 +104,6 @@ def html_escape(text, "text/plain") do
|> Enum.join("")
end
@doc """
Escapes a special characters in mention names.
"""
@spec mentions_escape(String.t(), list({String.t(), any()})) :: String.t()
def mentions_escape(text, mentions) do
mentions
|> Enum.reduce(text, fn {name, _}, acc ->
escape_name = String.replace(name, @markdown_characters_regex, "\\\\\\1")
String.replace(acc, name, escape_name)
end)
end
@doc "changes scheme:... urls to html links"
def add_links({subs, text}) do
links =
text
|> String.split([" ", "\t", "<br>"])
|> Enum.filter(fn word -> String.starts_with?(word, @valid_schemes) end)
|> Enum.filter(fn word -> Regex.match?(@link_regex, word) end)
|> Enum.map(fn url -> {Ecto.UUID.generate(), url} end)
|> Enum.sort_by(fn {_, url} -> -String.length(url) end)
uuid_text =
links
|> Enum.reduce(text, fn {uuid, url}, acc -> String.replace(acc, url, uuid) end)
subs =
subs ++
Enum.map(links, fn {uuid, url} ->
{uuid, "<a href=\"#{url}\">#{url}</a>"}
end)
{subs, uuid_text}
end
@doc "Adds the links to mentioned users"
def add_user_links({subs, text}, mentions, options \\ []) do
mentions =
mentions
|> Enum.sort_by(fn {name, _} -> -String.length(name) end)
|> Enum.map(fn {name, user} -> {name, user, Ecto.UUID.generate()} end)
uuid_text =
mentions
|> Enum.reduce(text, fn {match, _user, uuid}, text ->
String.replace(text, match, uuid)
end)
subs =
subs ++
Enum.map(mentions, fn {match, %User{id: id, ap_id: ap_id, info: info}, uuid} ->
ap_id =
if is_binary(info.source_data["url"]) do
info.source_data["url"]
else
ap_id
end
nickname =
if options[:format] == :full do
User.full_nickname(match)
else
User.local_nickname(match)
end
{uuid,
"<span class='h-card'><a data-user='#{id}' class='u-url mention' href='#{ap_id}'>" <>
"@<span>#{nickname}</span></a></span>"}
end)
{subs, uuid_text}
end
@doc "Adds the hashtag links"
def add_hashtag_links({subs, text}, tags) do
tags =
tags
|> Enum.sort_by(fn {name, _} -> -String.length(name) end)
|> Enum.map(fn {name, short} -> {name, short, Ecto.UUID.generate()} end)
uuid_text =
tags
|> Enum.reduce(text, fn {match, _short, uuid}, text ->
String.replace(text, ~r/((?<=[^&])|(\A))#{match}/, uuid)
end)
subs =
subs ++
Enum.map(tags, fn {tag_text, tag, uuid} ->
url =
"<a class='hashtag' data-tag='#{tag}' href='#{Pleroma.Web.base_url()}/tag/#{tag}' rel='tag'>#{
tag_text
}</a>"
{uuid, url}
end)
{subs, uuid_text}
end
def finalize({subs, text}) do
Enum.reduce(subs, text, fn {uuid, replacement}, result_text ->
String.replace(result_text, uuid, replacement)
end)
end
def truncate(text, max_length \\ 200, omission \\ "...") do
# Remove trailing whitespace
text = Regex.replace(~r/([^ \t\r\n])([ \t]+$)/u, text, "\\g{1}")
@@ -211,4 +115,16 @@ def truncate(text, max_length \\ 200, omission \\ "...") do
String.slice(text, 0, length_with_omission) <> omission
end
end
defp get_ap_id(%User{info: %{source_data: %{"url" => url}}}) when is_binary(url), do: url
defp get_ap_id(%User{ap_id: ap_id}), do: ap_id
defp get_nickname_text(nickname, %{mentions_format: :full}), do: User.full_nickname(nickname)
defp get_nickname_text(nickname, _), do: User.local_nickname(nickname)
defp maybe_escape(str, %{mentions_escape: true}) do
String.replace(str, @markdown_characters_regex, "\\\\\\1")
end
defp maybe_escape(str, _), do: str
end

View file

@@ -1193,9 +1193,6 @@ def parse_bio(nil, _user), do: ""
def parse_bio(bio, _user) when bio == "", do: bio
def parse_bio(bio, user) do
mentions = Formatter.parse_mentions(bio)
tags = Formatter.parse_tags(bio)
emoji =
(user.info.source_data["tag"] || [])
|> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
@@ -1204,7 +1201,8 @@ def parse_bio(bio, user) do
end)
bio
|> CommonUtils.format_input(mentions, tags, "text/plain", user_links: [format: :full]) |> CommonUtils.format_input("text/plain", mentions_format: :full)
|> elem(0)
|> Formatter.emojify(emoji)
end

View file

@@ -82,40 +82,20 @@ def get_visibility(%{"in_reply_to_status_id" => status_id}) when not is_nil(stat
def get_visibility(_), do: "public"
defp get_content_type(content_type) do
if Enum.member?(Pleroma.Config.get([:instance, :allowed_post_formats]), content_type) do
content_type
else
"text/plain"
end
end
def post(user, %{"status" => status} = data) do
visibility = get_visibility(data)
limit = Pleroma.Config.get([:instance, :limit])
with status <- String.trim(status),
attachments <- attachments_from_ids(data),
mentions <- Formatter.parse_mentions(status),
inReplyTo <- get_replied_to_activity(data["in_reply_to_status_id"]),
{to, cc} <- to_for_user_and_mentions(user, mentions, inReplyTo, visibility), {content_html, mentions, tags} <-
tags <- Formatter.parse_tags(status, data),
content_html <-
make_content_html( make_content_html(
status, status,
mentions,
attachments, attachments,
tags, data
get_content_type(data["content_type"]),
Enum.member?(
[true, "true"],
Map.get(
data,
"no_attachment_links",
Pleroma.Config.get([:instance, :no_attachment_links], false)
)
)
), ),
{to, cc} <- to_for_user_and_mentions(user, mentions, inReplyTo, visibility),
context <- make_context(inReplyTo),
cw <- data["spoiler_text"],
full_payload <- String.trim(status <> (data["spoiler_text"] || "")),
@@ -247,7 +227,7 @@ def thread_muted?(user, activity) do
def report(user, data) do
with {:account_id, %{"account_id" => account_id}} <- {:account_id, data},
{:account, %User{} = account} <- {:account, User.get_by_id(account_id)},
{:ok, content_html} <- make_report_content_html(data["comment"]), {:ok, {content_html, _, _}} <- make_report_content_html(data["comment"]),
{:ok, statuses} <- get_report_statuses(account, data),
{:ok, activity} <-
ActivityPub.flag(%{

View file

@@ -10,7 +10,7 @@ defmodule Pleroma.Web.CommonAPI.Utils do
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web alias Pleroma.Config
alias Pleroma.Web.Endpoint
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.ActivityPub.Utils
@@ -100,24 +100,45 @@ def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do
def make_content_html(
status,
mentions,
attachments, attachments,
tags, data
content_type,
no_attachment_links \\ false
) do ) do
no_attachment_links =
data
|> Map.get("no_attachment_links", Config.get([:instance, :no_attachment_links]))
|> Kernel.in([true, "true"])
content_type = get_content_type(data["content_type"])
status status
|> format_input(mentions, tags, content_type) |> format_input(content_type)
|> maybe_add_attachments(attachments, no_attachment_links) |> maybe_add_attachments(attachments, no_attachment_links)
|> maybe_add_nsfw_tag(data)
end end
defp get_content_type(content_type) do
if Enum.member?(Config.get([:instance, :allowed_post_formats]), content_type) do
content_type
else
"text/plain"
end
end
defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive})
when sensitive in [true, "True", "true", "1"] do
{text, mentions, [{"#nsfw", "nsfw"} | tags]}
end
defp maybe_add_nsfw_tag(data, _), do: data
def make_context(%Activity{data: %{"context" => context}}), do: context
def make_context(_), do: Utils.generate_context_id()
def maybe_add_attachments(text, _attachments, true = _no_links), do: text def maybe_add_attachments(parsed, _attachments, true = _no_links), do: parsed
def maybe_add_attachments(text, attachments, _no_links) do def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
add_attachments(text, attachments) text = add_attachments(text, attachments)
{text, mentions, tags}
end
def add_attachments(text, attachments) do
@@ -135,56 +156,39 @@ def add_attachments(text, attachments) do
Enum.join([text | attachment_text], "<br>")
end
def format_input(text, mentions, tags, format, options \\ []) def format_input(text, format, options \\ [])
@doc """
Formatting text to plain text.
"""
def format_input(text, mentions, tags, "text/plain", options) do def format_input(text, "text/plain", options) do
text text
|> Formatter.html_escape("text/plain") |> Formatter.html_escape("text/plain")
|> String.replace(~r/\r?\n/, "<br>") |> Formatter.linkify(options)
|> (&{[], &1}).() |> (fn {text, mentions, tags} ->
|> Formatter.add_links() {String.replace(text, ~r/\r?\n/, "<br>"), mentions, tags}
|> Formatter.add_user_links(mentions, options[:user_links] || []) end).()
|> Formatter.add_hashtag_links(tags)
|> Formatter.finalize()
end end
@doc """
Formatting text to html.
"""
def format_input(text, mentions, _tags, "text/html", options) do def format_input(text, "text/html", options) do
text text
|> Formatter.html_escape("text/html") |> Formatter.html_escape("text/html")
|> (&{[], &1}).() |> Formatter.linkify(options)
|> Formatter.add_user_links(mentions, options[:user_links] || [])
|> Formatter.finalize()
end end
@doc """
Formatting text to markdown.
"""
def format_input(text, mentions, tags, "text/markdown", options) do def format_input(text, "text/markdown", options) do
options = Keyword.put(options, :mentions_escape, true)
text text
|> Formatter.mentions_escape(mentions)
|> Earmark.as_html!()
|> Formatter.html_escape("text/html") |> Formatter.html_escape("text/html")
|> (&{[], &1}).() |> Formatter.linkify(options)
|> Formatter.add_user_links(mentions, options[:user_links] || []) |> (fn {text, mentions, tags} -> {Earmark.as_html!(text), mentions, tags} end).()
|> Formatter.add_hashtag_links(tags)
|> Formatter.finalize()
end
def add_tag_links(text, tags) do
tags =
tags
|> Enum.sort_by(fn {tag, _} -> -String.length(tag) end)
Enum.reduce(tags, text, fn {full, tag}, text ->
url = "<a href='#{Web.base_url()}/tag/#{tag}' rel='tag'>##{tag}</a>"
String.replace(text, full, url)
end)
end end
def make_note_data( def make_note_data(
@@ -323,13 +327,13 @@ def maybe_extract_mentions(%{"tag" => tag}) do
def maybe_extract_mentions(_), do: []
def make_report_content_html(nil), do: {:ok, nil} def make_report_content_html(nil), do: {:ok, {nil, [], []}}
def make_report_content_html(comment) do
max_size = Pleroma.Config.get([:instance, :max_report_comment_size], 1000)
if String.length(comment) <= max_size do
{:ok, format_input(comment, [], [], "text/plain")} {:ok, format_input(comment, "text/plain")}
else
{:error, "Comment must be up to #{max_size} characters"}
end

View file

@@ -90,7 +90,10 @@ defp deps do
{:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test},
{:floki, "~> 0.20.0"},
{:ex_syslogger, github: "slashmili/ex_syslogger", tag: "1.4.0"},
{:timex, "~> 3.5"} {:timex, "~> 3.5"},
{:auto_linker,
git: "https://git.pleroma.social/pleroma/auto_linker.git",
ref: "94193ca5f97c1f9fdf3d1469653e2d46fac34bcd"}
]
end

View file

@@ -1,4 +1,5 @@
%{
"auto_linker": {:git, "https://git.pleroma.social/pleroma/auto_linker.git", "94193ca5f97c1f9fdf3d1469653e2d46fac34bcd", [ref: "94193ca5f97c1f9fdf3d1469653e2d46fac34bcd"]},
"base64url": {:hex, :base64url, "0.0.1", "36a90125f5948e3afd7be97662a1504b934dd5dac78451ca6e9abf85a10286be", [:rebar], [], "hexpm"},
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"},
"cachex": {:hex, :cachex, "3.0.2", "1351caa4e26e29f7d7ec1d29b53d6013f0447630bbf382b4fb5d5bad0209f203", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm"},

View file

@@ -21,22 +21,16 @@ test "turns hashtags into links" do
expected_text =
"I love <a class='hashtag' data-tag='cofe' href='http://localhost:4001/tag/cofe' rel='tag'>#cofe</a> and <a class='hashtag' data-tag='2hu' href='http://localhost:4001/tag/2hu' rel='tag'>#2hu</a>"
tags = Formatter.parse_tags(text) assert {^expected_text, [], _tags} = Formatter.linkify(text)
assert expected_text ==
Formatter.add_hashtag_links({[], text}, tags) |> Formatter.finalize()
end end
test "does not turn html characters to tags" do test "does not turn html characters to tags" do
text = "Fact #3: pleroma does what mastodon't" text = "#fact_3: pleroma does what mastodon't"
expected_text = expected_text =
"Fact <a class='hashtag' data-tag='3' href='http://localhost:4001/tag/3' rel='tag'>#3</a>: pleroma does what mastodon't" "<a class='hashtag' data-tag='fact_3' href='http://localhost:4001/tag/fact_3' rel='tag'>#fact_3</a>: pleroma does what mastodon't"
tags = Formatter.parse_tags(text) assert {^expected_text, [], _tags} = Formatter.linkify(text)
assert expected_text ==
Formatter.add_hashtag_links({[], text}, tags) |> Formatter.finalize()
end end
end end
@@ -47,79 +41,79 @@ test "turning urls into links" do
expected =
"Hey, check out <a href=\"https://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla\">https://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla</a> ."
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://mastodon.social/@lambadalambda"
expected =
"<a href=\"https://mastodon.social/@lambadalambda\">https://mastodon.social/@lambadalambda</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://mastodon.social:4000/@lambadalambda"
expected =
"<a href=\"https://mastodon.social:4000/@lambadalambda\">https://mastodon.social:4000/@lambadalambda</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "@lambadalambda"
expected = "@lambadalambda"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "http://www.cs.vu.nl/~ast/intel/"
expected = "<a href=\"http://www.cs.vu.nl/~ast/intel/\">http://www.cs.vu.nl/~ast/intel/</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://forum.zdoom.org/viewtopic.php?f=44&t=57087"
expected =
"<a href=\"https://forum.zdoom.org/viewtopic.php?f=44&t=57087\">https://forum.zdoom.org/viewtopic.php?f=44&t=57087</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul"
expected =
"<a href=\"https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul\">https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://www.google.co.jp/search?q=Nasim+Aghdam"
expected =
"<a href=\"https://www.google.co.jp/search?q=Nasim+Aghdam\">https://www.google.co.jp/search?q=Nasim+Aghdam</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://en.wikipedia.org/wiki/Duff's_device"
expected =
"<a href=\"https://en.wikipedia.org/wiki/Duff's_device\">https://en.wikipedia.org/wiki/Duff's_device</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "https://pleroma.com https://pleroma.com/sucks"
expected =
"<a href=\"https://pleroma.com\">https://pleroma.com</a> <a href=\"https://pleroma.com/sucks\">https://pleroma.com/sucks</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = "xmpp:contact@hacktivis.me"
expected = "<a href=\"xmpp:contact@hacktivis.me\">xmpp:contact@hacktivis.me</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
text = text =
"magnet:?xt=urn:btih:7ec9d298e91d6e4394d1379caf073c77ff3e3136&tr=udp%3A%2F%2Fopentor.org%3A2710&tr=udp%3A%2F%2Ftracker.blackunicorn.xyz%3A6969&tr=udp%3A%2F%2Ftracker.ccc.de%3A80&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com" "magnet:?xt=urn:btih:7ec9d298e91d6e4394d1379caf073c77ff3e3136&tr=udp%3A%2F%2Fopentor.org%3A2710&tr=udp%3A%2F%2Ftracker.blackunicorn.xyz%3A6969&tr=udp%3A%2F%2Ftracker.ccc.de%3A80&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com"
expected = "<a href=\"#{text}\">#{text}</a>" expected = "<a href=\"#{text}\">#{text}</a>"
assert Formatter.add_links({[], text}) |> Formatter.finalize() == expected assert {^expected, [], []} = Formatter.linkify(text)
end
end
@@ -136,12 +130,9 @@ test "gives a replacement for user links, using local nicknames in user links te
archaeme_remote = insert(:user, %{nickname: "archaeme@archae.me"})
mentions = Pleroma.Formatter.parse_mentions(text) {text, mentions, []} = Formatter.linkify(text)
{subs, text} = Formatter.add_user_links({[], text}, mentions) assert length(mentions) == 3
assert length(subs) == 3
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
expected_text =
"<span class='h-card'><a data-user='#{gsimg.id}' class='u-url mention' href='#{
@@ -152,7 +143,7 @@ test "gives a replacement for user links, using local nicknames in user links te
archaeme_remote.id
}' class='u-url mention' href='#{archaeme_remote.ap_id}'>@<span>archaeme</span></a></span>"
assert expected_text == Formatter.finalize({subs, text}) assert expected_text == text
end
test "gives a replacement for user links when the user is using Osada" do
@@ -160,48 +151,35 @@ test "gives a replacement for user links when the user is using Osada" do
text = "@mike@osada.macgirvin.com test"
mentions = Formatter.parse_mentions(text) {text, mentions, []} = Formatter.linkify(text)
{subs, text} = Formatter.add_user_links({[], text}, mentions) assert length(mentions) == 1
assert length(subs) == 1
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
expected_text =
"<span class='h-card'><a data-user='#{mike.id}' class='u-url mention' href='#{mike.ap_id}'>@<span>mike</span></a></span> test"
assert expected_text == Formatter.finalize({subs, text}) assert expected_text == text
end
test "gives a replacement for single-character local nicknames" do
text = "@o hi"
o = insert(:user, %{nickname: "o"})
mentions = Formatter.parse_mentions(text) {text, mentions, []} = Formatter.linkify(text)
{subs, text} = Formatter.add_user_links({[], text}, mentions) assert length(mentions) == 1
assert length(subs) == 1
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
expected_text =
"<span class='h-card'><a data-user='#{o.id}' class='u-url mention' href='#{o.ap_id}'>@<span>o</span></a></span> hi"
assert expected_text == Formatter.finalize({subs, text}) assert expected_text == text
end
test "does not give a replacement for single-character local nicknames who don't exist" do
text = "@a hi"
mentions = Formatter.parse_mentions(text)
{subs, text} = Formatter.add_user_links({[], text}, mentions)
assert Enum.empty?(subs)
Enum.each(subs, fn {uuid, _} -> assert String.contains?(text, uuid) end)
expected_text = "@a hi" expected_text = "@a hi"
assert expected_text == Formatter.finalize({subs, text}) assert {^expected_text, [] = _mentions, [] = _tags} = Formatter.linkify(text)
end
end
@@ -209,14 +187,14 @@ test "does not give a replacement for single-character local nicknames who don't
test "parses tags in the text" do
text = "Here's a #Test. Maybe these are #working or not. What about #漢字? And #は。"
expected = [ expected_tags = [
{"#Test", "test"},
{"#working", "working"},
{"#漢字", "漢字"}, {"#", ""},
{"#", ""} {"#漢字", "漢字"}
] ]
assert Formatter.parse_tags(text) == expected assert {_text, [], ^expected_tags} = Formatter.linkify(text)
end
end
@@ -230,15 +208,15 @@ test "it can parse mentions and return the relevant users" do
archaeme = insert(:user, %{nickname: "archaeme"})
archaeme_remote = insert(:user, %{nickname: "archaeme@archae.me"})
expected_result = [ expected_mentions = [
{"@gsimg", gsimg},
{"@archaeme", archaeme},
{"@archaeme@archae.me", archaeme_remote},
{"@o", o}, {"@gsimg", gsimg},
{"@jimm", jimm} {"@jimm", jimm},
{"@o", o}
] ]
assert Formatter.parse_mentions(text) == expected_result assert {_text, ^expected_mentions, []} = Formatter.linkify(text)
end
test "it adds cool emoji" do
@@ -281,22 +259,10 @@ test "it doesn't die when text is absent" do
assert Formatter.get_emoji(text) == []
end
describe "/mentions_escape" do test "it escapes HTML in plain text" do
test "it returns text with escaped mention names" do text = "hello & world google.com/?a=b&c=d \n http://test.com/?a=b&c=d 1"
text = """ expected = "hello &amp; world google.com/?a=b&c=d \n http://test.com/?a=b&c=d 1"
@a_breakin_glass@cybre.space
(also, little voice inside my head thinking "maybe this will encourage people
pronouncing it properly instead of saying _raKEWdo_ ")
"""
escape_text = """ assert Formatter.html_escape(text, "text/plain") == expected
@a\\_breakin\\_glass@cybre\\.space
(also, little voice inside my head thinking \"maybe this will encourage people
pronouncing it properly instead of saying _raKEWdo_ \")
"""
mentions = [{"@a_breakin_glass@cybre.space", %{}}]
assert Formatter.mentions_escape(text, mentions) == escape_text
end
end end
end end

View file

@@ -57,19 +57,19 @@ test "parses emoji from name and bio" do
assert expected == Utils.emoji_from_profile(user)
end
describe "format_input/4" do describe "format_input/3" do
test "works for bare text/plain" do
text = "hello world!"
expected = "hello world!"
output = Utils.format_input(text, [], [], "text/plain") {output, [], []} = Utils.format_input(text, "text/plain")
assert output == expected
text = "hello world!\n\nsecond paragraph!"
expected = "hello world!<br><br>second paragraph!"
output = Utils.format_input(text, [], [], "text/plain") {output, [], []} = Utils.format_input(text, "text/plain")
assert output == expected
end
@@ -78,14 +78,14 @@ test "works for bare text/html" do
text = "<p>hello world!</p>"
expected = "<p>hello world!</p>"
output = Utils.format_input(text, [], [], "text/html") {output, [], []} = Utils.format_input(text, "text/html")
assert output == expected
text = "<p>hello world!</p>\n\n<p>second paragraph</p>"
expected = "<p>hello world!</p>\n\n<p>second paragraph</p>"
output = Utils.format_input(text, [], [], "text/html") {output, [], []} = Utils.format_input(text, "text/html")
assert output == expected
end
@@ -94,14 +94,32 @@ test "works for bare text/markdown" do
text = "**hello world**"
expected = "<p><strong>hello world</strong></p>\n"
output = Utils.format_input(text, [], [], "text/markdown") {output, [], []} = Utils.format_input(text, "text/markdown")
assert output == expected
text = "**hello world**\n\n*another paragraph*"
expected = "<p><strong>hello world</strong></p>\n<p><em>another paragraph</em></p>\n"
output = Utils.format_input(text, [], [], "text/markdown") {output, [], []} = Utils.format_input(text, "text/markdown")
assert output == expected
end
test "works for text/markdown with mentions" do
{:ok, user} =
UserBuilder.insert(%{nickname: "user__test", ap_id: "http://foo.com/user__test"})
text = "**hello world**\n\n*another @user__test and @user__test google.com paragraph*"
expected =
"<p><strong>hello world</strong></p>\n<p><em>another <span class='h-card'><a data-user='#{
user.id
}' class='u-url mention' href='http://foo.com/user__test'>@<span>user__test</span></a></span> and <span class='h-card'><a data-user='#{
user.id
}' class='u-url mention' href='http://foo.com/user__test'>@<span>user__test</span></a></span> <a href=\"http://google.com\">google.com</a> paragraph</em></p>\n"
{output, _, _} = Utils.format_input(text, "text/markdown")
assert output == expected assert output == expected
end end