Merge remote-tracking branch 'origin/develop' into fork

Signed-off-by: marcin mikołajczak <git@mkljczk.pl>
marcin mikołajczak 2024-05-27 18:49:49 +02:00
commit a3ea7c9f06
31 changed files with 687 additions and 51 deletions

View file

@ -0,0 +1 @@
Uploader: Add support for uploading attachments using IPFS

View file

@ -0,0 +1 @@
Add NSFW-detecting MRF

View file

@ -0,0 +1 @@
Add DNSRBL MRF

View file

@ -0,0 +1 @@
Add Anti-mention Spam MRF backported from Rebased

View file

@ -0,0 +1 @@
Update the documentation for configuring Prometheus metrics.

View file

@ -0,0 +1 @@
PromEx documentation

View file

@ -0,0 +1 @@
A 422 error is returned when attempting to reply to a deleted status

View file

@ -0,0 +1 @@
Parsing of RichMedia TTLs for Amazon URLs when query parameters are nil

View file

@ -0,0 +1 @@
Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue

View file

@ -0,0 +1 @@
Display reposted replies with exclude_replies: true

View file

@ -82,6 +82,10 @@
# region: "us-east-1", # may be required for Amazon AWS
scheme: "https://"
config :pleroma, Pleroma.Uploaders.IPFS,
post_gateway_url: nil,
get_gateway_url: nil
config :pleroma, :emoji,
shortcode_globs: ["/emoji/custom/**/*.png"],
pack_extensions: [".png", ".gif"],
@ -411,6 +415,11 @@
accept: [],
reject: []
config :pleroma, :mrf_dnsrbl,
nameserver: "127.0.0.1",
port: 53,
zone: "bl.pleroma.com"
# threshold of 7 days
config :pleroma, :mrf_object_age,
threshold: 604_800,
@ -440,6 +449,8 @@
mention_parent: true,
mention_quoted: true
config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
@ -597,7 +608,7 @@
attachments_cleanup: 1,
new_users_digest: 1,
mute_expire: 5,
search_indexing: 10,
search_indexing: [limit: 10, paused: true],
rich_media_expiration: 2,
check_domain_resolve: 1
],

View file

@ -136,6 +136,31 @@
}
]
},
%{
group: :pleroma,
key: Pleroma.Uploaders.IPFS,
type: :group,
description: "IPFS uploader-related settings",
children: [
%{
key: :get_gateway_url,
type: :string,
description: "GET Gateway URL",
suggestions: [
"https://ipfs.mydomain.com/{CID}",
"https://{CID}.ipfs.mydomain.com/"
]
},
%{
key: :post_gateway_url,
type: :string,
description: "POST Gateway URL",
suggestions: [
"http://localhost:5001/"
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Uploaders.S3,

View file

@ -165,6 +165,7 @@
config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock
peer_module =
if String.to_integer(System.otp_release()) >= 25 do

View file

@ -661,6 +661,19 @@ config :ex_aws, :s3,
host: "s3.eu-central-1.amazonaws.com"
```
#### Pleroma.Uploaders.IPFS
* `post_gateway_url`: URL with port of POST Gateway (unauthenticated)
* `get_gateway_url`: URL of public GET Gateway
Example:
```elixir
config :pleroma, Pleroma.Uploaders.IPFS,
post_gateway_url: "http://localhost:5001",
get_gateway_url: "http://{CID}.ipfs.mydomain.com"
```
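To route uploads through IPFS, the uploader itself must also be selected; a minimal sketch, assuming the gateway settings above:
```elixir
# Select IPFS as the active uploader; the gateway URLs come from the
# Pleroma.Uploaders.IPFS block shown above.
config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.IPFS
```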
### Upload filters
#### Pleroma.Upload.Filter.AnonymizeFilename

View file

@ -1,44 +1,47 @@
# Prometheus Metrics
# Prometheus / OpenTelemetry Metrics
Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library.
Pleroma includes support for exporting metrics via the [prom_ex](https://github.com/akoutmos/prom_ex) library.
The metrics are exposed by a dedicated webserver/port to improve privacy and security.
Config example:
```
config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
enabled: true,
auth: {:basic, "myusername", "mypassword"},
ip_whitelist: ["127.0.0.1"],
path: "/api/pleroma/app_metrics",
format: :text
```
* `enabled` (Pleroma extension) enables the endpoint
* `ip_whitelist` (Pleroma extension) could be used to restrict access only to specified IPs
* `auth` sets the authentication (`false` for no auth; configurable to HTTP Basic Auth, see [prometheus-plugs](https://github.com/deadtrickster/prometheus-plugs#exporting) documentation)
* `format` sets the output format (`:text` or `:protobuf`)
* `path` sets the path to app metrics page
## `/api/pleroma/app_metrics`
### Exports Prometheus application metrics
* Method: `GET`
* Authentication: not required by default (see configuration options above)
* Params: none
* Response: text
## Grafana
### Config example
The following is a config example to use with [Grafana](https://grafana.com):
```
config :pleroma, Pleroma.PromEx,
disabled: false,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: [
host: System.get_env("GRAFANA_HOST", "http://localhost:3000"),
auth_token: System.get_env("GRAFANA_TOKEN"),
upload_dashboards_on_start: false,
folder_name: "BEAM",
annotate_app_lifecycle: true
],
metrics_server: [
port: 4021,
path: "/metrics",
protocol: :http,
pool_size: 5,
cowboy_opts: [],
auth_strategy: :none
],
datasource: "Prometheus"
```
- job_name: 'beam'
metrics_path: /api/pleroma/app_metrics
scheme: https
PromEx can automatically publish dashboards to your Grafana server and register annotations. If you do not wish to configure this capability, you must generate the dashboard JSON files and import them directly. You can find the mix commands in the upstream [documentation](https://hexdocs.pm/prom_ex/Mix.Tasks.PromEx.Dashboard.Export.html). The list of modules enabled in Pleroma, for which you should generate dashboards, can be found by examining the contents of the `lib/pleroma/prom_ex.ex` module.
## prometheus.yml
The following is a bare minimum config example to use with [Prometheus](https://prometheus.io) or Prometheus-compatible software like [VictoriaMetrics](https://victoriametrics.com).
```
global:
scrape_interval: 15s
scrape_configs:
- job_name: 'pleroma'
scheme: http
static_configs:
- targets: ['pleroma.soykaf.com']
- targets: ['pleroma.soykaf.com:4021']
```

View file

@ -112,7 +112,8 @@ def start(_type, _args) do
task_children() ++
streamer_registry() ++
background_migrators() ++
[Pleroma.Gopher.Server]
[Pleroma.Gopher.Server] ++
[Pleroma.Search.Healthcheck]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options

View file

@ -10,8 +10,12 @@ def remove_from_index(%Pleroma.Object{id: object_id}) do
end
def search(query, options) do
search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.search(options[:for_user], query, options)
end
def healthcheck_endpoints do
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.healthcheck_endpoints
end
end

View file

@ -48,6 +48,9 @@ def add_to_index(_activity), do: :ok
@impl true
def remove_from_index(_object), do: :ok
@impl true
def healthcheck_endpoints, do: nil
def maybe_restrict_author(query, %User{} = author) do
Activity.Queries.by_author(query, author)
end

View file

@ -0,0 +1,86 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.Healthcheck do
@moduledoc """
Monitors the health of the search backend and pauses or resumes the search indexing Oban queue based on its availability.
"""
use GenServer
require Logger
@queue :search_indexing
@tick :timer.seconds(5)
@timeout :timer.seconds(2)
def start_link(_) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@impl true
def init(_) do
state = %{healthy: false}
{:ok, state, {:continue, :start}}
end
@impl true
def handle_continue(:start, state) do
tick()
{:noreply, state}
end
@impl true
def handle_info(:check, state) do
urls = Pleroma.Search.healthcheck_endpoints()
new_state =
if check(urls) do
Oban.resume_queue(queue: @queue)
Map.put(state, :healthy, true)
else
Oban.pause_queue(queue: @queue)
Map.put(state, :healthy, false)
end
maybe_log_state_change(state, new_state)
tick()
{:noreply, new_state}
end
@impl true
def handle_call(:state, _from, state) do
{:reply, state, state, :hibernate}
end
def state, do: GenServer.call(__MODULE__, :state)
def check([]), do: true
def check(urls) when is_list(urls) do
Enum.all?(
urls,
fn url ->
case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
{:ok, %{status: 200}} -> true
_ -> false
end
end
)
end
def check(_), do: true
defp tick do
Process.send_after(self(), :check, @tick)
end
defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
end
defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
end
defp maybe_log_state_change(_, _), do: :ok
end
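For reference, a brief usage sketch (the health endpoint URL is illustrative): the `check/1` and `state/0` functions defined above can be exercised directly from an IEx shell.
```elixir
# Probe a list of endpoints directly; true only if every URL returns HTTP 200.
Pleroma.Search.Healthcheck.check(["http://localhost:7700/health"])

# Ask the running GenServer for its current view of the backend,
# e.g. %{healthy: true} after a successful round of checks.
Pleroma.Search.Healthcheck.state()
```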

View file

@ -178,4 +178,15 @@ def add_to_index(activity) do
def remove_from_index(object) do
meili_delete("/indexes/objects/documents/#{object.id}")
end
@impl true
def healthcheck_endpoints do
endpoint =
Config.get([Pleroma.Search.Meilisearch, :url])
|> URI.parse()
|> Map.put(:path, "/health")
|> URI.to_string()
[endpoint]
end
end

View file

@ -21,4 +21,12 @@ defmodule Pleroma.Search.SearchBackend do
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}
@doc """
Healthcheck endpoints of the search backend infrastructure, monitored to control
processing of jobs in the Oban queue.
A 200 response is considered healthy; any other response is considered unhealthy.
"""
@callback healthcheck_endpoints :: list() | nil
end
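As a rough illustration of the contract (function body and URL are hypothetical), a backend either returns the endpoints to probe or `nil` when there is nothing to monitor; `nil` is treated as always healthy by the healthcheck.
```elixir
# Fragment of a hypothetical backend implementing the callback.
def healthcheck_endpoints do
  # URLs the Pleroma.Search.Healthcheck GenServer should probe with GET requests...
  ["http://localhost:9200/_cluster/health"]
  # ...or return nil when no external service needs monitoring.
end
```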

View file

@ -239,8 +239,12 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
""
end
[base_url, path]
|> Path.join()
if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
else
[base_url, path]
|> Path.join()
end
end
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
@ -277,6 +281,9 @@ def base_url do
Path.join([upload_base_url, bucket_with_namespace])
end
Pleroma.Uploaders.IPFS ->
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
_ ->
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
end

View file

@ -0,0 +1,77 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.IPFS do
@behaviour Pleroma.Uploaders.Uploader
require Logger
alias Tesla.Multipart
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
defp get_final_url(method) do
config = @config_impl.get([__MODULE__])
post_base_url = Keyword.get(config, :post_gateway_url)
Path.join([post_base_url, method])
end
def put_file_endpoint do
get_final_url("/api/v0/add")
end
def delete_file_endpoint do
get_final_url("/api/v0/files/rm")
end
@placeholder "{CID}"
def placeholder, do: @placeholder
@impl true
def get_file(file) do
b_url = Pleroma.Upload.base_url()
if String.contains?(b_url, @placeholder) do
{:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
else
{:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
end
end
@impl true
def put_file(%Pleroma.Upload{} = upload) do
mp =
Multipart.new()
|> Multipart.add_content_type_param("charset=utf-8")
|> Multipart.add_file(upload.tempfile)
case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do
{:ok, ret} ->
case Jason.decode(ret.body) do
{:ok, ret} ->
if Map.has_key?(ret, "Hash") do
{:ok, {:file, ret["Hash"]}}
else
{:error, "JSON doesn't contain Hash key"}
end
error ->
Logger.error("#{__MODULE__}: #{inspect(error)}")
{:error, "JSON decode failed"}
end
error ->
Logger.error("#{__MODULE__}: #{inspect(error)}")
{:error, "IPFS Gateway upload failed"}
end
end
@impl true
def delete_file(file) do
case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do
{:ok, %{status: 204}} -> :ok
error -> {:error, inspect(error)}
end
end
end
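A worked example of how the `{CID}` placeholder is resolved (gateway domain and CID are illustrative, matching the tests below): `get_file/1` substitutes the returned CID into the configured `get_gateway_url`.
```elixir
# With get_gateway_url set to "https://ipfs.mydomain.com/ipfs/{CID}",
# the placeholder is replaced by the CID of the stored object:
Pleroma.Uploaders.IPFS.get_file("testcid")
# => {:ok, {:url, "https://ipfs.mydomain.com/ipfs/testcid"}}
```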

View file

@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
alias Pleroma.Config
alias Pleroma.User
require Pleroma.Constants
@ -11,8 +12,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
defp user_has_posted?(%User{} = u), do: u.note_count > 0
defp user_has_age?(%User{} = u) do
diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :second)
diff >= :timer.seconds(30)
user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000)
diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond)
diff >= user_age_limit
end
defp good_reputation?(%User{} = u) do
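Note that the configurable `user_age_limit` is expressed in milliseconds (the default of `30_000` corresponds to 30 seconds); to require, say, a five-minute account age, an admin could set (value is illustrative):
```elixir
config :pleroma, :mrf_antimentionspam, user_age_limit: 300_000
```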

View file

@ -0,0 +1,142 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
@moduledoc """
Dynamic activity filtering based on an RBL database
This MRF makes queries to a custom DNS server which will
respond with values indicating the classification of the domain
the activity originated from. This method has been widely used
in the email anti-spam industry for very fast reputation checks.
e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK
Other values such as 127.0.0.2 may be used for specific classifications.
Information for why the host is blocked can be stored in a corresponding TXT record.
This method is fail-open, so if the queries fail the activities are accepted.
An example of software meant for this purpose is rbldnsd, which can be found
at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
https://git.pleroma.social/feld/rbldnsd
It is highly recommended that you run your own copy of rbldnsd and use an
external mechanism to sync/share the contents of the zone file. This is
important to keep query latency as low as possible and to prevent your DNS
server from being attacked in a way that causes it to fail and permit content.
"""
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
alias Pleroma.Config
require Logger
@query_retries 1
@query_timeout 500
@impl true
def filter(%{"actor" => actor} = object) do
actor_info = URI.parse(actor)
with {:ok, object} <- check_rbl(actor_info, object) do
{:ok, object}
else
_ -> {:reject, "[DNSRBLPolicy]"}
end
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe do
mrf_dnsrbl =
Config.get(:mrf_dnsrbl)
|> Enum.into(%{})
{:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
end
@impl true
def config_description do
%{
key: :mrf_dnsrbl,
related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
label: "MRF DNSRBL",
description: "DNS RealTime Blackhole Policy",
children: [
%{
key: :nameserver,
type: {:string},
description: "DNSRBL Nameserver to Query (IP or hostame)",
suggestions: ["127.0.0.1"]
},
%{
key: :port,
type: {:string},
description: "Nameserver port",
suggestions: ["53"]
},
%{
key: :zone,
type: {:string},
description: "Root zone for querying",
suggestions: ["bl.pleroma.com"]
}
]
}
end
defp check_rbl(%{host: actor_host}, object) do
with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
query =
Enum.join([actor_host, zone], ".")
|> String.to_charlist()
rbl_response = rblquery(query)
if Enum.empty?(rbl_response) do
{:ok, object}
else
Task.start(fn ->
reason = rblquery(query, :txt) || "undefined"
Logger.warning(
"DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
)
end)
:error
end
else
_ -> {:ok, object}
end
end
defp get_rblhost_ip(rblhost) do
case rblhost |> String.to_charlist() |> :inet_parse.address() do
{:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
_ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
end
end
defp rblquery(query, type \\ :a) do
config = Config.get([:mrf_dnsrbl])
case get_rblhost_ip(config[:nameserver]) do
{:ok, rblnsip} ->
:inet_res.lookup(query, :in, type,
nameservers: [{rblnsip, config[:port]}],
timeout: @query_timeout,
retry: @query_retries
)
_ ->
[]
end
end
end
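To put the policy into effect it has to be added to the MRF pipeline together with its settings; a minimal sketch using the defaults from `config.exs` above (shown alone in the policy list for brevity):
```elixir
# Enable the policy...
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy]

# ...and point it at the RBL nameserver and zone to query.
config :pleroma, :mrf_dnsrbl,
  nameserver: "127.0.0.1",
  port: 53,
  zone: "bl.pleroma.com"
```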

View file

@ -178,14 +178,22 @@ defp attachments(%{params: params} = draft) do
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do
case Activity.get_by_id(id) do
%Activity{} = activity ->
%__MODULE__{draft | in_reply_to: activity}
defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do
add_error(draft, dgettext("errors", "Cannot reply to a deleted status"))
end
_ ->
add_error(draft, dgettext("errors", "The post being replied to was deleted"))
end
defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do
activity = Activity.get_by_id(id)
params =
if is_nil(activity) do
# Deleted activities are returned as nil
Map.put(params, :in_reply_to_status_id, :deleted)
else
Map.put(params, :in_reply_to_status_id, activity)
end
in_reply_to(%{draft | params: params})
end
defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do

View file

@ -23,7 +23,7 @@ defp aws_signed_url?(image) when is_binary(image) and image != "" do
%URI{host: host, query: query} = URI.parse(image)
is_binary(host) and String.contains?(host, "amazonaws.com") and
String.contains?(query, "X-Amz-Expires")
is_binary(query) and String.contains?(query, "X-Amz-Expires")
end
defp aws_signed_url?(_), do: nil

View file

@ -0,0 +1,49 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.HealthcheckTest do
use Pleroma.DataCase
import Tesla.Mock
alias Pleroma.Search.Healthcheck
@good1 "http://good1.example.com/healthz"
@good2 "http://good2.example.com/health"
@bad "http://bad.example.com/healthy"
setup do
mock(fn
%{method: :get, url: @good1} ->
%Tesla.Env{
status: 200,
body: ""
}
%{method: :get, url: @good2} ->
%Tesla.Env{
status: 200,
body: ""
}
%{method: :get, url: @bad} ->
%Tesla.Env{
status: 503,
body: ""
}
end)
:ok
end
test "true for 200 responses" do
assert Healthcheck.check([@good1])
assert Healthcheck.check([@good1, @good2])
end
test "false if any response is not a 200" do
refute Healthcheck.check([@bad])
refute Healthcheck.check([@good1, @bad])
end
end

View file

@ -0,0 +1,158 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.IPFSTest do
use Pleroma.DataCase
alias Pleroma.Uploaders.IPFS
alias Tesla.Multipart
import ExUnit.CaptureLog
import Mock
import Mox
alias Pleroma.UnstubbedConfigMock, as: Config
describe "get_final_url" do
setup do
Config
|> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
[post_gateway_url: "http://localhost:5001"]
end)
:ok
end
test "it returns the final url for put_file" do
assert IPFS.put_file_endpoint() == "http://localhost:5001/api/v0/add"
end
test "it returns the final url for delete_file" do
assert IPFS.delete_file_endpoint() == "http://localhost:5001/api/v0/files/rm"
end
end
describe "get_file/1" do
setup do
Config
|> expect(:get, fn [Pleroma.Upload, :uploader] -> Pleroma.Uploaders.IPFS end)
|> expect(:get, fn [Pleroma.Upload, :base_url] -> nil end)
|> expect(:get, fn [Pleroma.Uploaders.IPFS, :public_endpoint] -> nil end)
:ok
end
test "it returns path to ipfs file with cid as subdomain" do
Config
|> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] ->
"https://{CID}.ipfs.mydomain.com"
end)
assert IPFS.get_file("testcid") == {
:ok,
{:url, "https://testcid.ipfs.mydomain.com"}
}
end
test "it returns path to ipfs file with cid as path" do
Config
|> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] ->
"https://ipfs.mydomain.com/ipfs/{CID}"
end)
assert IPFS.get_file("testcid") == {
:ok,
{:url, "https://ipfs.mydomain.com/ipfs/testcid"}
}
end
end
describe "put_file/1" do
setup do
Config
|> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
[post_gateway_url: "http://localhost:5001"]
end)
file_upload = %Pleroma.Upload{
name: "image-tet.jpg",
content_type: "image/jpeg",
path: "test_folder/image-tet.jpg",
tempfile: Path.absname("test/instance_static/add/shortcode.png")
}
mp =
Multipart.new()
|> Multipart.add_content_type_param("charset=utf-8")
|> Multipart.add_file(file_upload.tempfile)
[file_upload: file_upload, mp: mp]
end
test "save file", %{file_upload: file_upload} do
with_mock Pleroma.HTTP,
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
{:ok,
%Tesla.Env{
status: 200,
body:
"{\"Name\":\"image-tet.jpg\",\"Size\":\"5000\", \"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}"
}}
end do
assert IPFS.put_file(file_upload) ==
{:ok, {:file, "bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi"}}
end
end
test "returns error", %{file_upload: file_upload} do
with_mock Pleroma.HTTP,
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
{:error, "IPFS Gateway upload failed"}
end do
assert capture_log(fn ->
assert IPFS.put_file(file_upload) == {:error, "IPFS Gateway upload failed"}
end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}"
end
end
test "returns error if JSON decode fails", %{file_upload: file_upload} do
with_mock Pleroma.HTTP, [],
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
{:ok, %Tesla.Env{status: 200, body: "invalid"}}
end do
assert capture_log(fn ->
assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"}
end) =~
"Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError"
end
end
test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do
with_mock Pleroma.HTTP, [],
post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
{:ok, %Tesla.Env{status: 200, body: "{\"key\": \"value\"}"}}
end do
assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"}
end
end
end
describe "delete_file/1" do
setup do
Config
|> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
[post_gateway_url: "http://localhost:5001"]
end)
:ok
end
test_with_mock "deletes file", Pleroma.HTTP,
post: fn "http://localhost:5001/api/v0/files/rm", "", [], params: [arg: "image.jpg"] ->
{:ok, %{status: 204}}
end do
assert :ok = IPFS.delete_file("image.jpg")
end
end
end

View file

@ -235,6 +235,16 @@ test "replying to a status", %{user: user, conn: conn} do
assert Activity.get_in_reply_to_activity(activity).id == replied_to.id
end
test "replying to a deleted status", %{user: user, conn: conn} do
{:ok, status} = CommonAPI.post(user, %{status: "cofe"})
{:ok, _deleted_status} = CommonAPI.delete(status.id, user)
conn
|> put_req_header("content-type", "application/json")
|> post("/api/v1/statuses", %{"status" => "xD", "in_reply_to_id" => status.id})
|> json_response_and_validate_schema(422)
end
test "replying to a direct message with visibility other than direct", %{
user: user,
conn: conn

View file

@ -10,6 +10,7 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrlTest do
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
alias Pleroma.Web.RichMedia.Card
alias Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
setup do
ConfigMock
@ -82,6 +83,12 @@ test "s3 signed url is parsed and correct ttl is set for rich media" do
assert DateTime.diff(scheduled_at, timestamp_dt) == valid_till
end
test "AWS URL for an image without expiration works" do
og_data = %{"image" => "https://amazonaws.com/image.png"}
assert is_nil(AwsSignedUrl.ttl(og_data, ""))
end
defp construct_s3_url(timestamp, valid_till) do
"https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{timestamp}&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host"
end