diff --git a/changelog.d/fix-mrfs.add b/changelog.d/fix-mrfs.add
new file mode 100644
index 0000000000..2a0fb07680
--- /dev/null
+++ b/changelog.d/fix-mrfs.add
@@ -0,0 +1 @@
+Added a Mix task "pleroma.config fix_mrf_policies" which will remove erroneous MRF policies from ConfigDB.
diff --git a/changelog.d/handle-non-validate-delete-errors.change b/changelog.d/handle-non-validate-delete-errors.change
new file mode 100644
index 0000000000..94adb0e988
--- /dev/null
+++ b/changelog.d/handle-non-validate-delete-errors.change
@@ -0,0 +1 @@
+Transmogrifier: handle non-validate errors on incoming Delete activities
diff --git a/changelog.d/metadata-provider-empty-post.fix b/changelog.d/metadata-provider-empty-post.fix
new file mode 100644
index 0000000000..8d6341c6c7
--- /dev/null
+++ b/changelog.d/metadata-provider-empty-post.fix
@@ -0,0 +1 @@
+Fix OpenGraph and Twitter metadata providers when parsing objects with no content or summary fields.
diff --git a/changelog.d/oban-cancel-federation.add b/changelog.d/oban-cancel-federation.add
new file mode 100644
index 0000000000..1481936802
--- /dev/null
+++ b/changelog.d/oban-cancel-federation.add
@@ -0,0 +1 @@
+Deleting, Unfavoriting, Unrepeating, or Unreacting will cancel undelivered publishing jobs for the original activity.
diff --git a/changelog.d/oban-cancel.change b/changelog.d/oban-cancel.change
new file mode 100644
index 0000000000..e4512d3bb1
--- /dev/null
+++ b/changelog.d/oban-cancel.change
@@ -0,0 +1 @@
+Changed some jobs to return :cancel on unrecoverable errors that should not be retried
diff --git a/changelog.d/oban-fetcher-rejected.change b/changelog.d/oban-fetcher-rejected.change
index 92e0836479..65f5c992dc 100644
--- a/changelog.d/oban-fetcher-rejected.change
+++ b/changelog.d/oban-fetcher-rejected.change
@@ -1 +1 @@
-Discard Remote Fetcher jobs which errored due to an MRF rejection
+Discard Remote Fetcher jobs which errored due to an MRF rejection.
diff --git a/changelog.d/oban-live_dashboard.add b/changelog.d/oban-live_dashboard.add
new file mode 100644
index 0000000000..b5b3e4f41a
--- /dev/null
+++ b/changelog.d/oban-live_dashboard.add
@@ -0,0 +1 @@
+Oban jobs can now be viewed in the Live Dashboard
diff --git a/changelog.d/oban-rich-media-errors.fix b/changelog.d/oban-rich-media-errors.fix
new file mode 100644
index 0000000000..b904108db9
--- /dev/null
+++ b/changelog.d/oban-rich-media-errors.fix
@@ -0,0 +1 @@
+Prevent Rich Media backfill jobs from retrying in cases where it is likely they will fail again.
diff --git a/changelog.d/oban-timeouts.change b/changelog.d/oban-timeouts.change
new file mode 100644
index 0000000000..33d017c5c3
--- /dev/null
+++ b/changelog.d/oban-timeouts.change
@@ -0,0 +1 @@
+Ensure all Oban jobs have timeouts defined
diff --git a/config/config.exs b/config/config.exs
index b25fc5975b..8f9e97d526 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -600,9 +600,9 @@
web_push: 50,
transmogrifier: 20,
notifications: 20,
- background: 5,
+ background: 20,
search_indexing: [limit: 10, paused: true],
- slow: 1
+ slow: 5
],
plugins: [Oban.Plugins.Pruner],
crontab: [
diff --git a/docs/administration/CLI_tasks/config.md b/docs/administration/CLI_tasks/config.md
index 7c167ec5d4..13d671a7e8 100644
--- a/docs/administration/CLI_tasks/config.md
+++ b/docs/administration/CLI_tasks/config.md
@@ -154,4 +154,19 @@ This forcibly removes all saved values in the database.
```sh
mix pleroma.config [--force] reset
+
```
+
+## Remove invalid MRF modules from the database
+
+This forcibly removes any enabled MRF policy that does not correspond to an existing module, restoring the ability of the instance to start.
+
+=== "OTP"
+ ```sh
+ ./bin/pleroma_ctl config fix_mrf_policies
+ ```
+
+=== "From Source"
+ ```sh
+ mix pleroma.config fix_mrf_policies
+ ```
\ No newline at end of file
diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex
index 3a2ea44f80..8b3b2f18ba 100644
--- a/lib/mix/tasks/pleroma/config.ex
+++ b/lib/mix/tasks/pleroma/config.ex
@@ -205,6 +205,35 @@ def run(["delete", group]) do
end
end
+  # Removes any configured MRF policies that do not resolve to a real module,
+  # as a nonexistent policy module prevents the server from starting
+ def run(["fix_mrf_policies"]) do
+ check_configdb(fn ->
+ start_pleroma()
+
+ group = :pleroma
+ key = :mrf
+
+ %{value: value} =
+ group
+ |> ConfigDB.get_by_group_and_key(key)
+
+ policies =
+ Keyword.get(value, :policies, [])
+ |> Enum.filter(&is_atom(&1))
+ |> Enum.filter(fn mrf ->
+ case Code.ensure_compiled(mrf) do
+ {:module, _} -> true
+ {:error, _} -> false
+ end
+ end)
+
+ value = Keyword.put(value, :policies, policies)
+
+ ConfigDB.update_or_create(%{group: group, key: key, value: value})
+ end)
+ end
+
@spec migrate_to_db(Path.t() | nil) :: any()
def migrate_to_db(file_path \\ nil) do
with :ok <- Pleroma.Config.DeprecationWarnings.warn() do
diff --git a/lib/pleroma/instances/instance.ex b/lib/pleroma/instances/instance.ex
index c497a4fb75..288555146e 100644
--- a/lib/pleroma/instances/instance.ex
+++ b/lib/pleroma/instances/instance.ex
@@ -10,7 +10,7 @@ defmodule Pleroma.Instances.Instance do
alias Pleroma.Maps
alias Pleroma.Repo
alias Pleroma.User
- alias Pleroma.Workers.BackgroundWorker
+ alias Pleroma.Workers.DeleteWorker
use Ecto.Schema
@@ -297,7 +297,7 @@ defp scrape_metadata(%URI{} = instance_uri) do
all of those users' activities and notifications.
"""
def delete_users_and_activities(host) when is_binary(host) do
- BackgroundWorker.enqueue("delete_instance", %{"host" => host})
+ DeleteWorker.enqueue("delete_instance", %{"host" => host})
end
def perform(:delete_instance, host) when is_binary(host) do
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index af5642af43..c0f671dd43 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -59,6 +59,7 @@ def refetch_object(%Object{data: %{"id" => id}} = object) do
end
# Note: will create a Create activity, which we need internally at the moment.
+ @spec fetch_object_from_id(String.t(), list()) :: {:ok, Object.t()} | {:error | :reject, any()}
def fetch_object_from_id(id, options \\ []) do
with {_, nil} <- {:fetch_object, Object.get_cached_by_ap_id(id)},
{_, true} <- {:allowed_depth, Federator.allowed_thread_distance?(options[:depth])},
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index 8de169446d..8f3ccc5dbf 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -40,6 +40,7 @@ defmodule Pleroma.User do
alias Pleroma.Web.RelMe
alias Pleroma.Webhook.Notify
alias Pleroma.Workers.BackgroundWorker
+ alias Pleroma.Workers.DeleteWorker
alias Pleroma.Workers.UserRefreshWorker
require Logger
@@ -2032,7 +2033,7 @@ def delete(users) when is_list(users) do
def delete(%User{} = user) do
# Purge the user immediately
purge(user)
- BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
+ DeleteWorker.enqueue("delete_user", %{"user_id" => user.id})
end
# *Actually* delete the user from the DB
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index d17210859d..86424ed2de 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -540,6 +540,9 @@ def handle_incoming(
else
_ -> e
end
+
+ e ->
+ {:error, e}
end
end
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex
index f0162e0334..4fed80efb1 100644
--- a/lib/pleroma/web/common_api.ex
+++ b/lib/pleroma/web/common_api.ex
@@ -19,6 +19,7 @@ defmodule Pleroma.Web.CommonAPI do
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
+ import Ecto.Query, only: [where: 3]
import Pleroma.Web.Gettext
import Pleroma.Web.CommonAPI.Utils
@@ -156,6 +157,7 @@ def reject_follow_request(follower, followed) do
def delete(activity_id, user) do
with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(activity_id, filter: [])},
+ {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(activity)},
{_, %Object{} = object, _} <-
{:find_object, Object.normalize(activity, fetch: false), activity},
true <- User.privileged?(user, :messages_delete) || user.ap_id == object.data["actor"],
@@ -223,6 +225,7 @@ def unrepeat(id, user) do
{:find_activity, Activity.get_by_id(id)},
%Object{} = note <- Object.normalize(activity, fetch: false),
%Activity{} = announce <- Utils.get_existing_announce(user.ap_id, note),
+ {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(announce)},
{:ok, undo, _} <- Builder.undo(user, announce),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
@@ -275,6 +278,7 @@ def unfavorite(id, user) do
{:find_activity, Activity.get_by_id(id)},
%Object{} = note <- Object.normalize(activity, fetch: false),
%Activity{} = like <- Utils.get_existing_like(user.ap_id, note),
+ {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(like)},
{:ok, undo, _} <- Builder.undo(user, like),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
@@ -298,6 +302,7 @@ def react_with_emoji(id, user, emoji) do
def unreact_with_emoji(id, user, emoji) do
with %Activity{} = reaction_activity <- Utils.get_latest_reaction(id, user, emoji),
+ {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(reaction_activity)},
{:ok, undo, _} <- Builder.undo(user, reaction_activity),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
@@ -807,4 +812,14 @@ defp make_update_event_data(user, orig_object, changes, location) do
_ -> {:error, nil}
end
end
+
+ defp maybe_cancel_jobs(%Activity{data: %{"id" => ap_id}}) do
+ Oban.Job
+ |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
+ |> where([j], j.args["op"] == "publish_one")
+ |> where([j], j.args["params"]["id"] == ^ap_id)
+ |> Oban.cancel_all_jobs()
+ end
+
+ defp maybe_cancel_jobs(_), do: {:ok, 0}
end
diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex
index 80a8be9a2d..8f61ace241 100644
--- a/lib/pleroma/web/metadata/utils.ex
+++ b/lib/pleroma/web/metadata/utils.ex
@@ -25,11 +25,14 @@ def scrub_html_and_truncate(%{data: %{"summary" => summary}} = object)
|> scrub_html_and_truncate_object_field(object)
end
- def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do
+ def scrub_html_and_truncate(%{data: %{"content" => content}} = object)
+ when is_binary(content) and content != "" do
content
|> scrub_html_and_truncate_object_field(object)
end
+ def scrub_html_and_truncate(%{}), do: ""
+
def scrub_html_and_truncate(content, max_length \\ 200, omission \\ "...")
when is_binary(content) do
content
diff --git a/lib/pleroma/web/rich_media/backfill.ex b/lib/pleroma/web/rich_media/backfill.ex
index f1ee83bf0f..a66422e718 100644
--- a/lib/pleroma/web/rich_media/backfill.ex
+++ b/lib/pleroma/web/rich_media/backfill.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.RichMedia.Backfill do
alias Pleroma.Web.RichMedia.Card
+ alias Pleroma.Web.RichMedia.Helpers
alias Pleroma.Web.RichMedia.Parser
alias Pleroma.Web.RichMedia.Parser.TTL
alias Pleroma.Workers.RichMediaWorker
@@ -16,8 +17,7 @@ defmodule Pleroma.Web.RichMedia.Backfill do
Pleroma.Web.ActivityPub.ActivityPub
)
- @spec run(map()) ::
- :ok | {:error, {:invalid_metadata, any()} | :body_too_large | {:content, any()} | any()}
+ @spec run(map()) :: :ok | Parser.parse_errors() | Helpers.get_errors()
def run(%{"url" => url} = args) do
url_hash = Card.url_to_hash(url)
@@ -33,22 +33,16 @@ def run(%{"url" => url} = args) do
end
warm_cache(url_hash, card)
+ :ok
- {:error, {:invalid_metadata, fields}} ->
- Logger.debug("Rich media incomplete or invalid metadata for #{url}: #{inspect(fields)}")
+ {:error, type} = error
+ when type in [:invalid_metadata, :body_too_large, :content_type, :validate] ->
negative_cache(url_hash)
+ error
- {:error, :body_too_large} ->
- Logger.error("Rich media error for #{url}: :body_too_large")
- negative_cache(url_hash)
-
- {:error, {:content_type, type}} ->
- Logger.debug("Rich media error for #{url}: :content_type is #{type}")
- negative_cache(url_hash)
-
- e ->
- Logger.debug("Rich media error for #{url}: #{inspect(e)}")
- {:error, e}
+ {:error, type} = error
+ when type in [:get, :head] ->
+ error
end
end
diff --git a/lib/pleroma/web/rich_media/helpers.ex b/lib/pleroma/web/rich_media/helpers.ex
index b62c9749cd..4f37e896c7 100644
--- a/lib/pleroma/web/rich_media/helpers.ex
+++ b/lib/pleroma/web/rich_media/helpers.ex
@@ -5,6 +5,12 @@
defmodule Pleroma.Web.RichMedia.Helpers do
alias Pleroma.Config
+ require Logger
+
+ @type get_errors :: {:error, :body_too_large | :content_type | :head | :get}
+
+ @spec rich_media_get(String.t()) :: {:ok, String.t()} | get_errors()
+
defp headers do
user_agent =
case Pleroma.Config.get([:rich_media, :user_agent], :default) do
@@ -21,23 +27,30 @@ defp headers do
def rich_media_get(url) do
headers = headers()
- head_check =
- case Pleroma.HTTP.head(url, headers, http_options()) do
- # If the HEAD request didn't reach the server for whatever reason,
- # we assume the GET that comes right after won't either
- {:error, _} = e ->
- e
+ with {_, {:ok, %Tesla.Env{status: 200, headers: headers}}} <-
+ {:head, Pleroma.HTTP.head(url, headers, http_options())},
+ {_, :ok} <- {:content_type, check_content_type(headers)},
+ {_, :ok} <- {:content_length, check_content_length(headers)},
+ {_, {:ok, %Tesla.Env{status: 200, body: body}}} <-
+ {:get, Pleroma.HTTP.get(url, headers, http_options())} do
+ {:ok, body}
+ else
+ {:head, _} ->
+ Logger.debug("Rich media error for #{url}: HTTP HEAD failed")
+ {:error, :head}
- {:ok, %Tesla.Env{status: 200, headers: headers}} ->
- with :ok <- check_content_type(headers),
- :ok <- check_content_length(headers),
- do: :ok
+ {:content_type, {_, type}} ->
+ Logger.debug("Rich media error for #{url}: content-type is #{type}")
+ {:error, :content_type}
- _ ->
- :ok
- end
+ {:content_length, {_, length}} ->
+ Logger.debug("Rich media error for #{url}: content-length is #{length}")
+ {:error, :body_too_large}
- with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, http_options())
+ {:get, _} ->
+ Logger.debug("Rich media error for #{url}: HTTP GET failed")
+ {:error, :get}
+ end
end
defp check_content_type(headers) do
@@ -45,7 +58,7 @@ defp check_content_type(headers) do
{_, content_type} ->
case Plug.Conn.Utils.media_type(content_type) do
{:ok, "text", "html", _} -> :ok
- _ -> {:error, {:content_type, content_type}}
+ _ -> {:error, content_type}
end
_ ->
@@ -60,7 +73,7 @@ defp check_content_length(headers) do
{_, maybe_content_length} ->
case Integer.parse(maybe_content_length) do
{content_length, ""} when content_length <= max_body -> :ok
- {_, ""} -> {:error, :body_too_large}
+ {_, ""} -> {:error, maybe_content_length}
_ -> :ok
end
diff --git a/lib/pleroma/web/rich_media/parser.ex b/lib/pleroma/web/rich_media/parser.ex
index 7f6b5d388a..a3a522d7ab 100644
--- a/lib/pleroma/web/rich_media/parser.ex
+++ b/lib/pleroma/web/rich_media/parser.ex
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser do
+ alias Pleroma.Web.RichMedia.Helpers
require Logger
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
@@ -11,24 +12,26 @@ defp parsers do
Pleroma.Config.get([:rich_media, :parsers])
end
- def parse(nil), do: nil
+ @type parse_errors :: {:error, :rich_media_disabled | :validate}
- @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
- def parse(url) do
+ @spec parse(String.t()) ::
+ {:ok, map()} | parse_errors() | Helpers.get_errors()
+ def parse(url) when is_binary(url) do
with {_, true} <- {:config, @config_impl.get([:rich_media, :enabled])},
- :ok <- validate_page_url(url),
- {:ok, data} <- parse_url(url) do
+ {_, :ok} <- {:validate, validate_page_url(url)},
+ {_, {:ok, data}} <- {:parse, parse_url(url)} do
data = Map.put(data, "url", url)
{:ok, data}
else
{:config, _} -> {:error, :rich_media_disabled}
- e -> e
+ {:validate, _} -> {:error, :validate}
+ {:parse, error} -> error
end
end
defp parse_url(url) do
- with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
- {:ok, html} <- Floki.parse_document(html) do
+ with {:ok, body} <- Helpers.rich_media_get(url),
+ {:ok, html} <- Floki.parse_document(body) do
html
|> maybe_parse()
|> clean_parsed_data()
@@ -50,8 +53,8 @@ defp check_parsed_data(%{"title" => title} = data)
{:ok, data}
end
- defp check_parsed_data(data) do
- {:error, {:invalid_metadata, data}}
+ defp check_parsed_data(_data) do
+ {:error, :invalid_metadata}
end
defp clean_parsed_data(data) do
diff --git a/lib/pleroma/web/rich_media/parsers/o_embed.ex b/lib/pleroma/web/rich_media/parsers/o_embed.ex
index 0f303176ce..35ec144265 100644
--- a/lib/pleroma/web/rich_media/parsers/o_embed.ex
+++ b/lib/pleroma/web/rich_media/parsers/o_embed.ex
@@ -22,7 +22,7 @@ defp get_oembed_url([{"link", attributes, _children} | _]) do
end
defp get_oembed_data(url) do
- with {:ok, %Tesla.Env{body: json}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url) do
+ with {:ok, json} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url) do
Jason.decode(json)
end
end
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index f04959692b..91095c57f3 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -1137,7 +1137,7 @@ defmodule Pleroma.Web.Router do
scope "/" do
pipe_through([:pleroma_html, :authenticate, :require_admin])
- live_dashboard("/phoenix/live_dashboard")
+ live_dashboard("/phoenix/live_dashboard", additional_pages: [oban: Oban.LiveDashboard])
end
# Test-only routes needed to test action dispatching and plug chain execution
diff --git a/lib/pleroma/workers/background_worker.ex b/lib/pleroma/workers/background_worker.ex
index dbf40ee1be..d74cc08f1d 100644
--- a/lib/pleroma/workers/background_worker.ex
+++ b/lib/pleroma/workers/background_worker.ex
@@ -3,7 +3,6 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.BackgroundWorker do
- alias Pleroma.Instances.Instance
alias Pleroma.User
use Pleroma.Workers.WorkerHelper, queue: "background"
@@ -15,11 +14,6 @@ def perform(%Job{args: %{"op" => "user_activation", "user_id" => user_id, "statu
User.perform(:set_activation_async, user, status)
end
- def perform(%Job{args: %{"op" => "delete_user", "user_id" => user_id}}) do
- user = User.get_cached_by_id(user_id)
- User.perform(:delete, user)
- end
-
def perform(%Job{args: %{"op" => "force_password_reset", "user_id" => user_id}}) do
user = User.get_cached_by_id(user_id)
User.perform(:force_password_reset, user)
@@ -45,10 +39,6 @@ def perform(%Job{args: %{"op" => "verify_fields_links", "user_id" => user_id}})
User.perform(:verify_fields_links, user)
end
- def perform(%Job{args: %{"op" => "delete_instance", "host" => host}}) do
- Instance.perform(:delete_instance, host)
- end
-
@impl Oban.Worker
- def timeout(_job), do: :timer.seconds(900)
+ def timeout(_job), do: :timer.seconds(5)
end
diff --git a/lib/pleroma/workers/cron/digest_emails_worker.ex b/lib/pleroma/workers/cron/digest_emails_worker.ex
index 0292bbb3b5..17e92d10b0 100644
--- a/lib/pleroma/workers/cron/digest_emails_worker.ex
+++ b/lib/pleroma/workers/cron/digest_emails_worker.ex
@@ -58,4 +58,7 @@ def send_email(user) do
User.touch_last_digest_emailed_at(user)
end
+
+ @impl Oban.Worker
+ def timeout(_job), do: :timer.seconds(5)
end
diff --git a/lib/pleroma/workers/cron/new_users_digest_worker.ex b/lib/pleroma/workers/cron/new_users_digest_worker.ex
index d2abb2d3b1..1f57aad4a2 100644
--- a/lib/pleroma/workers/cron/new_users_digest_worker.ex
+++ b/lib/pleroma/workers/cron/new_users_digest_worker.ex
@@ -60,4 +60,7 @@ def perform(_job) do
:ok
end
+
+ @impl Oban.Worker
+ def timeout(_job), do: :timer.seconds(5)
end
diff --git a/lib/pleroma/workers/delete_worker.ex b/lib/pleroma/workers/delete_worker.ex
new file mode 100644
index 0000000000..97003fb69e
--- /dev/null
+++ b/lib/pleroma/workers/delete_worker.ex
@@ -0,0 +1,24 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.DeleteWorker do
+ alias Pleroma.Instances.Instance
+ alias Pleroma.User
+
+ use Pleroma.Workers.WorkerHelper, queue: "slow"
+
+ @impl Oban.Worker
+
+ def perform(%Job{args: %{"op" => "delete_user", "user_id" => user_id}}) do
+ user = User.get_cached_by_id(user_id)
+ User.perform(:delete, user)
+ end
+
+ def perform(%Job{args: %{"op" => "delete_instance", "host" => host}}) do
+ Instance.perform(:delete_instance, host)
+ end
+
+ @impl Oban.Worker
+ def timeout(_job), do: :timer.seconds(900)
+end
diff --git a/lib/pleroma/workers/purge_expired_activity.ex b/lib/pleroma/workers/purge_expired_activity.ex
index a65593b6e5..f48e340420 100644
--- a/lib/pleroma/workers/purge_expired_activity.ex
+++ b/lib/pleroma/workers/purge_expired_activity.ex
@@ -6,8 +6,8 @@ defmodule Pleroma.Workers.PurgeExpiredActivity do
@moduledoc """
Worker which purges expired activity.
"""
-
- use Oban.Worker, queue: :slow, max_attempts: 1, unique: [period: :infinity]
+ @queue :background
+ use Oban.Worker, queue: @queue, max_attempts: 1, unique: [period: :infinity]
import Ecto.Query
@@ -46,20 +46,22 @@ defp enabled? do
defp find_activity(id) do
with nil <- Activity.get_by_id_with_object(id) do
- {:error, :activity_not_found}
+ {:cancel, :activity_not_found}
end
end
defp find_user(ap_id) do
with nil <- Pleroma.User.get_by_ap_id(ap_id) do
- {:error, :user_not_found}
+ {:cancel, :user_not_found}
end
end
def get_expiration(id) do
+ queue = Atom.to_string(@queue)
+
from(j in Oban.Job,
where: j.state == "scheduled",
- where: j.queue == "slow",
+ where: j.queue == ^queue,
where: fragment("?->>'activity_id' = ?", j.args, ^id)
)
|> Pleroma.Repo.one()
diff --git a/lib/pleroma/workers/remote_fetcher_worker.ex b/lib/pleroma/workers/remote_fetcher_worker.ex
index 6d777ae946..1c8874b59f 100644
--- a/lib/pleroma/workers/remote_fetcher_worker.ex
+++ b/lib/pleroma/workers/remote_fetcher_worker.ex
@@ -13,7 +13,7 @@ def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do
{:ok, _object} ->
:ok
- {:rejected, reason} ->
+ {:reject, reason} ->
{:cancel, reason}
{:error, :forbidden} ->
@@ -27,9 +27,6 @@ def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do
{:error, _} = e ->
e
-
- e ->
- {:error, e}
end
end
diff --git a/lib/pleroma/workers/rich_media_worker.ex b/lib/pleroma/workers/rich_media_worker.ex
index f18ac658ad..30f9d9e9ee 100644
--- a/lib/pleroma/workers/rich_media_worker.ex
+++ b/lib/pleroma/workers/rich_media_worker.ex
@@ -14,6 +14,23 @@ def perform(%Job{args: %{"op" => "expire", "url" => url} = _args}) do
end
def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do
- Backfill.run(args)
+ case Backfill.run(args) do
+ :ok ->
+ :ok
+
+ {:error, type}
+ when type in [:invalid_metadata, :body_too_large, :content_type, :validate] ->
+ {:cancel, type}
+
+ {:error, type}
+ when type in [:get, :head] ->
+ {:error, type}
+
+ error ->
+ {:error, error}
+ end
end
+
+ @impl Oban.Worker
+ def timeout(_job), do: :timer.seconds(5)
end
diff --git a/lib/pleroma/workers/search_indexing_worker.ex b/lib/pleroma/workers/search_indexing_worker.ex
index 8476a2be50..8969ae3788 100644
--- a/lib/pleroma/workers/search_indexing_worker.ex
+++ b/lib/pleroma/workers/search_indexing_worker.ex
@@ -20,4 +20,7 @@ def perform(%Job{args: %{"op" => "remove_from_index", "object" => object_id}}) d
search_module.remove_from_index(object)
end
+
+ @impl Oban.Worker
+ def timeout(_job), do: :timer.seconds(5)
end
diff --git a/lib/pleroma/workers/user_refresh_worker.ex b/lib/pleroma/workers/user_refresh_worker.ex
index f43170c8fe..0c04fb237d 100644
--- a/lib/pleroma/workers/user_refresh_worker.ex
+++ b/lib/pleroma/workers/user_refresh_worker.ex
@@ -11,4 +11,7 @@ defmodule Pleroma.Workers.UserRefreshWorker do
def perform(%Job{args: %{"ap_id" => ap_id}}) do
User.fetch_by_ap_id(ap_id)
end
+
+ @impl Oban.Worker
+ def timeout(_job), do: :timer.seconds(5)
end
diff --git a/mix.exs b/mix.exs
index 5ec646c477..532df83d9c 100644
--- a/mix.exs
+++ b/mix.exs
@@ -204,6 +204,7 @@ defp deps do
{:exile, "~> 0.10.0"},
{:bandit, "~> 1.5.2"},
{:websock_adapter, "~> 0.5.6"},
+ {:oban_live_dashboard, "~> 0.1.1"},
{:icalendar, "~> 1.1"},
{:geospatial, "~> 0.3.0"},
diff --git a/mix.lock b/mix.lock
index cce19d38b0..e715de26bb 100644
--- a/mix.lock
+++ b/mix.lock
@@ -27,7 +27,7 @@
"crypt": {:git, "https://github.com/msantos/crypt.git", "f75cd55325e33cbea198fb41fe41871392f8fb76", [ref: "f75cd55325e33cbea198fb41fe41871392f8fb76"]},
"csv": {:hex, :csv, "2.4.1", "50e32749953b6bf9818dbfed81cf1190e38cdf24f95891303108087486c5925e", [:mix], [{:parallel_stream, "~> 1.0.4", [hex: :parallel_stream, repo: "hexpm", optional: false]}], "hexpm", "54508938ac67e27966b10ef49606e3ad5995d665d7fc2688efb3eab1307c9079"},
"custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
- "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
+ "db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
"dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
@@ -37,7 +37,7 @@
"ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"},
"ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
"ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.15", "0fc29dbae0e444a29bd6abeee4cf3c4c037e692a272478a234a1cc765077dbb1", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "b6127f3a5c6fc3d84895e4768cc7c199f22b48b67d6c99b13fbf4a374e73f039"},
- "ecto_sql": {:hex, :ecto_sql, "3.11.2", "c7cc7f812af571e50b80294dc2e535821b3b795ce8008d07aa5f336591a185a8", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "73c07f995ac17dbf89d3cfaaf688fcefabcd18b7b004ac63b0dc4ef39499ed6b"},
+ "ecto_sql": {:hex, :ecto_sql, "3.11.3", "4eb7348ff8101fbc4e6bbc5a4404a24fecbe73a3372d16569526b0cf34ebc195", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e5f36e3d736b99c7fee3e631333b8394ade4bafe9d96d35669fca2d81c2be928"},
"eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"},
"elixir_make": {:hex, :elixir_make, "0.7.8", "505026f266552ee5aabca0b9f9c229cbb496c689537c9f922f3eb5431157efc7", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "7a71945b913d37ea89b06966e1342c85cfe549b15e6d6d081e8081c493062c07"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
@@ -73,7 +73,7 @@
"icalendar": {:hex, :icalendar, "1.1.2", "5d0afff5d0143c5bd43f18ae32a777bf0fb9a724543ab05229a460d368f0a5e7", [:mix], [{:timex, "~> 3.4", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm", "2060f8e353fdf3047e95a3f012583dc3c0bbd7ca1010e32ed9e9fc5760ad4292"},
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
"inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"},
- "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
+ "jason": {:hex, :jason, "1.4.3", "d3f984eeb96fe53b85d20e0b049f03e57d075b5acda3ac8d465c969a2536c17b", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "9a90e868927f7c777689baa16d86f4d0e086d968db5c05d917ccff6d443e58a3"},
"joken": {:hex, :joken, "2.6.0", "b9dd9b6d52e3e6fcb6c65e151ad38bf4bc286382b5b6f97079c47ade6b1bcc6a", [:mix], [{:jose, "~> 1.11.5", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "5a95b05a71cd0b54abd35378aeb1d487a23a52c324fa7efdffc512b655b5aaa7"},
"jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, :rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"},
"jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"},
@@ -98,7 +98,8 @@
"nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]},
"oauth2": {:hex, :oauth2, "0.9.4", "632e8e8826a45e33ac2ea5ac66dcc019ba6bb5a0d2ba77e342d33e3b7b252c6e", [:mix], [{:hackney, "~> 1.7", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "407c6b9f60aa0d01b915e2347dc6be78adca706a37f0c530808942da3b62e7af"},
"oauther": {:hex, :oauther, "1.3.0", "82b399607f0ca9d01c640438b34d74ebd9e4acd716508f868e864537ecdb1f76", [:mix], [], "hexpm", "78eb888ea875c72ca27b0864a6f550bc6ee84f2eeca37b093d3d833fbcaec04e"},
- "oban": {:hex, :oban, "2.17.10", "c3e5bd739b5c3fdc38eba1d43ab270a8c6ca4463bb779b7705c69400b0d87678", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4afd027b8e2bc3c399b54318b4f46ee8c40251fb55a285cb4e38b5363f0ee7c4"},
+ "oban": {:hex, :oban, "2.17.12", "33fb0cbfb92b910d48dd91a908590fe3698bb85eacec8cd0d9bc6aa13dddd6d6", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7a647d6cd6bb300073db17faabce22d80ae135da3baf3180a064fa7c4fa046e3"},
+ "oban_live_dashboard": {:hex, :oban_live_dashboard, "0.1.1", "8aa4ceaf381c818f7d5c8185cc59942b8ac82ef0cf559881aacf8d3f8ac7bdd3", [:mix], [{:oban, "~> 2.15", [hex: :oban, repo: "hexpm", optional: false]}, {:phoenix_live_dashboard, "~> 0.7", [hex: :phoenix_live_dashboard, repo: "hexpm", optional: false]}], "hexpm", "16dc4ce9c9a95aa2e655e35ed4e675652994a8def61731a18af85e230e1caa63"},
"octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"},
"open_api_spex": {:hex, :open_api_spex, "3.18.2", "8c855e83bfe8bf81603d919d6e892541eafece3720f34d1700b58024dadde247", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "aa3e6dcfc0ad6a02596b2172662da21c9dd848dac145ea9e603f54e3d81b8d2b"},
"parallel_stream": {:hex, :parallel_stream, "1.0.6", "b967be2b23f0f6787fab7ed681b4c45a215a81481fb62b01a5b750fa8f30f76c", [:mix], [], "hexpm", "639b2e8749e11b87b9eb42f2ad325d161c170b39b288ac8d04c4f31f8f0823eb"},
diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs
index e254004f1d..f09b0fa0d9 100644
--- a/test/pleroma/web/common_api_test.exs
+++ b/test/pleroma/web/common_api_test.exs
@@ -13,6 +13,7 @@ defmodule Pleroma.Web.CommonAPITest do
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.Rule
+ alias Pleroma.Tests.ObanHelpers
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
@@ -22,7 +23,7 @@ defmodule Pleroma.Web.CommonAPITest do
alias Pleroma.Web.CommonAPI
alias Pleroma.Workers.PollWorker
- import Ecto.Query, only: [from: 2]
+ import Ecto.Query, only: [from: 2, where: 3]
import Mock
import Mox
import Pleroma.Factory
@@ -1968,4 +1969,217 @@ test "it does not boost if group is blocking poster", %{poster: poster, group: g
assert [] = announces
end
end
+
+ describe "Oban jobs are cancelled" do
+ setup do
+ clear_config([:instance, :federating], true)
+
+ local_user = insert(:user)
+
+ remote_one =
+ insert(:user, %{
+ local: false,
+ nickname: "nick1@domain.com",
+ ap_id: "https://domain.com/users/nick1",
+ inbox: "https://domain.com/users/nick1/inbox",
+ shared_inbox: "https://domain.com/inbox"
+ })
+
+ remote_two =
+ insert(:user, %{
+ local: false,
+ nickname: "nick2@example.com",
+ ap_id: "https://example.com/users/nick2",
+ inbox: "https://example.com/users/nick2/inbox",
+ shared_inbox: "https://example.com/inbox"
+ })
+
+ %{local_user: local_user, remote_one: remote_one, remote_two: remote_two}
+ end
+
+ test "when deleting posts", %{
+ local_user: local_user,
+ remote_one: remote_one,
+ remote_two: remote_two
+ } do
+ {:ok, _, _} = Pleroma.User.follow(remote_one, local_user)
+ {:ok, _, _} = Pleroma.User.follow(remote_two, local_user)
+
+ {:ok, %{data: %{"id" => ap_id}} = activity} =
+ CommonAPI.post(local_user, %{status: "Happy Friday everyone!"})
+
+ # Generate the publish_one jobs
+ ObanHelpers.perform_all()
+
+ publish_one_jobs =
+ all_enqueued()
+ |> Enum.filter(fn job ->
+ match?(
+ %{
+ state: "available",
+ queue: "federator_outgoing",
+ worker: "Pleroma.Workers.PublisherWorker",
+ args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}}
+ },
+ job
+ )
+ end)
+
+ assert length(publish_one_jobs) == 2
+
+ # The delete should have triggered cancelling the publish_one jobs
+ assert {:ok, _delete} = CommonAPI.delete(activity.id, local_user)
+
+ # all_enqueued/1 will not return cancelled jobs
+ cancelled_jobs =
+ Oban.Job
+ |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
+ |> where([j], j.state == "cancelled")
+ |> where([j], j.args["op"] == "publish_one")
+ |> where([j], j.args["params"]["id"] == ^ap_id)
+ |> Pleroma.Repo.all()
+
+ assert length(cancelled_jobs) == 2
+ end
+
+ test "when unfavoriting posts", %{
+ local_user: local_user,
+ remote_one: remote_user
+ } do
+ {:ok, activity} =
+ CommonAPI.post(remote_user, %{status: "I like turtles!"})
+
+ {:ok, %{data: %{"id" => ap_id}} = _favorite} =
+ CommonAPI.favorite(local_user, activity.id)
+
+ # Generate the publish_one jobs
+ ObanHelpers.perform_all()
+
+ publish_one_jobs =
+ all_enqueued()
+ |> Enum.filter(fn job ->
+ match?(
+ %{
+ state: "available",
+ queue: "federator_outgoing",
+ worker: "Pleroma.Workers.PublisherWorker",
+ args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}}
+ },
+ job
+ )
+ end)
+
+ assert length(publish_one_jobs) == 1
+
+ # The unfavorite should have triggered cancelling the publish_one jobs
+ assert {:ok, _unfavorite} = CommonAPI.unfavorite(activity.id, local_user)
+
+ # all_enqueued/1 will not return cancelled jobs
+ cancelled_jobs =
+ Oban.Job
+ |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
+ |> where([j], j.state == "cancelled")
+ |> where([j], j.args["op"] == "publish_one")
+ |> where([j], j.args["params"]["id"] == ^ap_id)
+ |> Pleroma.Repo.all()
+
+ assert length(cancelled_jobs) == 1
+ end
+
+ test "when unboosting posts", %{
+ local_user: local_user,
+ remote_one: remote_one,
+ remote_two: remote_two
+ } do
+ {:ok, _, _} = Pleroma.User.follow(remote_one, local_user)
+ {:ok, _, _} = Pleroma.User.follow(remote_two, local_user)
+
+ {:ok, activity} =
+ CommonAPI.post(remote_one, %{status: "This is an unpleasant post"})
+
+ {:ok, %{data: %{"id" => ap_id}} = _repeat} =
+ CommonAPI.repeat(activity.id, local_user)
+
+ # Generate the publish_one jobs
+ ObanHelpers.perform_all()
+
+ publish_one_jobs =
+ all_enqueued()
+ |> Enum.filter(fn job ->
+ match?(
+ %{
+ state: "available",
+ queue: "federator_outgoing",
+ worker: "Pleroma.Workers.PublisherWorker",
+ args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}}
+ },
+ job
+ )
+ end)
+
+ assert length(publish_one_jobs) == 2
+
+ # The unrepeat should have triggered cancelling the publish_one jobs
+ assert {:ok, _unfavorite} = CommonAPI.unrepeat(activity.id, local_user)
+
+ # all_enqueued/1 will not return cancelled jobs
+ cancelled_jobs =
+ Oban.Job
+ |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
+ |> where([j], j.state == "cancelled")
+ |> where([j], j.args["op"] == "publish_one")
+ |> where([j], j.args["params"]["id"] == ^ap_id)
+ |> Pleroma.Repo.all()
+
+ assert length(cancelled_jobs) == 2
+ end
+
+ test "when unreacting to posts", %{
+ local_user: local_user,
+ remote_one: remote_one,
+ remote_two: remote_two
+ } do
+ {:ok, _, _} = Pleroma.User.follow(remote_one, local_user)
+ {:ok, _, _} = Pleroma.User.follow(remote_two, local_user)
+
+ {:ok, activity} =
+ CommonAPI.post(remote_one, %{status: "Gang gang!!!!"})
+
+ {:ok, %{data: %{"id" => ap_id}} = _react} =
+ CommonAPI.react_with_emoji(activity.id, local_user, "👍")
+
+ # Generate the publish_one jobs
+ ObanHelpers.perform_all()
+
+ publish_one_jobs =
+ all_enqueued()
+ |> Enum.filter(fn job ->
+ match?(
+ %{
+ state: "available",
+ queue: "federator_outgoing",
+ worker: "Pleroma.Workers.PublisherWorker",
+ args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}}
+ },
+ job
+ )
+ end)
+
+ assert length(publish_one_jobs) == 2
+
+ # The unreact should have triggered cancelling the publish_one jobs
+ assert {:ok, _unreact} = CommonAPI.unreact_with_emoji(activity.id, local_user, "👍")
+
+ # all_enqueued/1 will not return cancelled jobs
+ cancelled_jobs =
+ Oban.Job
+ |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
+ |> where([j], j.state == "cancelled")
+ |> where([j], j.args["op"] == "publish_one")
+ |> where([j], j.args["params"]["id"] == ^ap_id)
+ |> Pleroma.Repo.all()
+
+ assert length(cancelled_jobs) == 2
+ end
+ end
end
diff --git a/test/pleroma/web/metadata/utils_test.exs b/test/pleroma/web/metadata/utils_test.exs
index 3daf852fba..9bc02dadf6 100644
--- a/test/pleroma/web/metadata/utils_test.exs
+++ b/test/pleroma/web/metadata/utils_test.exs
@@ -8,7 +8,7 @@ defmodule Pleroma.Web.Metadata.UtilsTest do
alias Pleroma.Web.Metadata.Utils
describe "scrub_html_and_truncate/1" do
- test "it returns content text without encode HTML if summary is nil" do
+ test "it returns content text without HTML if summary is nil" do
user = insert(:user)
note =
@@ -17,14 +17,14 @@ test "it returns content text without encode HTML if summary is nil" do
"actor" => user.ap_id,
"id" => "https://pleroma.gov/objects/whatever",
"summary" => nil,
- "content" => "Pleroma's really cool!"
+ "content" => "Pleroma's really cool!
"
}
})
assert Utils.scrub_html_and_truncate(note) == "Pleroma's really cool!"
end
- test "it returns context text without encode HTML if summary is empty" do
+ test "it returns content text without HTML if summary is empty" do
user = insert(:user)
note =
@@ -33,14 +33,14 @@ test "it returns context text without encode HTML if summary is empty" do
"actor" => user.ap_id,
"id" => "https://pleroma.gov/objects/whatever",
"summary" => "",
- "content" => "Pleroma's really cool!"
+ "content" => "Pleroma's really cool!
"
}
})
assert Utils.scrub_html_and_truncate(note) == "Pleroma's really cool!"
end
- test "it returns summary text without encode HTML if summary is filled" do
+ test "it returns summary text without HTML if summary is filled" do
user = insert(:user)
note =
@@ -48,7 +48,7 @@ test "it returns summary text without encode HTML if summary is filled" do
data: %{
"actor" => user.ap_id,
"id" => "https://pleroma.gov/objects/whatever",
- "summary" => "Public service announcement on caffeine consumption",
+ "summary" => "Public service announcement on caffeine consumption
",
"content" => "cofe"
}
})
@@ -57,6 +57,22 @@ test "it returns summary text without encode HTML if summary is filled" do
"Public service announcement on caffeine consumption"
end
+ test "it returns empty string if summary and content are absent" do
+ user = insert(:user)
+
+ note =
+ insert(:note, %{
+ data: %{
+ "actor" => user.ap_id,
+ "id" => "https://pleroma.gov/objects/whatever",
+ "content" => nil,
+ "summary" => nil
+ }
+ })
+
+ assert Utils.scrub_html_and_truncate(note) == ""
+ end
+
test "it does not return old content after editing" do
user = insert(:user)
diff --git a/test/pleroma/web/rich_media/parser_test.exs b/test/pleroma/web/rich_media/parser_test.exs
index a5f2563a20..8fd75b57a6 100644
--- a/test/pleroma/web/rich_media/parser_test.exs
+++ b/test/pleroma/web/rich_media/parser_test.exs
@@ -20,7 +20,7 @@ test "returns error when no metadata present" do
end
test "doesn't just add a title" do
- assert {:error, {:invalid_metadata, _}} = Parser.parse("https://example.com/non-ogp")
+ assert {:error, :invalid_metadata} = Parser.parse("https://example.com/non-ogp")
end
test "parses ogp" do
@@ -96,7 +96,7 @@ test "rejects invalid OGP data" do
end
test "returns error if getting page was not successful" do
- assert {:error, :overload} = Parser.parse("https://example.com/error")
+ assert {:error, :get} = Parser.parse("https://example.com/error")
end
test "does a HEAD request to check if the body is too large" do
@@ -104,17 +104,17 @@ test "does a HEAD request to check if the body is too large" do
end
test "does a HEAD request to check if the body is html" do
- assert {:error, {:content_type, _}} = Parser.parse("https://example.com/pdf-file")
+ assert {:error, :content_type} = Parser.parse("https://example.com/pdf-file")
end
test "refuses to crawl incomplete URLs" do
url = "example.com/ogp"
- assert :error == Parser.parse(url)
+ assert {:error, :validate} == Parser.parse(url)
end
test "refuses to crawl malformed URLs" do
url = "example.com[]/ogp"
- assert :error == Parser.parse(url)
+ assert {:error, :validate} == Parser.parse(url)
end
test "refuses to crawl URLs of private network from posts" do
@@ -126,7 +126,7 @@ test "refuses to crawl URLs of private network from posts" do
"https://pleroma.local/notice/9kCP7V"
]
|> Enum.each(fn url ->
- assert :error == Parser.parse(url)
+ assert {:error, :validate} == Parser.parse(url)
end)
end
diff --git a/test/pleroma/workers/purge_expired_activity_test.exs b/test/pleroma/workers/purge_expired_activity_test.exs
index 0372f54cae..040ff6a51c 100644
--- a/test/pleroma/workers/purge_expired_activity_test.exs
+++ b/test/pleroma/workers/purge_expired_activity_test.exs
@@ -42,7 +42,7 @@ test "error if user was not found" do
user = Pleroma.User.get_by_ap_id(activity.actor)
Pleroma.Repo.delete(user)
- assert {:error, :user_not_found} =
+ assert {:cancel, :user_not_found} =
perform_job(Pleroma.Workers.PurgeExpiredActivity, %{activity_id: activity.id})
end
@@ -53,7 +53,7 @@ test "error if actiivity was not found" do
expires_at: DateTime.add(DateTime.utc_now(), 3601)
})
- assert {:error, :activity_not_found} =
+ assert {:cancel, :activity_not_found} =
perform_job(Pleroma.Workers.PurgeExpiredActivity, %{activity_id: "some_if"})
end
end
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
index 87ec9c14e9..cac692c2fd 100644
--- a/test/support/http_request_mock.ex
+++ b/test/support/http_request_mock.ex
@@ -1782,7 +1782,7 @@ def post(url, query, body, headers) do
]
def head(url, _query, _body, _headers) when url in @rich_media_mocks do
- {:ok, %Tesla.Env{status: 404, body: ""}}
+ {:ok, %Tesla.Env{status: 200, body: ""}}
end
def head("https://example.com/pdf-file", _, _, _) do