From 1e8d1904e61c73f624bdce600fc243dff81e19fc Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 10:15:23 -0400 Subject: [PATCH 01/22] Define missing Oban timeouts --- changelog.d/oban-timeouts.change | 1 + lib/pleroma/workers/cron/digest_emails_worker.ex | 3 +++ lib/pleroma/workers/cron/new_users_digest_worker.ex | 3 +++ lib/pleroma/workers/rich_media_worker.ex | 3 +++ lib/pleroma/workers/search_indexing_worker.ex | 3 +++ lib/pleroma/workers/user_refresh_worker.ex | 3 +++ 6 files changed, 16 insertions(+) create mode 100644 changelog.d/oban-timeouts.change diff --git a/changelog.d/oban-timeouts.change b/changelog.d/oban-timeouts.change new file mode 100644 index 0000000000..33d017c5c3 --- /dev/null +++ b/changelog.d/oban-timeouts.change @@ -0,0 +1 @@ +Ensure all Oban jobs have timeouts defined diff --git a/lib/pleroma/workers/cron/digest_emails_worker.ex b/lib/pleroma/workers/cron/digest_emails_worker.ex index 0292bbb3b5..17e92d10b0 100644 --- a/lib/pleroma/workers/cron/digest_emails_worker.ex +++ b/lib/pleroma/workers/cron/digest_emails_worker.ex @@ -58,4 +58,7 @@ def send_email(user) do User.touch_last_digest_emailed_at(user) end + + @impl Oban.Worker + def timeout(_job), do: :timer.seconds(5) end diff --git a/lib/pleroma/workers/cron/new_users_digest_worker.ex b/lib/pleroma/workers/cron/new_users_digest_worker.ex index d2abb2d3b1..1f57aad4a2 100644 --- a/lib/pleroma/workers/cron/new_users_digest_worker.ex +++ b/lib/pleroma/workers/cron/new_users_digest_worker.ex @@ -60,4 +60,7 @@ def perform(_job) do :ok end + + @impl Oban.Worker + def timeout(_job), do: :timer.seconds(5) end diff --git a/lib/pleroma/workers/rich_media_worker.ex b/lib/pleroma/workers/rich_media_worker.ex index f18ac658ad..ecc980a28a 100644 --- a/lib/pleroma/workers/rich_media_worker.ex +++ b/lib/pleroma/workers/rich_media_worker.ex @@ -16,4 +16,7 @@ def perform(%Job{args: %{"op" => "expire", "url" => url} = _args}) do def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do Backfill.run(args) end + + @impl Oban.Worker + def timeout(_job), do: :timer.seconds(5) end diff --git a/lib/pleroma/workers/search_indexing_worker.ex b/lib/pleroma/workers/search_indexing_worker.ex index 8476a2be50..8969ae3788 100644 --- a/lib/pleroma/workers/search_indexing_worker.ex +++ b/lib/pleroma/workers/search_indexing_worker.ex @@ -20,4 +20,7 @@ def perform(%Job{args: %{"op" => "remove_from_index", "object" => object_id}}) d search_module.remove_from_index(object) end + + @impl Oban.Worker + def timeout(_job), do: :timer.seconds(5) end diff --git a/lib/pleroma/workers/user_refresh_worker.ex b/lib/pleroma/workers/user_refresh_worker.ex index f43170c8fe..0c04fb237d 100644 --- a/lib/pleroma/workers/user_refresh_worker.ex +++ b/lib/pleroma/workers/user_refresh_worker.ex @@ -11,4 +11,7 @@ defmodule Pleroma.Workers.UserRefreshWorker do def perform(%Job{args: %{"ap_id" => ap_id}}) do User.fetch_by_ap_id(ap_id) end + + @impl Oban.Worker + def timeout(_job), do: :timer.seconds(5) end From 6278af209af00594c4d1ae37410da629e02b4df2 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 10:16:02 -0400 Subject: [PATCH 02/22] Bump Oban to 2.17.12 --- mix.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mix.lock b/mix.lock index 495f20cbc6..c7d71bc84d 100644 --- a/mix.lock +++ b/mix.lock @@ -25,7 +25,7 @@ "credo": {:hex, :credo, "1.7.3", "05bb11eaf2f2b8db370ecaa6a6bda2ec49b2acd5e0418bc106b73b07128c0436", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: 
false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "35ea675a094c934c22fb1dca3696f3c31f2728ae6ef5a53b5d648c11180a4535"}, "crontab": {:hex, :crontab, "1.1.8", "2ce0e74777dfcadb28a1debbea707e58b879e6aa0ffbf9c9bb540887bce43617", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"}, "custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"}, - "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"}, + "db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"}, "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, @@ -35,7 +35,7 @@ "ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.15", "0fc29dbae0e444a29bd6abeee4cf3c4c037e692a272478a234a1cc765077dbb1", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "b6127f3a5c6fc3d84895e4768cc7c199f22b48b67d6c99b13fbf4a374e73f039"}, - "ecto_sql": {:hex, :ecto_sql, "3.11.2", "c7cc7f812af571e50b80294dc2e535821b3b795ce8008d07aa5f336591a185a8", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: 
false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "73c07f995ac17dbf89d3cfaaf688fcefabcd18b7b004ac63b0dc4ef39499ed6b"}, + "ecto_sql": {:hex, :ecto_sql, "3.11.3", "4eb7348ff8101fbc4e6bbc5a4404a24fecbe73a3372d16569526b0cf34ebc195", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e5f36e3d736b99c7fee3e631333b8394ade4bafe9d96d35669fca2d81c2be928"}, "eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"}, "elixir_make": {:hex, :elixir_make, "0.7.8", "505026f266552ee5aabca0b9f9c229cbb496c689537c9f922f3eb5431157efc7", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "7a71945b913d37ea89b06966e1342c85cfe549b15e6d6d081e8081c493062c07"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, @@ -65,7 +65,7 @@ "httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"}, "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, "inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"}, - "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, + "jason": {:hex, :jason, "1.4.3", "d3f984eeb96fe53b85d20e0b049f03e57d075b5acda3ac8d465c969a2536c17b", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "9a90e868927f7c777689baa16d86f4d0e086d968db5c05d917ccff6d443e58a3"}, "joken": {:hex, :joken, "2.6.0", "b9dd9b6d52e3e6fcb6c65e151ad38bf4bc286382b5b6f97079c47ade6b1bcc6a", [:mix], [{:jose, "~> 1.11.5", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "5a95b05a71cd0b54abd35378aeb1d487a23a52c324fa7efdffc512b655b5aaa7"}, "jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, 
:rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"}, "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"}, @@ -88,7 +88,7 @@ "nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", [:mix], [], "hexpm", "27eac315a94909d4dc68bc07a4a83e06c8379237c5ea528a9acff4ca1c873c52"}, "nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"}, "nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]}, - "oban": {:hex, :oban, "2.17.10", "c3e5bd739b5c3fdc38eba1d43ab270a8c6ca4463bb779b7705c69400b0d87678", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4afd027b8e2bc3c399b54318b4f46ee8c40251fb55a285cb4e38b5363f0ee7c4"}, + "oban": {:hex, :oban, "2.17.12", "33fb0cbfb92b910d48dd91a908590fe3698bb85eacec8cd0d9bc6aa13dddd6d6", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7a647d6cd6bb300073db17faabce22d80ae135da3baf3180a064fa7c4fa046e3"}, "octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"}, "open_api_spex": {:hex, :open_api_spex, "3.18.2", "8c855e83bfe8bf81603d919d6e892541eafece3720f34d1700b58024dadde247", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "aa3e6dcfc0ad6a02596b2172662da21c9dd848dac145ea9e603f54e3d81b8d2b"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, From 2e2caad28db9dbc7342ac706bc743ec393c2e7e4 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 10:23:03 -0400 Subject: [PATCH 03/22] Fix Oban jobs exiting with :error instead of :cancel --- changelog.d/oban-cancel.change | 1 + lib/pleroma/workers/purge_expired_activity.ex | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/oban-cancel.change diff --git a/changelog.d/oban-cancel.change 
b/changelog.d/oban-cancel.change new file mode 100644 index 0000000000..e4512d3bb1 --- /dev/null +++ b/changelog.d/oban-cancel.change @@ -0,0 +1 @@ +Changed some jobs to return :cancel on unrecoverable errors that should not be retried diff --git a/lib/pleroma/workers/purge_expired_activity.ex b/lib/pleroma/workers/purge_expired_activity.ex index a65593b6e5..6b0799a147 100644 --- a/lib/pleroma/workers/purge_expired_activity.ex +++ b/lib/pleroma/workers/purge_expired_activity.ex @@ -46,13 +46,13 @@ defp enabled? do defp find_activity(id) do with nil <- Activity.get_by_id_with_object(id) do - {:error, :activity_not_found} + {:cancel, :activity_not_found} end end defp find_user(ap_id) do with nil <- Pleroma.User.get_by_ap_id(ap_id) do - {:error, :user_not_found} + {:cancel, :user_not_found} end end From 2f14990c5c57df00126ea2b06ccb706c6dd8f918 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 10:25:18 -0400 Subject: [PATCH 04/22] Change PurgeExpiredActivity to use the background queue --- lib/pleroma/workers/purge_expired_activity.ex | 8 +++++--- test/pleroma/workers/purge_expired_activity_test.exs | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/pleroma/workers/purge_expired_activity.ex b/lib/pleroma/workers/purge_expired_activity.ex index 6b0799a147..f48e340420 100644 --- a/lib/pleroma/workers/purge_expired_activity.ex +++ b/lib/pleroma/workers/purge_expired_activity.ex @@ -6,8 +6,8 @@ defmodule Pleroma.Workers.PurgeExpiredActivity do @moduledoc """ Worker which purges expired activity. """ - - use Oban.Worker, queue: :slow, max_attempts: 1, unique: [period: :infinity] + @queue :background + use Oban.Worker, queue: @queue, max_attempts: 1, unique: [period: :infinity] import Ecto.Query @@ -57,9 +57,11 @@ defp find_user(ap_id) do end def get_expiration(id) do + queue = Atom.to_string(@queue) + from(j in Oban.Job, where: j.state == "scheduled", - where: j.queue == "slow", + where: j.queue == ^queue, where: fragment("?->>'activity_id' = ?", j.args, ^id) ) |> Pleroma.Repo.one() diff --git a/test/pleroma/workers/purge_expired_activity_test.exs b/test/pleroma/workers/purge_expired_activity_test.exs index 0372f54cae..040ff6a51c 100644 --- a/test/pleroma/workers/purge_expired_activity_test.exs +++ b/test/pleroma/workers/purge_expired_activity_test.exs @@ -42,7 +42,7 @@ test "error if user was not found" do user = Pleroma.User.get_by_ap_id(activity.actor) Pleroma.Repo.delete(user) - assert {:error, :user_not_found} = + assert {:cancel, :user_not_found} = perform_job(Pleroma.Workers.PurgeExpiredActivity, %{activity_id: activity.id}) end @@ -53,7 +53,7 @@ test "error if actiivity was not found" do expires_at: DateTime.add(DateTime.utc_now(), 3601) }) - assert {:error, :activity_not_found} = + assert {:cancel, :activity_not_found} = perform_job(Pleroma.Workers.PurgeExpiredActivity, %{activity_id: "some_if"}) end end From 52b6dd8bff6a17a75d8a3122cd599c0195a3df8d Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 10:28:55 -0400 Subject: [PATCH 05/22] Increase background job concurrency to 20 --- config/config.exs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.exs b/config/config.exs index dd333212ce..040c480502 100644 --- a/config/config.exs +++ b/config/config.exs @@ -589,7 +589,7 @@ federator_outgoing: 5, web_push: 50, transmogrifier: 20, - background: 5, + background: 20, search_indexing: [limit: 10, paused: true], slow: 1 ], From 30defb1674512263e2eea25ef4a44d7159e8bb60 Mon Sep 17 00:00:00 2001 From: Mark Felder 
Date: Mon, 15 Jul 2024 11:58:07 -0400 Subject: [PATCH 06/22] Create a DeleteWorker and change user and instance deletion jobs to use it These deletion tasks are slow, but the other background jobs are not. This will allow us to have a lower timeout on the normal background jobs. --- lib/pleroma/instances/instance.ex | 4 ++-- lib/pleroma/user.ex | 3 ++- lib/pleroma/workers/background_worker.ex | 12 +----------- lib/pleroma/workers/delete_worker.ex | 24 ++++++++++++++++++++++++ 4 files changed, 29 insertions(+), 14 deletions(-) create mode 100644 lib/pleroma/workers/delete_worker.ex diff --git a/lib/pleroma/instances/instance.ex b/lib/pleroma/instances/instance.ex index c497a4fb75..288555146e 100644 --- a/lib/pleroma/instances/instance.ex +++ b/lib/pleroma/instances/instance.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Instances.Instance do alias Pleroma.Maps alias Pleroma.Repo alias Pleroma.User - alias Pleroma.Workers.BackgroundWorker + alias Pleroma.Workers.DeleteWorker use Ecto.Schema @@ -297,7 +297,7 @@ defp scrape_metadata(%URI{} = instance_uri) do all of those users' activities and notifications. """ def delete_users_and_activities(host) when is_binary(host) do - BackgroundWorker.enqueue("delete_instance", %{"host" => host}) + DeleteWorker.enqueue("delete_instance", %{"host" => host}) end def perform(:delete_instance, host) when is_binary(host) do diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 7a8a689314..e28d76a7c8 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -38,6 +38,7 @@ defmodule Pleroma.User do alias Pleroma.Web.OAuth alias Pleroma.Web.RelMe alias Pleroma.Workers.BackgroundWorker + alias Pleroma.Workers.DeleteWorker alias Pleroma.Workers.UserRefreshWorker require Logger @@ -1982,7 +1983,7 @@ def delete(users) when is_list(users) do def delete(%User{} = user) do # Purge the user immediately purge(user) - BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id}) + DeleteWorker.enqueue("delete_user", %{"user_id" => user.id}) end # *Actually* delete the user from the DB diff --git a/lib/pleroma/workers/background_worker.ex b/lib/pleroma/workers/background_worker.ex index dbf40ee1be..d74cc08f1d 100644 --- a/lib/pleroma/workers/background_worker.ex +++ b/lib/pleroma/workers/background_worker.ex @@ -3,7 +3,6 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Workers.BackgroundWorker do - alias Pleroma.Instances.Instance alias Pleroma.User use Pleroma.Workers.WorkerHelper, queue: "background" @@ -15,11 +14,6 @@ def perform(%Job{args: %{"op" => "user_activation", "user_id" => user_id, "statu User.perform(:set_activation_async, user, status) end - def perform(%Job{args: %{"op" => "delete_user", "user_id" => user_id}}) do - user = User.get_cached_by_id(user_id) - User.perform(:delete, user) - end - def perform(%Job{args: %{"op" => "force_password_reset", "user_id" => user_id}}) do user = User.get_cached_by_id(user_id) User.perform(:force_password_reset, user) @@ -45,10 +39,6 @@ def perform(%Job{args: %{"op" => "verify_fields_links", "user_id" => user_id}}) User.perform(:verify_fields_links, user) end - def perform(%Job{args: %{"op" => "delete_instance", "host" => host}}) do - Instance.perform(:delete_instance, host) - end - @impl Oban.Worker - def timeout(_job), do: :timer.seconds(900) + def timeout(_job), do: :timer.seconds(5) end diff --git a/lib/pleroma/workers/delete_worker.ex b/lib/pleroma/workers/delete_worker.ex new file mode 100644 index 0000000000..97003fb69e --- /dev/null +++ b/lib/pleroma/workers/delete_worker.ex @@ -0,0 +1,24 @@ +# 
Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Workers.DeleteWorker do + alias Pleroma.Instances.Instance + alias Pleroma.User + + use Pleroma.Workers.WorkerHelper, queue: "slow" + + @impl Oban.Worker + + def perform(%Job{args: %{"op" => "delete_user", "user_id" => user_id}}) do + user = User.get_cached_by_id(user_id) + User.perform(:delete, user) + end + + def perform(%Job{args: %{"op" => "delete_instance", "host" => host}}) do + Instance.perform(:delete_instance, host) + end + + @impl Oban.Worker + def timeout(_job), do: :timer.seconds(900) +end From 80e16de3bdf447aebd58037bb0a2c7a605e3c38e Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 12:00:58 -0400 Subject: [PATCH 07/22] Increase slow job queue parallelization --- config/config.exs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.exs b/config/config.exs index 040c480502..044f951f6e 100644 --- a/config/config.exs +++ b/config/config.exs @@ -591,7 +591,7 @@ transmogrifier: 20, background: 20, search_indexing: [limit: 10, paused: true], - slow: 1 + slow: 5 ], plugins: [Oban.Plugins.Pruner], crontab: [ From c9203f125c3ce44bc7b7273fedca089aadfb6b86 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Mon, 15 Jul 2024 15:21:16 -0400 Subject: [PATCH 08/22] Added a Mix task "pleroma.config fix_mrf_policies" which will remove erroneous MRF policies from ConfigDB --- changelog.d/fix-mrfs.add | 1 + lib/mix/tasks/pleroma/config.ex | 29 +++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 changelog.d/fix-mrfs.add diff --git a/changelog.d/fix-mrfs.add b/changelog.d/fix-mrfs.add new file mode 100644 index 0000000000..a38830109d --- /dev/null +++ b/changelog.d/fix-mrfs.add @@ -0,0 +1 @@ +Added a Mix task "pleroma.config fix_mrf_policies" which will remove erroneous MRF policies from ConfigDB diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex index 3a2ea44f80..8b3b2f18ba 100644 --- a/lib/mix/tasks/pleroma/config.ex +++ b/lib/mix/tasks/pleroma/config.ex @@ -205,6 +205,35 @@ def run(["delete", group]) do end end + # Removes any policies that are not a real module + # as they will prevent the server from starting + def run(["fix_mrf_policies"]) do + check_configdb(fn -> + start_pleroma() + + group = :pleroma + key = :mrf + + %{value: value} = + group + |> ConfigDB.get_by_group_and_key(key) + + policies = + Keyword.get(value, :policies, []) + |> Enum.filter(&is_atom(&1)) + |> Enum.filter(fn mrf -> + case Code.ensure_compiled(mrf) do + {:module, _} -> true + {:error, _} -> false + end + end) + + value = Keyword.put(value, :policies, policies) + + ConfigDB.update_or_create(%{group: group, key: key, value: value}) + end) + end + @spec migrate_to_db(Path.t() | nil) :: any() def migrate_to_db(file_path \\ nil) do with :ok <- Pleroma.Config.DeprecationWarnings.warn() do From 4cbb59c8f615183efa67ad8b628fce40c6bf7cd9 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 17 Jul 2024 09:32:29 -0400 Subject: [PATCH 09/22] Add Oban Live Dashboard --- changelog.d/oban-live_dashboard.add | 1 + lib/pleroma/web/router.ex | 2 +- mix.exs | 1 + mix.lock | 1 + 4 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/oban-live_dashboard.add diff --git a/changelog.d/oban-live_dashboard.add b/changelog.d/oban-live_dashboard.add new file mode 100644 index 0000000000..b5b3e4f41a --- /dev/null +++ b/changelog.d/oban-live_dashboard.add @@ -0,0 +1 @@ +Oban 
jobs can now be viewed in the Live Dashboard diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index 56c457e90b..fc40a11432 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -988,7 +988,7 @@ defmodule Pleroma.Web.Router do scope "/" do pipe_through([:pleroma_html, :authenticate, :require_admin]) - live_dashboard("/phoenix/live_dashboard") + live_dashboard("/phoenix/live_dashboard", additional_pages: [oban: Oban.LiveDashboard]) end # Test-only routes needed to test action dispatching and plug chain execution diff --git a/mix.exs b/mix.exs index eab77d9cd2..6770f95fd4 100644 --- a/mix.exs +++ b/mix.exs @@ -201,6 +201,7 @@ defp deps do {:exile, "~> 0.10.0"}, {:bandit, "~> 1.5.2"}, {:websock_adapter, "~> 0.5.6"}, + {:oban_live_dashboard, "~> 0.1.1"}, ## dev & test {:phoenix_live_reload, "~> 1.3.3", only: :dev}, diff --git a/mix.lock b/mix.lock index c7d71bc84d..61ede9e5eb 100644 --- a/mix.lock +++ b/mix.lock @@ -89,6 +89,7 @@ "nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"}, "nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]}, "oban": {:hex, :oban, "2.17.12", "33fb0cbfb92b910d48dd91a908590fe3698bb85eacec8cd0d9bc6aa13dddd6d6", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7a647d6cd6bb300073db17faabce22d80ae135da3baf3180a064fa7c4fa046e3"}, + "oban_live_dashboard": {:hex, :oban_live_dashboard, "0.1.1", "8aa4ceaf381c818f7d5c8185cc59942b8ac82ef0cf559881aacf8d3f8ac7bdd3", [:mix], [{:oban, "~> 2.15", [hex: :oban, repo: "hexpm", optional: false]}, {:phoenix_live_dashboard, "~> 0.7", [hex: :phoenix_live_dashboard, repo: "hexpm", optional: false]}], "hexpm", "16dc4ce9c9a95aa2e655e35ed4e675652994a8def61731a18af85e230e1caa63"}, "octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"}, "open_api_spex": {:hex, :open_api_spex, "3.18.2", "8c855e83bfe8bf81603d919d6e892541eafece3720f34d1700b58024dadde247", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "aa3e6dcfc0ad6a02596b2172662da21c9dd848dac145ea9e603f54e3d81b8d2b"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, From d124d8645e1455fe5d4d2ab143f3ca09967f9572 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 17 Jul 2024 12:40:07 -0400 Subject: [PATCH 10/22] Rework some Rich Media 
functionality for better error handling Oban should not retry jobs that are likely to fail again --- changelog.d/oban-rich-media-errors.fix | 1 + lib/pleroma/web/rich_media/backfill.ex | 24 ++++------ lib/pleroma/web/rich_media/helpers.ex | 44 ++++++++++++------- lib/pleroma/web/rich_media/parser.ex | 23 +++++----- lib/pleroma/web/rich_media/parsers/o_embed.ex | 2 +- lib/pleroma/workers/rich_media_worker.ex | 12 ++++- test/pleroma/web/rich_media/parser_test.exs | 12 ++--- test/support/http_request_mock.ex | 2 +- 8 files changed, 70 insertions(+), 50 deletions(-) create mode 100644 changelog.d/oban-rich-media-errors.fix diff --git a/changelog.d/oban-rich-media-errors.fix b/changelog.d/oban-rich-media-errors.fix new file mode 100644 index 0000000000..b904108db9 --- /dev/null +++ b/changelog.d/oban-rich-media-errors.fix @@ -0,0 +1 @@ +Prevent Rich Media backfill jobs from retrying in cases where it is likely they will fail again. diff --git a/lib/pleroma/web/rich_media/backfill.ex b/lib/pleroma/web/rich_media/backfill.ex index f1ee83bf0f..a66422e718 100644 --- a/lib/pleroma/web/rich_media/backfill.ex +++ b/lib/pleroma/web/rich_media/backfill.ex @@ -4,6 +4,7 @@ defmodule Pleroma.Web.RichMedia.Backfill do alias Pleroma.Web.RichMedia.Card + alias Pleroma.Web.RichMedia.Helpers alias Pleroma.Web.RichMedia.Parser alias Pleroma.Web.RichMedia.Parser.TTL alias Pleroma.Workers.RichMediaWorker @@ -16,8 +17,7 @@ defmodule Pleroma.Web.RichMedia.Backfill do Pleroma.Web.ActivityPub.ActivityPub ) - @spec run(map()) :: - :ok | {:error, {:invalid_metadata, any()} | :body_too_large | {:content, any()} | any()} + @spec run(map()) :: :ok | Parser.parse_errors() | Helpers.get_errors() def run(%{"url" => url} = args) do url_hash = Card.url_to_hash(url) @@ -33,22 +33,16 @@ def run(%{"url" => url} = args) do end warm_cache(url_hash, card) + :ok - {:error, {:invalid_metadata, fields}} -> - Logger.debug("Rich media incomplete or invalid metadata for #{url}: #{inspect(fields)}") + {:error, type} = error + when type in [:invalid_metadata, :body_too_large, :content_type, :validate] -> negative_cache(url_hash) + error - {:error, :body_too_large} -> - Logger.error("Rich media error for #{url}: :body_too_large") - negative_cache(url_hash) - - {:error, {:content_type, type}} -> - Logger.debug("Rich media error for #{url}: :content_type is #{type}") - negative_cache(url_hash) - - e -> - Logger.debug("Rich media error for #{url}: #{inspect(e)}") - {:error, e} + {:error, type} = error + when type in [:get, :head] -> + error end end diff --git a/lib/pleroma/web/rich_media/helpers.ex b/lib/pleroma/web/rich_media/helpers.ex index ea41bd285a..fba23c6577 100644 --- a/lib/pleroma/web/rich_media/helpers.ex +++ b/lib/pleroma/web/rich_media/helpers.ex @@ -5,26 +5,38 @@ defmodule Pleroma.Web.RichMedia.Helpers do alias Pleroma.Config + require Logger + + @type get_errors :: {:error, :body_too_large | :content_type | :head | :get} + + @spec rich_media_get(String.t()) :: {:ok, String.t()} | get_errors() def rich_media_get(url) do headers = [{"user-agent", Pleroma.Application.user_agent() <> "; Bot"}] - head_check = - case Pleroma.HTTP.head(url, headers, http_options()) do - # If the HEAD request didn't reach the server for whatever reason, - # we assume the GET that comes right after won't either - {:error, _} = e -> - e + with {_, {:ok, %Tesla.Env{status: 200, headers: headers}}} <- + {:head, Pleroma.HTTP.head(url, headers, http_options())}, + {_, :ok} <- {:content_type, check_content_type(headers)}, + {_, :ok} <- {:content_length, 
check_content_length(headers)}, + {_, {:ok, %Tesla.Env{status: 200, body: body}}} <- + {:get, Pleroma.HTTP.get(url, headers, http_options())} do + {:ok, body} + else + {:head, _} -> + Logger.debug("Rich media error for #{url}: HTTP HEAD failed") + {:error, :head} - {:ok, %Tesla.Env{status: 200, headers: headers}} -> - with :ok <- check_content_type(headers), - :ok <- check_content_length(headers), - do: :ok + {:content_type, {_, type}} -> + Logger.debug("Rich media error for #{url}: content-type is #{type}") + {:error, :content_type} - _ -> - :ok - end + {:content_length, {_, length}} -> + Logger.debug("Rich media error for #{url}: content-length is #{length}") + {:error, :body_too_large} - with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, http_options()) + {:get, _} -> + Logger.debug("Rich media error for #{url}: HTTP GET failed") + {:error, :get} + end end defp check_content_type(headers) do @@ -32,7 +44,7 @@ defp check_content_type(headers) do {_, content_type} -> case Plug.Conn.Utils.media_type(content_type) do {:ok, "text", "html", _} -> :ok - _ -> {:error, {:content_type, content_type}} + _ -> {:error, content_type} end _ -> @@ -47,7 +59,7 @@ defp check_content_length(headers) do {_, maybe_content_length} -> case Integer.parse(maybe_content_length) do {content_length, ""} when content_length <= max_body -> :ok - {_, ""} -> {:error, :body_too_large} + {_, ""} -> {:error, maybe_content_length} _ -> :ok end diff --git a/lib/pleroma/web/rich_media/parser.ex b/lib/pleroma/web/rich_media/parser.ex index 7f6b5d388a..a3a522d7ab 100644 --- a/lib/pleroma/web/rich_media/parser.ex +++ b/lib/pleroma/web/rich_media/parser.ex @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.RichMedia.Parser do + alias Pleroma.Web.RichMedia.Helpers require Logger @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) @@ -11,24 +12,26 @@ defp parsers do Pleroma.Config.get([:rich_media, :parsers]) end - def parse(nil), do: nil + @type parse_errors :: {:error, :rich_media_disabled | :validate} - @spec parse(String.t()) :: {:ok, map()} | {:error, any()} - def parse(url) do + @spec parse(String.t()) :: + {:ok, map()} | parse_errors() | Helpers.get_errors() + def parse(url) when is_binary(url) do with {_, true} <- {:config, @config_impl.get([:rich_media, :enabled])}, - :ok <- validate_page_url(url), - {:ok, data} <- parse_url(url) do + {_, :ok} <- {:validate, validate_page_url(url)}, + {_, {:ok, data}} <- {:parse, parse_url(url)} do data = Map.put(data, "url", url) {:ok, data} else {:config, _} -> {:error, :rich_media_disabled} - e -> e + {:validate, _} -> {:error, :validate} + {:parse, error} -> error end end defp parse_url(url) do - with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url), - {:ok, html} <- Floki.parse_document(html) do + with {:ok, body} <- Helpers.rich_media_get(url), + {:ok, html} <- Floki.parse_document(body) do html |> maybe_parse() |> clean_parsed_data() @@ -50,8 +53,8 @@ defp check_parsed_data(%{"title" => title} = data) {:ok, data} end - defp check_parsed_data(data) do - {:error, {:invalid_metadata, data}} + defp check_parsed_data(_data) do + {:error, :invalid_metadata} end defp clean_parsed_data(data) do diff --git a/lib/pleroma/web/rich_media/parsers/o_embed.ex b/lib/pleroma/web/rich_media/parsers/o_embed.ex index 0f303176ce..35ec144265 100644 --- a/lib/pleroma/web/rich_media/parsers/o_embed.ex +++ b/lib/pleroma/web/rich_media/parsers/o_embed.ex @@ -22,7 +22,7 @@ defp 
get_oembed_url([{"link", attributes, _children} | _]) do end defp get_oembed_data(url) do - with {:ok, %Tesla.Env{body: json}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url) do + with {:ok, json} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url) do Jason.decode(json) end end diff --git a/lib/pleroma/workers/rich_media_worker.ex b/lib/pleroma/workers/rich_media_worker.ex index ecc980a28a..0db97633f3 100644 --- a/lib/pleroma/workers/rich_media_worker.ex +++ b/lib/pleroma/workers/rich_media_worker.ex @@ -14,7 +14,17 @@ def perform(%Job{args: %{"op" => "expire", "url" => url} = _args}) do end def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do - Backfill.run(args) + case Backfill.run(args) do + :ok -> + :ok + + {:error, type} + when type in [:invalid_metadata, :body_too_large, :content_type, :validate] -> + :cancel + + error -> + {:error, error} + end end @impl Oban.Worker diff --git a/test/pleroma/web/rich_media/parser_test.exs b/test/pleroma/web/rich_media/parser_test.exs index a5f2563a20..8fd75b57a6 100644 --- a/test/pleroma/web/rich_media/parser_test.exs +++ b/test/pleroma/web/rich_media/parser_test.exs @@ -20,7 +20,7 @@ test "returns error when no metadata present" do end test "doesn't just add a title" do - assert {:error, {:invalid_metadata, _}} = Parser.parse("https://example.com/non-ogp") + assert {:error, :invalid_metadata} = Parser.parse("https://example.com/non-ogp") end test "parses ogp" do @@ -96,7 +96,7 @@ test "rejects invalid OGP data" do end test "returns error if getting page was not successful" do - assert {:error, :overload} = Parser.parse("https://example.com/error") + assert {:error, :get} = Parser.parse("https://example.com/error") end test "does a HEAD request to check if the body is too large" do @@ -104,17 +104,17 @@ test "does a HEAD request to check if the body is too large" do end test "does a HEAD request to check if the body is html" do - assert {:error, {:content_type, _}} = Parser.parse("https://example.com/pdf-file") + assert {:error, :content_type} = Parser.parse("https://example.com/pdf-file") end test "refuses to crawl incomplete URLs" do url = "example.com/ogp" - assert :error == Parser.parse(url) + assert {:error, :validate} == Parser.parse(url) end test "refuses to crawl malformed URLs" do url = "example.com[]/ogp" - assert :error == Parser.parse(url) + assert {:error, :validate} == Parser.parse(url) end test "refuses to crawl URLs of private network from posts" do @@ -126,7 +126,7 @@ test "refuses to crawl URLs of private network from posts" do "https://pleroma.local/notice/9kCP7V" ] |> Enum.each(fn url -> - assert :error == Parser.parse(url) + assert {:error, :validate} == Parser.parse(url) end) end diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index 20e410424b..ed044cf989 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -1724,7 +1724,7 @@ def post(url, query, body, headers) do ] def head(url, _query, _body, _headers) when url in @rich_media_mocks do - {:ok, %Tesla.Env{status: 404, body: ""}} + {:ok, %Tesla.Env{status: 200, body: ""}} end def head("https://example.com/pdf-file", _, _, _) do From 1e0d5934d53f95ac7737e844cf060d2cfe1be98d Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 17 Jul 2024 12:51:38 -0400 Subject: [PATCH 11/22] Fix return for cancelling job --- lib/pleroma/workers/rich_media_worker.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pleroma/workers/rich_media_worker.ex 
b/lib/pleroma/workers/rich_media_worker.ex index 0db97633f3..a94a85ec71 100644 --- a/lib/pleroma/workers/rich_media_worker.ex +++ b/lib/pleroma/workers/rich_media_worker.ex @@ -20,7 +20,7 @@ def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do {:error, type} when type in [:invalid_metadata, :body_too_large, :content_type, :validate] -> - :cancel + {:cancel, type} error -> {:error, error} From f753bd33805159a08cc8b447daa1c9ace137deab Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 17 Jul 2024 13:12:51 -0400 Subject: [PATCH 12/22] Explicitly handle the GET and HEAD errors --- lib/pleroma/workers/rich_media_worker.ex | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/pleroma/workers/rich_media_worker.ex b/lib/pleroma/workers/rich_media_worker.ex index a94a85ec71..30f9d9e9ee 100644 --- a/lib/pleroma/workers/rich_media_worker.ex +++ b/lib/pleroma/workers/rich_media_worker.ex @@ -22,6 +22,10 @@ def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do when type in [:invalid_metadata, :body_too_large, :content_type, :validate] -> {:cancel, type} + {:error, type} + when type in [:get, :head] -> + {:error, type} + error -> {:error, error} end From c05cbaa9375660572ea9c720fa1825ab4c8b325c Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 17 Jul 2024 13:42:13 -0400 Subject: [PATCH 13/22] Dialyzer fix for RemoteFetcherWorker --- changelog.d/oban-fetcher-rejected.change | 2 +- lib/pleroma/object/fetcher.ex | 1 + lib/pleroma/workers/remote_fetcher_worker.ex | 5 +---- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/changelog.d/oban-fetcher-rejected.change b/changelog.d/oban-fetcher-rejected.change index 92e0836479..65f5c992dc 100644 --- a/changelog.d/oban-fetcher-rejected.change +++ b/changelog.d/oban-fetcher-rejected.change @@ -1 +1 @@ -Discard Remote Fetcher jobs which errored due to an MRF rejection +Discard Remote Fetcher jobs which errored due to an MRF rejection. diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex index af5642af43..c0f671dd43 100644 --- a/lib/pleroma/object/fetcher.ex +++ b/lib/pleroma/object/fetcher.ex @@ -59,6 +59,7 @@ def refetch_object(%Object{data: %{"id" => id}} = object) do end # Note: will create a Create activity, which we need internally at the moment. 
+ @spec fetch_object_from_id(String.t(), list()) :: {:ok, Object.t()} | {:error | :reject, any()} def fetch_object_from_id(id, options \\ []) do with {_, nil} <- {:fetch_object, Object.get_cached_by_ap_id(id)}, {_, true} <- {:allowed_depth, Federator.allowed_thread_distance?(options[:depth])}, diff --git a/lib/pleroma/workers/remote_fetcher_worker.ex b/lib/pleroma/workers/remote_fetcher_worker.ex index 6d777ae946..1c8874b59f 100644 --- a/lib/pleroma/workers/remote_fetcher_worker.ex +++ b/lib/pleroma/workers/remote_fetcher_worker.ex @@ -13,7 +13,7 @@ def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do {:ok, _object} -> :ok - {:rejected, reason} -> + {:reject, reason} -> {:cancel, reason} {:error, :forbidden} -> @@ -27,9 +27,6 @@ def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do {:error, _} = e -> e - - e -> - {:error, e} end end From 6cd3f9042f2ad208a2a3b08f3db2918d10ab1cc0 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Wed, 17 Jul 2024 13:51:56 -0400 Subject: [PATCH 14/22] Add docs for fix_mrf_policies --- changelog.d/fix-mrfs.add | 2 +- docs/administration/CLI_tasks/config.md | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/changelog.d/fix-mrfs.add b/changelog.d/fix-mrfs.add index a38830109d..2a0fb07680 100644 --- a/changelog.d/fix-mrfs.add +++ b/changelog.d/fix-mrfs.add @@ -1 +1 @@ -Added a Mix task "pleroma.config fix_mrf_policies" which will remove erroneous MRF policies from ConfigDB +Added a Mix task "pleroma.config fix_mrf_policies" which will remove erroneous MRF policies from ConfigDB. diff --git a/docs/administration/CLI_tasks/config.md b/docs/administration/CLI_tasks/config.md index 7c167ec5d4..13d671a7e8 100644 --- a/docs/administration/CLI_tasks/config.md +++ b/docs/administration/CLI_tasks/config.md @@ -154,4 +154,19 @@ This forcibly removes all saved values in the database. ```sh mix pleroma.config [--force] reset + ``` + +## Remove invalid MRF modules from the database + +This forcibly removes any enabled MRF that does not exist and will fix the ability of the instance to start. + +=== "OTP" + ```sh + ./bin/pleroma_ctl config fix_mrf_policies + ``` + +=== "From Source" + ```sh + mix pleroma.config fix_mrf_policies + ``` \ No newline at end of file From 1f3ac66844e3a5f3e85aa0fb2afd23a2bc706f8b Mon Sep 17 00:00:00 2001 From: Mint Date: Thu, 18 Jul 2024 18:00:40 +0300 Subject: [PATCH 15/22] Transmogrifier: handle non-validate errors on incoming Delete activities This should fix WithClauseError resulting in Oban jobs for processing incoming deletes being retried without getting cancelled when those deletes are MRF rejected. 
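A note on why the distinction matters: when a `perform/1` callback raises (for example the `WithClauseError` described above), Oban records an error and retries the job until `max_attempts` is reached, whereas returning `{:cancel, reason}` stops it immediately and marks it cancelled. Below is a minimal sketch of that mapping, assuming a simplified worker; the real handling in Pleroma goes through its ReceiverWorker and the Transmogrifier.

```elixir
defmodule MyApp.Workers.FederationReceiverSketch do
  @moduledoc false
  # Hypothetical worker illustrating the cancel-vs-retry mapping; not Pleroma's actual module.
  use Oban.Worker, queue: :federator_incoming, max_attempts: 5

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"params" => params}}) do
    case handle_incoming(params) do
      {:ok, _activity} ->
        :ok

      # Unrecoverable: MRF rejected the activity, so retrying cannot succeed.
      {:reject, reason} ->
        {:cancel, reason}

      # Transient failure: return an error tuple so Oban retries with backoff.
      {:error, reason} ->
        {:error, reason}
    end
  end

  # Stand-in for the real incoming-activity pipeline.
  defp handle_incoming(_params), do: {:ok, :noop}
end
```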
--- changelog.d/handle-non-validate-delete-errors.change | 1 + lib/pleroma/web/activity_pub/transmogrifier.ex | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/handle-non-validate-delete-errors.change diff --git a/changelog.d/handle-non-validate-delete-errors.change b/changelog.d/handle-non-validate-delete-errors.change new file mode 100644 index 0000000000..94adb0e988 --- /dev/null +++ b/changelog.d/handle-non-validate-delete-errors.change @@ -0,0 +1 @@ +Transmogrifier: handle non-validate errors on incoming Delete activities diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 4d851559f9..9b6ecb3a92 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -530,6 +530,7 @@ def handle_incoming( else _ -> e end + e -> {:error, e} end end From d3c2180181c39fdfd52691ffcaf289b86de20a71 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Thu, 18 Jul 2024 12:12:34 -0400 Subject: [PATCH 16/22] Formatting --- lib/pleroma/web/activity_pub/transmogrifier.ex | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 9b6ecb3a92..2f8a7f8f27 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -530,7 +530,9 @@ def handle_incoming( else _ -> e end - e -> {:error, e} + + e -> + {:error, e} end end From 62280a3b9f556f7eb5c2940b0ddc20c79fbcc758 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 20 Jul 2024 13:32:56 -0400 Subject: [PATCH 17/22] Cancel queued (undelivered) publishing jobs for an activity when deleting that activity. --- changelog.d/oban-cancel-federation.add | 1 + lib/pleroma/web/common_api.ex | 12 +++++ test/pleroma/web/common_api_test.exs | 68 +++++++++++++++++++++++++- 3 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 changelog.d/oban-cancel-federation.add diff --git a/changelog.d/oban-cancel-federation.add b/changelog.d/oban-cancel-federation.add new file mode 100644 index 0000000000..39473e898c --- /dev/null +++ b/changelog.d/oban-cancel-federation.add @@ -0,0 +1 @@ +Deleting a post will cancel queued publishing jobs for the deleted activity. 
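For context on the helper introduced in the diff below: `Oban.cancel_all_jobs/1` takes an `Ecto.Query`, moves every matching job that has not already finished into the `cancelled` state, and returns `{:ok, count}`, which is what lets `maybe_cancel_jobs/1` be matched as `{:ok, _}` inside the `with` chain. A rough IEx sketch for inspecting the jobs such a query would target, assuming a running Pleroma node and an illustrative activity id:

```elixir
import Ecto.Query

# Illustrative AP id; substitute the id of a real activity.
ap_id = "https://pleroma.example/activities/9y2mCUnDpF"

undelivered =
  Oban.Job
  |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
  |> where([j], j.args["op"] == "publish_one")
  |> where([j], j.args["params"]["id"] == ^ap_id)
  |> Pleroma.Repo.all()

length(undelivered)
```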
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index 09bedcd2b3..d43c465203 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -19,6 +19,7 @@ defmodule Pleroma.Web.CommonAPI do alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.CommonAPI.ActivityDraft + import Ecto.Query, only: [where: 3] import Pleroma.Web.Gettext import Pleroma.Web.CommonAPI.Utils @@ -156,6 +157,7 @@ def reject_follow_request(follower, followed) do def delete(activity_id, user) do with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <- {:find_activity, Activity.get_by_id(activity_id, filter: [])}, + {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(activity)}, {_, %Object{} = object, _} <- {:find_object, Object.normalize(activity, fetch: false), activity}, true <- User.privileged?(user, :messages_delete) || user.ap_id == object.data["actor"], @@ -671,4 +673,14 @@ def get_user(ap_id, fake_record_fallback \\ true) do nil end end + + defp maybe_cancel_jobs(%Activity{data: %{"id" => ap_id}}) do + Oban.Job + |> where([j], j.worker == "Pleroma.Workers.PublisherWorker") + |> where([j], j.args["op"] == "publish_one") + |> where([j], j.args["params"]["id"] == ^ap_id) + |> Oban.cancel_all_jobs() + end + + defp maybe_cancel_jobs(_), do: {:ok, 0} end diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 58cd1fd42c..a2c0432b8e 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -13,6 +13,7 @@ defmodule Pleroma.Web.CommonAPITest do alias Pleroma.Object alias Pleroma.Repo alias Pleroma.Rule + alias Pleroma.Tests.ObanHelpers alias Pleroma.UnstubbedConfigMock, as: ConfigMock alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub @@ -22,7 +23,7 @@ defmodule Pleroma.Web.CommonAPITest do alias Pleroma.Web.CommonAPI alias Pleroma.Workers.PollWorker - import Ecto.Query, only: [from: 2] + import Ecto.Query, only: [from: 2, where: 3] import Mock import Mox import Pleroma.Factory @@ -1920,4 +1921,69 @@ test "it does not boost if group is blocking poster", %{poster: poster, group: g assert [] = announces end end + + describe "Oban jobs are cancelled" do + setup do: clear_config([:instance, :federating], true) + + test "when deleting posts" do + user = insert(:user) + + follower_one = + insert(:user, %{ + local: false, + nickname: "nick1@domain.com", + ap_id: "https://domain.com/users/nick1", + inbox: "https://domain.com/users/nick1/inbox", + shared_inbox: "https://domain.com/inbox" + }) + + follower_two = + insert(:user, %{ + local: false, + nickname: "nick2@example.com", + ap_id: "https://example.com/users/nick2", + inbox: "https://example.com/users/nick2/inbox", + shared_inbox: "https://example.com/inbox" + }) + + {:ok, _, _} = Pleroma.User.follow(follower_one, user) + {:ok, _, _} = Pleroma.User.follow(follower_two, user) + + {:ok, %{data: %{"id" => ap_id}} = activity} = + CommonAPI.post(user, %{status: "Happy Friday everyone!"}) + + # Generate the publish_one jobs + ObanHelpers.perform_all() + + publish_one_jobs = + all_enqueued() + |> Enum.filter(fn job -> + match?( + %{ + state: "available", + queue: "federator_outgoing", + worker: "Pleroma.Workers.PublisherWorker", + args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}} + }, + job + ) + end) + + assert length(publish_one_jobs) == 2 + + # The delete should have triggered cancelling the publish_one jobs + assert {:ok, _delete} = CommonAPI.delete(activity.id, user) + + # all_enqueued/1 will not return 
cancelled jobs + cancelled_jobs = + Oban.Job + |> where([j], j.worker == "Pleroma.Workers.PublisherWorker") + |> where([j], j.state == "cancelled") + |> where([j], j.args["op"] == "publish_one") + |> where([j], j.args["params"]["id"] == ^ap_id) + |> Pleroma.Repo.all() + + assert length(cancelled_jobs) == 2 + end + end end From 3f5c9f003b065ed95d0aa9ff05fc41ea7484f38e Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 20 Jul 2024 14:17:35 -0400 Subject: [PATCH 18/22] Reorganize test group to have shared a shared setup --- test/pleroma/web/common_api_test.exs | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index a2c0432b8e..75a30191aa 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -1923,12 +1923,12 @@ test "it does not boost if group is blocking poster", %{poster: poster, group: g end describe "Oban jobs are cancelled" do - setup do: clear_config([:instance, :federating], true) + setup do + clear_config([:instance, :federating], true) - test "when deleting posts" do - user = insert(:user) + local_user = insert(:user) - follower_one = + remote_one = insert(:user, %{ local: false, nickname: "nick1@domain.com", @@ -1937,7 +1937,7 @@ test "when deleting posts" do shared_inbox: "https://domain.com/inbox" }) - follower_two = + remote_two = insert(:user, %{ local: false, nickname: "nick2@example.com", @@ -1946,11 +1946,19 @@ test "when deleting posts" do shared_inbox: "https://example.com/inbox" }) - {:ok, _, _} = Pleroma.User.follow(follower_one, user) - {:ok, _, _} = Pleroma.User.follow(follower_two, user) + %{local_user: local_user, remote_one: remote_one, remote_two: remote_two} + end + + test "when deleting posts", %{ + local_user: local_user, + remote_one: remote_one, + remote_two: remote_two + } do + {:ok, _, _} = Pleroma.User.follow(remote_one, local_user) + {:ok, _, _} = Pleroma.User.follow(remote_two, local_user) {:ok, %{data: %{"id" => ap_id}} = activity} = - CommonAPI.post(user, %{status: "Happy Friday everyone!"}) + CommonAPI.post(local_user, %{status: "Happy Friday everyone!"}) # Generate the publish_one jobs ObanHelpers.perform_all() @@ -1972,7 +1980,7 @@ test "when deleting posts" do assert length(publish_one_jobs) == 2 # The delete should have triggered cancelling the publish_one jobs - assert {:ok, _delete} = CommonAPI.delete(activity.id, user) + assert {:ok, _delete} = CommonAPI.delete(activity.id, local_user) # all_enqueued/1 will not return cancelled jobs cancelled_jobs = From 86ae00f9da4d9e39d8f635d51b1139529b6fb9dc Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 20 Jul 2024 14:53:53 -0400 Subject: [PATCH 19/22] Support cancelling jobs when Unfavoriting --- changelog.d/oban-cancel-federation.add | 2 +- lib/pleroma/web/common_api.ex | 1 + test/pleroma/web/common_api_test.exs | 44 ++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 1 deletion(-) diff --git a/changelog.d/oban-cancel-federation.add b/changelog.d/oban-cancel-federation.add index 39473e898c..aae8bd84f2 100644 --- a/changelog.d/oban-cancel-federation.add +++ b/changelog.d/oban-cancel-federation.add @@ -1 +1 @@ -Deleting a post will cancel queued publishing jobs for the deleted activity. +Deleting or Unfavoriting a post will cancel undelivered publishing jobs for the original activity. 
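The tests in this series assert on cancelled jobs by querying `Oban.Job` directly because, as the inline comments note, `all_enqueued/1` from `Oban.Testing` does not return cancelled jobs. A hypothetical helper that captures the repeated assertion, assuming it lives inside the test module above (which already imports `where/3` and the ExUnit assertions):

```elixir
# Hypothetical convenience wrapper; not part of the patch.
defp assert_cancelled_publish_jobs(ap_id, expected) do
  cancelled =
    Oban.Job
    |> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
    |> where([j], j.state == "cancelled")
    |> where([j], j.args["op"] == "publish_one")
    |> where([j], j.args["params"]["id"] == ^ap_id)
    |> Pleroma.Repo.all()

  assert length(cancelled) == expected
end
```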
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index d43c465203..9f730d8112 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -277,6 +277,7 @@ def unfavorite(id, user) do {:find_activity, Activity.get_by_id(id)}, %Object{} = note <- Object.normalize(activity, fetch: false), %Activity{} = like <- Utils.get_existing_like(user.ap_id, note), + {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(like)}, {:ok, undo, _} <- Builder.undo(user, like), {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do {:ok, activity} diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 75a30191aa..500c4ba3ab 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -1993,5 +1993,49 @@ test "when deleting posts", %{ assert length(cancelled_jobs) == 2 end + + test "when unfavoriting posts", %{ + local_user: local_user, + remote_one: remote_user + } do + {:ok, activity} = + CommonAPI.post(remote_user, %{status: "I like turtles!"}) + + {:ok, %{data: %{"id" => ap_id}} = _favorite} = + CommonAPI.favorite(local_user, activity.id) + + # Generate the publish_one jobs + ObanHelpers.perform_all() + + publish_one_jobs = + all_enqueued() + |> Enum.filter(fn job -> + match?( + %{ + state: "available", + queue: "federator_outgoing", + worker: "Pleroma.Workers.PublisherWorker", + args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}} + }, + job + ) + end) + + assert length(publish_one_jobs) == 1 + + # The unfavorite should have triggered cancelling the publish_one jobs + assert {:ok, _unfavorite} = CommonAPI.unfavorite(activity.id, local_user) + + # all_enqueued/1 will not return cancelled jobs + cancelled_jobs = + Oban.Job + |> where([j], j.worker == "Pleroma.Workers.PublisherWorker") + |> where([j], j.state == "cancelled") + |> where([j], j.args["op"] == "publish_one") + |> where([j], j.args["params"]["id"] == ^ap_id) + |> Pleroma.Repo.all() + + assert length(cancelled_jobs) == 1 + end end end From 304b7f5093c7e4ea096a3fec85e0c9339f745db0 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 20 Jul 2024 15:06:19 -0400 Subject: [PATCH 20/22] Support cancelling jobs when Unrepeating --- changelog.d/oban-cancel-federation.add | 2 +- lib/pleroma/web/common_api.ex | 1 + test/pleroma/web/common_api_test.exs | 48 ++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 1 deletion(-) diff --git a/changelog.d/oban-cancel-federation.add b/changelog.d/oban-cancel-federation.add index aae8bd84f2..1b9bab5164 100644 --- a/changelog.d/oban-cancel-federation.add +++ b/changelog.d/oban-cancel-federation.add @@ -1 +1 @@ -Deleting or Unfavoriting a post will cancel undelivered publishing jobs for the original activity. +Deleting, Unfavoriting, or Unrepeating a post will cancel undelivered publishing jobs for the original activity. 
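The same cancellation applies to the Announce handled below: the queued `publish_one` jobs carry the AP id of the activity being undone in their params, which is what the query in `maybe_cancel_jobs/1` matches on. A rough picture of such a job before delivery; only the fields the cancellation query and tests rely on are shown, and the id is made up:

```elixir
%Oban.Job{
  worker: "Pleroma.Workers.PublisherWorker",
  queue: "federator_outgoing",
  state: "available",
  args: %{
    "op" => "publish_one",
    # AP id of the Like/Announce/EmojiReact being delivered (illustrative value).
    "params" => %{"id" => "https://pleroma.example/activities/AM2aZcEzR6"}
    # Real jobs also carry delivery details such as the target inbox (assumption).
  }
}
```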
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index 9f730d8112..de3ec85feb 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -225,6 +225,7 @@ def unrepeat(id, user) do {:find_activity, Activity.get_by_id(id)}, %Object{} = note <- Object.normalize(activity, fetch: false), %Activity{} = announce <- Utils.get_existing_announce(user.ap_id, note), + {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(announce)}, {:ok, undo, _} <- Builder.undo(user, announce), {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do {:ok, activity} diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 500c4ba3ab..62fa45552a 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -2037,5 +2037,53 @@ test "when unfavoriting posts", %{ assert length(cancelled_jobs) == 1 end + + test "when unboosting posts", %{ + local_user: local_user, + remote_one: remote_one, + remote_two: remote_two + } do + {:ok, _, _} = Pleroma.User.follow(remote_one, local_user) + {:ok, _, _} = Pleroma.User.follow(remote_two, local_user) + + {:ok, activity} = + CommonAPI.post(remote_one, %{status: "This is an unpleasant post"}) + + {:ok, %{data: %{"id" => ap_id}} = _repeat} = + CommonAPI.repeat(activity.id, local_user) + + # Generate the publish_one jobs + ObanHelpers.perform_all() + + publish_one_jobs = + all_enqueued() + |> Enum.filter(fn job -> + match?( + %{ + state: "available", + queue: "federator_outgoing", + worker: "Pleroma.Workers.PublisherWorker", + args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}} + }, + job + ) + end) + + assert length(publish_one_jobs) == 2 + + # The unrepeat should have triggered cancelling the publish_one jobs + assert {:ok, _unfavorite} = CommonAPI.unrepeat(activity.id, local_user) + + # all_enqueued/1 will not return cancelled jobs + cancelled_jobs = + Oban.Job + |> where([j], j.worker == "Pleroma.Workers.PublisherWorker") + |> where([j], j.state == "cancelled") + |> where([j], j.args["op"] == "publish_one") + |> where([j], j.args["params"]["id"] == ^ap_id) + |> Pleroma.Repo.all() + + assert length(cancelled_jobs) == 2 + end end end From d44765bc1303bd4b6fcb066197ccf66b758cdc99 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 20 Jul 2024 15:14:46 -0400 Subject: [PATCH 21/22] Support cancelling jobs when Unreacting --- changelog.d/oban-cancel-federation.add | 2 +- lib/pleroma/web/common_api.ex | 1 + test/pleroma/web/common_api_test.exs | 48 ++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 1 deletion(-) diff --git a/changelog.d/oban-cancel-federation.add b/changelog.d/oban-cancel-federation.add index 1b9bab5164..1481936802 100644 --- a/changelog.d/oban-cancel-federation.add +++ b/changelog.d/oban-cancel-federation.add @@ -1 +1 @@ -Deleting, Unfavoriting, or Unrepeating a post will cancel undelivered publishing jobs for the original activity. +Deleting, Unfavoriting, Unrepeating, or Unreacting will cancel undelivered publishing jobs for the original activity. 
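One small idiom worth calling out, since it appears in each of these `with` chains: every step is wrapped in a tagged tuple such as `{:cancel_jobs, maybe_cancel_jobs(...)}` so a failing clause can be identified in an `else` block. A generic, self-contained illustration of the pattern; the function names are stand-ins, not Pleroma APIs:

```elixir
defmodule TaggedWithExample do
  # fetch/1 and cancel_jobs/1 are placeholders for real lookups.
  def undo(id) do
    with {:find_activity, {:ok, activity}} <- {:find_activity, fetch(id)},
         {:cancel_jobs, {:ok, _count}} <- {:cancel_jobs, cancel_jobs(activity)} do
      {:ok, activity}
    else
      # The failing step's tag travels with the error, so callers can tell
      # a missing activity apart from a failed job cancellation.
      {:find_activity, error} -> {:error, {:not_found, error}}
      {:cancel_jobs, error} -> {:error, {:cancel_failed, error}}
    end
  end

  defp fetch(id), do: {:ok, %{id: id}}
  defp cancel_jobs(_activity), do: {:ok, 0}
end
```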
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index de3ec85feb..36e7efd8df 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -302,6 +302,7 @@ def react_with_emoji(id, user, emoji) do def unreact_with_emoji(id, user, emoji) do with %Activity{} = reaction_activity <- Utils.get_latest_reaction(id, user, emoji), + {_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(reaction_activity)}, {:ok, undo, _} <- Builder.undo(user, reaction_activity), {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do {:ok, activity} diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index 62fa45552a..4c1add82e1 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -2085,5 +2085,53 @@ test "when unboosting posts", %{ assert length(cancelled_jobs) == 2 end + + test "when unreacting to posts", %{ + local_user: local_user, + remote_one: remote_one, + remote_two: remote_two + } do + {:ok, _, _} = Pleroma.User.follow(remote_one, local_user) + {:ok, _, _} = Pleroma.User.follow(remote_two, local_user) + + {:ok, activity} = + CommonAPI.post(remote_one, %{status: "Gang gang!!!!"}) + + {:ok, %{data: %{"id" => ap_id}} = _react} = + CommonAPI.react_with_emoji(activity.id, local_user, "👍") + + # Generate the publish_one jobs + ObanHelpers.perform_all() + + publish_one_jobs = + all_enqueued() + |> Enum.filter(fn job -> + match?( + %{ + state: "available", + queue: "federator_outgoing", + worker: "Pleroma.Workers.PublisherWorker", + args: %{"op" => "publish_one", "params" => %{"id" => ^ap_id}} + }, + job + ) + end) + + assert length(publish_one_jobs) == 2 + + # The unreact should have triggered cancelling the publish_one jobs + assert {:ok, _unreact} = CommonAPI.unreact_with_emoji(activity.id, local_user, "👍") + + # all_enqueued/1 will not return cancelled jobs + cancelled_jobs = + Oban.Job + |> where([j], j.worker == "Pleroma.Workers.PublisherWorker") + |> where([j], j.state == "cancelled") + |> where([j], j.args["op"] == "publish_one") + |> where([j], j.args["params"]["id"] == ^ap_id) + |> Pleroma.Repo.all() + + assert length(cancelled_jobs) == 2 + end end end From fb654acfadfeec00a1a52e2af96e922dc4b88b01 Mon Sep 17 00:00:00 2001 From: Mark Felder Date: Sat, 20 Jul 2024 23:48:54 -0400 Subject: [PATCH 22/22] Fix OpenGraph and Twitter metadata providers when parsing objects with no content or summary fields. --- changelog.d/metadata-provider-empty-post.fix | 1 + lib/pleroma/web/metadata/utils.ex | 5 +++- test/pleroma/web/metadata/utils_test.exs | 28 +++++++++++++++----- 3 files changed, 27 insertions(+), 7 deletions(-) create mode 100644 changelog.d/metadata-provider-empty-post.fix diff --git a/changelog.d/metadata-provider-empty-post.fix b/changelog.d/metadata-provider-empty-post.fix new file mode 100644 index 0000000000..8d6341c6c7 --- /dev/null +++ b/changelog.d/metadata-provider-empty-post.fix @@ -0,0 +1 @@ +Fix OpenGraph and Twitter metadata providers when parsing objects with no content or summary fields. 
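The utils.ex change below narrows the content clause with `when is_binary(content) and content != ""` and adds a catch-all head that returns an empty string, so objects with no usable summary or content produce an empty description instead of being piped into the HTML scrubber. A toy version of that clause-ordering technique, using invented names rather than the actual Pleroma functions:

  defmodule DescriptionSketch do
    # Guarded heads only match non-empty binaries; the catch-all turns
    # everything else (nil values, missing keys, empty strings) into "".
    def describe(%{"summary" => summary}) when is_binary(summary) and summary != "",
      do: summary

    def describe(%{"content" => content}) when is_binary(content) and content != "",
      do: content

    def describe(%{}), do: ""
  end

  # DescriptionSketch.describe(%{"summary" => "", "content" => "cofe"}) #=> "cofe"
  # DescriptionSketch.describe(%{"content" => nil, "summary" => nil})   #=> ""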
diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex index 80a8be9a2d..8f61ace241 100644 --- a/lib/pleroma/web/metadata/utils.ex +++ b/lib/pleroma/web/metadata/utils.ex @@ -25,11 +25,14 @@ def scrub_html_and_truncate(%{data: %{"summary" => summary}} = object) |> scrub_html_and_truncate_object_field(object) end - def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do + def scrub_html_and_truncate(%{data: %{"content" => content}} = object) + when is_binary(content) and content != "" do content |> scrub_html_and_truncate_object_field(object) end + def scrub_html_and_truncate(%{}), do: "" + def scrub_html_and_truncate(content, max_length \\ 200, omission \\ "...") when is_binary(content) do content diff --git a/test/pleroma/web/metadata/utils_test.exs b/test/pleroma/web/metadata/utils_test.exs index 3daf852fba..9bc02dadf6 100644 --- a/test/pleroma/web/metadata/utils_test.exs +++ b/test/pleroma/web/metadata/utils_test.exs @@ -8,7 +8,7 @@ defmodule Pleroma.Web.Metadata.UtilsTest do alias Pleroma.Web.Metadata.Utils describe "scrub_html_and_truncate/1" do - test "it returns content text without encode HTML if summary is nil" do + test "it returns content text without HTML if summary is nil" do user = insert(:user) note = @@ -17,14 +17,14 @@ test "it returns content text without encode HTML if summary is nil" do "actor" => user.ap_id, "id" => "https://pleroma.gov/objects/whatever", "summary" => nil, - "content" => "Pleroma's really cool!" + "content" => "Pleroma's really cool!
" } }) assert Utils.scrub_html_and_truncate(note) == "Pleroma's really cool!" end - test "it returns context text without encode HTML if summary is empty" do + test "it returns content text without HTML if summary is empty" do user = insert(:user) note = @@ -33,14 +33,14 @@ test "it returns context text without encode HTML if summary is empty" do "actor" => user.ap_id, "id" => "https://pleroma.gov/objects/whatever", "summary" => "", - "content" => "Pleroma's really cool!" + "content" => "Pleroma's really cool!
" } }) assert Utils.scrub_html_and_truncate(note) == "Pleroma's really cool!" end - test "it returns summary text without encode HTML if summary is filled" do + test "it returns summary text without HTML if summary is filled" do user = insert(:user) note = @@ -48,7 +48,7 @@ test "it returns summary text without encode HTML if summary is filled" do data: %{ "actor" => user.ap_id, "id" => "https://pleroma.gov/objects/whatever", - "summary" => "Public service announcement on caffeine consumption", + "summary" => "Public service announcement on caffeine consumption
", "content" => "cofe" } }) @@ -57,6 +57,22 @@ test "it returns summary text without encode HTML if summary is filled" do "Public service announcement on caffeine consumption" end + test "it returns empty string if summary and content are absent" do + user = insert(:user) + + note = + insert(:note, %{ + data: %{ + "actor" => user.ap_id, + "id" => "https://pleroma.gov/objects/whatever", + "content" => nil, + "summary" => nil + } + }) + + assert Utils.scrub_html_and_truncate(note) == "" + end + test "it does not return old content after editing" do user = insert(:user)