diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index dab52e4c6c..21d7b22425 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,8 +1,8 @@
-image: git.pleroma.social:5050/pleroma/pleroma/ci-base
+image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24
variables: &global_variables
# Only used for the release
- ELIXIR_VER: 1.12.3
+ ELIXIR_VER: 1.13.4
POSTGRES_DB: pleroma_test
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
@@ -72,7 +72,7 @@ check-changelog:
tags:
- amd64
-build-1.12.3:
+build-1.13.4:
extends:
- .build_changes_policy
- .using-ci-base
@@ -85,7 +85,7 @@ build-1.15.7-otp-25:
- .build_changes_policy
- .using-ci-base
stage: build
- image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15
+ image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
allow_failure: true
script:
- mix compile --force
diff --git a/Dockerfile b/Dockerfile
index 69c3509de4..72461305ca 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
ARG ELIXIR_IMG=hexpm/elixir
-ARG ELIXIR_VER=1.12.3
-ARG ERLANG_VER=24.2.1
-ARG ALPINE_VER=3.17.0
+ARG ELIXIR_VER=1.13.4
+ARG ERLANG_VER=24.3.4.15
+ARG ALPINE_VER=3.17.5
FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build
diff --git a/README.md b/README.md
index efd2ef1419..fca76b758c 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-A fork of Pleroma/Rebased. More information soon.
+`pl`. A fork of Pleroma/Rebased. More information soon.
---
diff --git a/app.json b/app.json
index b0b28b9db4..c612a3ee75 100644
--- a/app.json
+++ b/app.json
@@ -1,10 +1,10 @@
{
- "name": "Rebased",
- "description": "Rebased, the recommended backend for Soapbox written in Elixir.",
+ "name": "pl",
+ "description": "Federated social media software, a fork of Pleroma/Rebased",
"keywords": [
"fediverse"
],
- "website": "https://soapbox.pub",
+ "website": "https://github.com/mkljczk/pl",
"dokku": {
"plugins": [
"postgres"
diff --git a/changelog.d/3904.security b/changelog.d/3904.security
new file mode 100644
index 0000000000..04836d4e8c
--- /dev/null
+++ b/changelog.d/3904.security
@@ -0,0 +1 @@
+HTTP Security: By default, don't allow unsafe-eval. The setting needs to be changed to allow Flash emulation.
diff --git a/changelog.d/3907.skip b/changelog.d/3907.skip
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/changelog.d/add-ipfs-upload.add b/changelog.d/add-ipfs-upload.add
new file mode 100644
index 0000000000..0cd1f2858b
--- /dev/null
+++ b/changelog.d/add-ipfs-upload.add
@@ -0,0 +1 @@
+Uploader: Add support for uploading attachments using IPFS
diff --git a/changelog.d/add-nsfw-mrf.add b/changelog.d/add-nsfw-mrf.add
new file mode 100644
index 0000000000..ce62c7ed01
--- /dev/null
+++ b/changelog.d/add-nsfw-mrf.add
@@ -0,0 +1 @@
+Add NSFW-detecting MRF
diff --git a/changelog.d/add-rbl-mrf.add b/changelog.d/add-rbl-mrf.add
new file mode 100644
index 0000000000..363270fb92
--- /dev/null
+++ b/changelog.d/add-rbl-mrf.add
@@ -0,0 +1 @@
+Add DNSRBL MRF
diff --git a/changelog.d/anti-mentionspam-mrf.add b/changelog.d/anti-mentionspam-mrf.add
new file mode 100644
index 0000000000..9466f85f43
--- /dev/null
+++ b/changelog.d/anti-mentionspam-mrf.add
@@ -0,0 +1 @@
+Add Anti-mention Spam MRF backported from Rebased
diff --git a/changelog.d/api-docs-2.skip b/changelog.d/api-docs-2.skip
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/changelog.d/auth-fetch-exception.add b/changelog.d/auth-fetch-exception.add
new file mode 100644
index 0000000000..98efb903eb
--- /dev/null
+++ b/changelog.d/auth-fetch-exception.add
@@ -0,0 +1 @@
+HTTPSignaturePlug: Add :authorized_fetch_mode_exceptions configuration
\ No newline at end of file
diff --git a/changelog.d/authorized-fetch-rejections.add b/changelog.d/authorized-fetch-rejections.add
new file mode 100644
index 0000000000..66e15a979b
--- /dev/null
+++ b/changelog.d/authorized-fetch-rejections.add
@@ -0,0 +1 @@
+Add an option to reject certain domains when authorized fetch is enabled.
diff --git a/changelog.d/bump-elixir.change b/changelog.d/bump-elixir.change
new file mode 100644
index 0000000000..afb25d4e78
--- /dev/null
+++ b/changelog.d/bump-elixir.change
@@ -0,0 +1 @@
+Elixir 1.13 is the minimum required version.
diff --git a/changelog.d/card-endpoint.remove b/changelog.d/card-endpoint.remove
new file mode 100644
index 0000000000..e09a24cf76
--- /dev/null
+++ b/changelog.d/card-endpoint.remove
@@ -0,0 +1 @@
+Mastodon API: Remove deprecated GET /api/v1/statuses/:id/card endpoint https://github.com/mastodon/mastodon/pull/11213
diff --git a/changelog.d/card-image-description.add b/changelog.d/card-image-description.add
new file mode 100644
index 0000000000..bf423ebb8a
--- /dev/null
+++ b/changelog.d/card-image-description.add
@@ -0,0 +1 @@
+Include image description in status media cards
\ No newline at end of file
diff --git a/changelog.d/description-meilisearch-type.skip b/changelog.d/description-meilisearch-type.skip
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/changelog.d/familiar-followers.add b/changelog.d/familiar-followers.add
new file mode 100644
index 0000000000..6e7ec9d257
--- /dev/null
+++ b/changelog.d/familiar-followers.add
@@ -0,0 +1 @@
+Implement `/api/v1/accounts/familiar_followers`
\ No newline at end of file
diff --git a/changelog.d/fep-2c59.add b/changelog.d/fep-2c59.add
new file mode 100644
index 0000000000..03e33cbd85
--- /dev/null
+++ b/changelog.d/fep-2c59.add
@@ -0,0 +1 @@
+Implement FEP-2c59, add "webfinger" to user actor
\ No newline at end of file
diff --git a/changelog.d/ffmpeg-limiter.add b/changelog.d/ffmpeg-limiter.add
new file mode 100644
index 0000000000..e4a5ef1969
--- /dev/null
+++ b/changelog.d/ffmpeg-limiter.add
@@ -0,0 +1 @@
+Framegrabs with ffmpeg will execute with a 5 second timeout and cache the URLs of failures with a TTL of 15 minutes to prevent excessive retries.
diff --git a/changelog.d/fix-webfinger-spoofing.security b/changelog.d/fix-webfinger-spoofing.security
new file mode 100644
index 0000000000..7b3c9490a8
--- /dev/null
+++ b/changelog.d/fix-webfinger-spoofing.security
@@ -0,0 +1 @@
+Fix webfinger spoofing.
diff --git a/changelog.d/instance-rules.add b/changelog.d/instance-rules.add
new file mode 100644
index 0000000000..42f3cbfa18
--- /dev/null
+++ b/changelog.d/instance-rules.add
@@ -0,0 +1 @@
+Add instance rules
\ No newline at end of file
diff --git a/changelog.d/logger-metadata.add b/changelog.d/logger-metadata.add
new file mode 100644
index 0000000000..6c627a972d
--- /dev/null
+++ b/changelog.d/logger-metadata.add
@@ -0,0 +1 @@
+Logger metadata is now attached to some logs to help with troubleshooting and analysis
diff --git a/changelog.d/mark-read.fix b/changelog.d/mark-read.fix
new file mode 100644
index 0000000000..346eb19e2a
--- /dev/null
+++ b/changelog.d/mark-read.fix
@@ -0,0 +1 @@
+The query for marking notifications as read has been simplified
diff --git a/changelog.d/mastodon_api_v2.add b/changelog.d/mastodon_api_v2.add
new file mode 100644
index 0000000000..d53aa35c44
--- /dev/null
+++ b/changelog.d/mastodon_api_v2.add
@@ -0,0 +1 @@
+Add new parameters to /api/v2/instance: configuration[accounts][max_pinned_statuses] and configuration[statuses][characters_reserved_per_url]
diff --git a/changelog.d/mediaproxy-http.fix b/changelog.d/mediaproxy-http.fix
new file mode 100644
index 0000000000..4ff6430e08
--- /dev/null
+++ b/changelog.d/mediaproxy-http.fix
@@ -0,0 +1 @@
+Ensure MediaProxy HTTP requests obey all the defined connection settings
diff --git a/changelog.d/missing-mrfs.add b/changelog.d/missing-mrfs.add
new file mode 100644
index 0000000000..6a17f9e1ae
--- /dev/null
+++ b/changelog.d/missing-mrfs.add
@@ -0,0 +1 @@
+Startup detection for configured MRF modules that are missing or incorrectly defined
diff --git a/changelog.d/oban-queues.change b/changelog.d/oban-queues.change
new file mode 100644
index 0000000000..16df6409a0
--- /dev/null
+++ b/changelog.d/oban-queues.change
@@ -0,0 +1 @@
+Oban queues have been refactored to simplify the queue design
diff --git a/changelog.d/pools.change b/changelog.d/pools.change
new file mode 100644
index 0000000000..3c689195a4
--- /dev/null
+++ b/changelog.d/pools.change
@@ -0,0 +1 @@
+HTTP connection pool adjustments
diff --git a/changelog.d/prometheus-docs.change b/changelog.d/prometheus-docs.change
new file mode 100644
index 0000000000..a9bd1e2e9f
--- /dev/null
+++ b/changelog.d/prometheus-docs.change
@@ -0,0 +1 @@
+Update the documentation for configuring Prometheus metrics.
diff --git a/changelog.d/promexdocs.add b/changelog.d/promexdocs.add
new file mode 100644
index 0000000000..dda972994a
--- /dev/null
+++ b/changelog.d/promexdocs.add
@@ -0,0 +1 @@
+PromEx documentation
diff --git a/changelog.d/qdrant_search.add b/changelog.d/qdrant_search.add
new file mode 100644
index 0000000000..9801131d1a
--- /dev/null
+++ b/changelog.d/qdrant_search.add
@@ -0,0 +1 @@
+Add Qdrant/OpenAI embedding search
diff --git a/changelog.d/realpath-over-readlink.fix b/changelog.d/realpath-over-readlink.fix
new file mode 100644
index 0000000000..479561b95d
--- /dev/null
+++ b/changelog.d/realpath-over-readlink.fix
@@ -0,0 +1 @@
+pleroma_ctl: Use realpath(1) instead of readlink(1)
diff --git a/changelog.d/receiverworker-error-handling.fix b/changelog.d/receiverworker-error-handling.fix
new file mode 100644
index 0000000000..f017a2bba0
--- /dev/null
+++ b/changelog.d/receiverworker-error-handling.fix
@@ -0,0 +1 @@
+ReceiverWorker: Make sure non-{:ok, _} is returned as {:error, …}
\ No newline at end of file
diff --git a/changelog.d/reply-to-deleted.change b/changelog.d/reply-to-deleted.change
new file mode 100644
index 0000000000..8b952ee7a5
--- /dev/null
+++ b/changelog.d/reply-to-deleted.change
@@ -0,0 +1 @@
+A 422 error is returned when attempting to reply to a deleted status
diff --git a/changelog.d/rich_media_refactor.change b/changelog.d/rich_media_refactor.change
new file mode 100644
index 0000000000..c0d4e3b0ad
--- /dev/null
+++ b/changelog.d/rich_media_refactor.change
@@ -0,0 +1 @@
+Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.
diff --git a/changelog.d/richmediattl.fix b/changelog.d/richmediattl.fix
new file mode 100644
index 0000000000..98de63015a
--- /dev/null
+++ b/changelog.d/richmediattl.fix
@@ -0,0 +1 @@
+Fix parsing of RichMedia TTLs for Amazon URLs when query parameters are nil
diff --git a/changelog.d/search-healthcheck.add b/changelog.d/search-healthcheck.add
new file mode 100644
index 0000000000..4974925e77
--- /dev/null
+++ b/changelog.d/search-healthcheck.add
@@ -0,0 +1 @@
+Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue
diff --git a/changelog.d/show-reposter-replies.add b/changelog.d/show-reposter-replies.add
new file mode 100644
index 0000000000..3b852ec3b1
--- /dev/null
+++ b/changelog.d/show-reposter-replies.add
@@ -0,0 +1 @@
+Display reposted replies with exclude_replies: true
\ No newline at end of file
diff --git a/changelog.d/status-notification-type.add b/changelog.d/status-notification-type.add
new file mode 100644
index 0000000000..a6e94fa870
--- /dev/null
+++ b/changelog.d/status-notification-type.add
@@ -0,0 +1 @@
+Add "status" notification type
\ No newline at end of file
diff --git a/changelog.d/support-honk-image-summaries.add b/changelog.d/support-honk-image-summaries.add
new file mode 100644
index 0000000000..052c03f95a
--- /dev/null
+++ b/changelog.d/support-honk-image-summaries.add
@@ -0,0 +1 @@
+Support honk-style attachment summaries as alt-text.
diff --git a/changelog.d/web_push_filtered.fix b/changelog.d/web_push_filtered.fix
new file mode 100644
index 0000000000..b9159362ab
--- /dev/null
+++ b/changelog.d/web_push_filtered.fix
@@ -0,0 +1 @@
+Web Push notifications are no longer generated for muted/blocked threads and users.
diff --git a/changelog.d/webfinger-validation.fix b/changelog.d/webfinger-validation.fix
new file mode 100644
index 0000000000..e643126665
--- /dev/null
+++ b/changelog.d/webfinger-validation.fix
@@ -0,0 +1 @@
+Fix validate_webfinger when Webfinger is served from a different domain
\ No newline at end of file
diff --git a/ci/elixir-1.13/Dockerfile b/ci/elixir-1.13/Dockerfile
new file mode 100644
index 0000000000..b8bceb3d97
--- /dev/null
+++ b/ci/elixir-1.13/Dockerfile
@@ -0,0 +1,8 @@
+FROM elixir:1.13.4-otp-24
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+ apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+ mix local.hex --force &&\
+ mix local.rebar --force
diff --git a/ci/elixir-1.13/build_and_push.sh b/ci/elixir-1.13/build_and_push.sh
new file mode 100755
index 0000000000..64e1856db1
--- /dev/null
+++ b/ci/elixir-1.13/build_and_push.sh
@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push .
diff --git a/ci/elixir-1.15-otp25/build_and_push.sh b/ci/elixir-1.15-otp25/build_and_push.sh
index 06fe74f349..a28e0d33ce 100755
--- a/ci/elixir-1.15-otp25/build_and_push.sh
+++ b/ci/elixir-1.15-otp25/build_and_push.sh
@@ -1 +1 @@
-docker buildx build --platform linux/amd64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
diff --git a/config/config.exs b/config/config.exs
index 3b8c6c9047..35b1716a6e 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -82,6 +82,10 @@
# region: "us-east-1", # may be required for Amazon AWS
scheme: "https://"
+config :pleroma, Pleroma.Uploaders.IPFS,
+ post_gateway_url: nil,
+ get_gateway_url: nil
+
config :pleroma, :emoji,
shortcode_globs: ["/emoji/custom/**/*.png"],
pack_extensions: [".png", ".gif"],
@@ -131,13 +135,13 @@
config :logger, :console,
level: :debug,
format: "\n$time $metadata[$level] $message\n",
- metadata: [:request_id]
+ metadata: [:actor, :path, :type, :user]
config :logger, :ex_syslogger,
level: :debug,
ident: "pleroma",
format: "$metadata[$level] $message",
- metadata: [:request_id]
+ metadata: [:actor, :path, :type, :user]
config :mime, :types, %{
"application/xml" => ["xml"],
@@ -188,6 +192,7 @@
allow_relay: true,
public: true,
quarantined_instances: [],
+ rejected_instances: [],
static_dir: "instance/static/",
allowed_post_formats: [
"text/plain",
@@ -411,6 +416,11 @@
accept: [],
reject: []
+config :pleroma, :mrf_dnsrbl,
+ nameserver: "127.0.0.1",
+ port: 53,
+ zone: "bl.pleroma.com"
+
# threshold of 7 days
config :pleroma, :mrf_object_age,
threshold: 604_800,
@@ -436,21 +446,26 @@
ttl: 60_000,
min_length: 50
- config :pleroma, :mrf_force_mention,
+config :pleroma, :mrf_force_mention,
mention_parent: true,
mention_quoted: true
+config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000
+
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
ignore_tld: ["local", "localdomain", "lan"],
parsers: [
- Pleroma.Web.RichMedia.Parsers.OEmbed,
- Pleroma.Web.RichMedia.Parsers.TwitterCard
+ Pleroma.Web.RichMedia.Parsers.TwitterCard,
+ Pleroma.Web.RichMedia.Parsers.OEmbed
],
- oembed_providers_enabled: true,
failure_backoff: 60_000,
- ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
+ ttl_setters: [
+ Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl,
+ Pleroma.Web.RichMedia.Parser.TTL.Opengraph
+ ],
+ max_body: 5_000_000
config :pleroma, :media_proxy,
enabled: false,
@@ -515,7 +530,8 @@
sts: false,
sts_max_age: 31_536_000,
ct_max_age: 2_592_000,
- referrer_policy: "same-origin"
+ referrer_policy: "same-origin",
+ allow_unsafe_eval: false
config :cors_plug,
max_age: 86_400,
@@ -577,25 +593,15 @@
log: false,
queues: [
activity_expiration: 10,
- token_expiration: 5,
- filter_expiration: 1,
- backup: 1,
- federator_incoming: 50,
- federator_outgoing: 50,
+ federator_incoming: 5,
+ federator_outgoing: 5,
ingestion_queue: 50,
web_push: 50,
- mailer: 10,
transmogrifier: 20,
- scheduled_activities: 10,
- poll_notifications: 10,
notifications: 20,
background: 5,
- remote_fetcher: 2,
- attachments_cleanup: 1,
- new_users_digest: 1,
- mute_expire: 5,
- search_indexing: 10,
- check_domain_resolve: 1
+ search_indexing: [limit: 10, paused: true],
+ slow: 1
],
plugins: [Oban.Plugins.Pruner],
crontab: [
@@ -845,22 +851,27 @@
config :pleroma, :pools,
federation: [
- size: 50,
- max_waiting: 10,
+ size: 75,
+ max_waiting: 20,
recv_timeout: 10_000
],
media: [
- size: 50,
+ size: 75,
+ max_waiting: 20,
+ recv_timeout: 15_000
+ ],
+ rich_media: [
+ size: 25,
max_waiting: 20,
recv_timeout: 15_000
],
upload: [
size: 25,
- max_waiting: 5,
+ max_waiting: 20,
recv_timeout: 15_000
],
default: [
- size: 10,
+ size: 50,
max_waiting: 2,
recv_timeout: 5_000
]
@@ -874,6 +885,10 @@
max_connections: 50,
timeout: 150_000
],
+ rich_media: [
+ max_connections: 50,
+ timeout: 150_000
+ ],
upload: [
max_connections: 25,
timeout: 300_000
@@ -919,8 +934,6 @@
process_chunk_size: 100
config :pleroma, ConcurrentLimiter, [
- {Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]},
- {Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]},
{Pleroma.Search, [max_running: 30, max_waiting: 50]},
{Pleroma.Webhook.Notify, [max_running: 5, max_waiting: 200]}
]
@@ -963,6 +976,19 @@
config :pleroma, Pleroma.Uploaders.Uploader, timeout: 30_000
+config :pleroma, Pleroma.Search.QdrantSearch,
+ qdrant_url: "http://127.0.0.1:6333/",
+ qdrant_api_key: "",
+ openai_url: "http://127.0.0.1:11345",
+ # The healthcheck url has to be set to nil when used with the real openai
+ # API, as it doesn't have a healthcheck endpoint.
+ openai_healthcheck_url: "http://127.0.0.1:11345/health",
+ openai_model: "snowflake/snowflake-arctic-embed-xs",
+ openai_api_key: "",
+ qdrant_index_configuration: %{
+ vectors: %{size: 384, distance: "Cosine"}
+ }
+
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
diff --git a/config/description.exs b/config/description.exs
index b1a36220bc..9b3b5457d1 100644
--- a/config/description.exs
+++ b/config/description.exs
@@ -136,6 +136,31 @@
}
]
},
+ %{
+ group: :pleroma,
+ key: Pleroma.Uploaders.IPFS,
+ type: :group,
+ description: "IPFS uploader-related settings",
+ children: [
+ %{
+ key: :get_gateway_url,
+ type: :string,
+ description: "GET Gateway URL",
+ suggestions: [
+ "https://ipfs.mydomain.com/{CID}",
+ "https://{CID}.ipfs.mydomain.com/"
+ ]
+ },
+ %{
+ key: :post_gateway_url,
+ type: :string,
+ description: "POST Gateway URL",
+ suggestions: [
+ "http://localhost:5001/"
+ ]
+ }
+ ]
+ },
%{
group: :pleroma,
key: Pleroma.Uploaders.S3,
@@ -749,6 +774,18 @@
{"*.quarantined.com", "Reason"}
]
},
+ %{
+ key: :rejected_instances,
+ type: {:list, :tuple},
+ key_placeholder: "instance",
+ value_placeholder: "reason",
+ description:
+ "List of ActivityPub instances to reject requests from if authorized_fetch_mode is enabled",
+ suggestions: [
+ {"rejected.com", "Reason"},
+ {"*.rejected.com", "Reason"}
+ ]
+ },
%{
key: :static_dir,
type: :string,
@@ -1829,6 +1866,12 @@
type: :boolean,
description: "Require HTTP signatures for AP fetches"
},
+ %{
+ key: :authorized_fetch_mode_exceptions,
+ type: {:list, :string},
+ description:
+ "List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)"
+ },
%{
key: :note_replies_output_limit,
type: :integer,
@@ -2149,12 +2192,6 @@
type: :boolean,
description: "Enables RichMedia parsing of URLs"
},
- %{
- key: :oembed_providers_enabled,
- type: :boolean,
- description:
- "Embed rich media from a list of known providers. This takes precedence over other parsers."
- },
%{
key: :ignore_hosts,
type: {:list, :string},
@@ -3763,7 +3800,7 @@
},
%{
key: :initial_indexing_chunk_size,
- type: :int,
+ type: :integer,
description:
"Amount of posts in a batch when running the initial indexing operation. Should probably not be more than 100000" <>
" since there's a limit on maximum insert size",
diff --git a/config/dev.exs b/config/dev.exs
index fe8de5045a..f23719fe3d 100644
--- a/config/dev.exs
+++ b/config/dev.exs
@@ -35,8 +35,8 @@
# configured to run both http and https servers on
# different ports.
-# Do not include metadata nor timestamps in development logs
-config :logger, :console, format: "[$level] $message\n"
+# Do not include timestamps in development logs
+config :logger, :console, format: "$metadata[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
diff --git a/config/test.exs b/config/test.exs
index 6d1b7b263c..40141a1483 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -67,7 +67,8 @@
config :pleroma, :rich_media,
enabled: false,
ignore_hosts: [],
- ignore_tld: ["local", "localdomain", "lan"]
+ ignore_tld: ["local", "localdomain", "lan"],
+ max_body: 2_000_000
config :pleroma, :instance,
multi_factor_authentication: [
@@ -164,6 +165,12 @@
config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
+config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock
+config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, config_impl: Pleroma.StaticStubbedConfigMock
+
+config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug,
+ http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock
peer_module =
if String.to_integer(System.otp_release()) >= 25 do
@@ -186,6 +193,8 @@
config :pleroma, Pleroma.Emoji.Loader, test_emoji: true
+config :pleroma, Pleroma.Web.RichMedia.Backfill, provider: Pleroma.Web.RichMedia.Backfill
+
if File.exists?("./config/test.secret.exs") do
import_config "test.secret.exs"
else
diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md
index 4aeae4f2c0..88f45b5c46 100644
--- a/docs/configuration/cheatsheet.md
+++ b/docs/configuration/cheatsheet.md
@@ -36,6 +36,7 @@ To add configuration to your config file, you can copy it from the base config.
* `allow_relay`: Permits remote instances to subscribe to all public posts of your instance. This may increase the visibility of your instance.
* `public`: Makes the client API in authenticated mode-only except for user-profiles. Useful for disabling the Local Timeline and The Whole Known Network. Note that there is a dependent setting restricting or allowing unauthenticated access to specific resources, see `restrict_unauthenticated` for more details.
* `quarantined_instances`: ActivityPub instances where private (DMs, followers-only) activities will not be send.
+* `rejected_instances`: ActivityPub instances to reject requests from if authorized_fetch_mode is enabled.
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML).
* `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with
older software for these nicknames.
@@ -283,6 +284,7 @@ Notes:
* `deny_follow_blocked`: Whether to disallow following an account that has blocked the user in question
* `sign_object_fetches`: Sign object fetches with HTTP signatures
* `authorized_fetch_mode`: Require HTTP signatures for AP fetches
+* `authorized_fetch_mode_exceptions`: List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)
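+
+A minimal sketch of using it (the IP ranges are placeholders; both keys sit alongside the other `:activitypub` options):
+
+```elixir
+config :pleroma, :activitypub,
+ authorized_fetch_mode: true,
+ authorized_fetch_mode_exceptions: ["192.168.0.0/16", "127.0.0.1/32"]
+```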
## Pleroma.User
@@ -433,7 +435,6 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
* `ignore_tld`: list of TLDs (top-level domains) which will be ignored when parsing metadata. Defaults to `["local", "localdomain", "lan"]`.
* `parsers`: list of Rich Media parsers.
-* `oembed_providers_enabled`: Embed rich media from a list of known providers. This takes precedence over other parsers.
* `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.
## HTTP server
@@ -472,6 +473,7 @@ This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls start
* ``ct_max_age``: The maximum age for the `Expect-CT` header if sent.
* ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`.
* ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header.
+* `allow_unsafe_eval`: Adds `wasm-unsafe-eval` to the CSP header. Needed for some non-essential frontend features like Flash emulation.
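+
+A minimal sketch of enabling it, assuming the `:http_security` settings group used for the headers above:
+
+```elixir
+config :pleroma, :http_security,
+ allow_unsafe_eval: true
+```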
### Pleroma.Web.Plugs.RemoteIp
@@ -662,6 +664,19 @@ config :ex_aws, :s3,
host: "s3.eu-central-1.amazonaws.com"
```
+#### Pleroma.Uploaders.IPFS
+
+* `post_gateway_url`: URL with port of POST Gateway (unauthenticated)
+* `get_gateway_url`: URL of public GET Gateway
+
+Example:
+
+```elixir
+config :pleroma, Pleroma.Uploaders.IPFS,
+ post_gateway_url: "http://localhost:5001",
+ get_gateway_url: "http://{CID}.ipfs.mydomain.com"
+```
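+
+To actually switch uploads to IPFS, point the uploader at the module (a sketch; the other `Pleroma.Upload` options are unchanged):
+
+```elixir
+config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.IPFS
+```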
+
### Upload filters
#### Pleroma.Upload.Filter.AnonymizeFilename
diff --git a/docs/configuration/search.md b/docs/configuration/search.md
index 0316c9bf43..d34f84d4f9 100644
--- a/docs/configuration/search.md
+++ b/docs/configuration/search.md
@@ -10,6 +10,30 @@ To use built-in search that has no external dependencies, set the search module
While it has no external dependencies, it has problems with performance and relevancy.
+## QdrantSearch
+
+This uses the vector search engine [Qdrant](https://qdrant.tech) to search posts in a vector space. It needs a way to generate embeddings, for which it uses the [OpenAI API](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). This API is implemented by several projects besides OpenAI itself, including the Python-based fastembed-server found in `supplemental/search/fastembed-api`.
+
+The default settings support a setup where both the fastembed server and Qdrant run on the same system as Pleroma. To use it, set the search provider and run the fastembed server (see the README in `supplemental/search/fastembed-api`):
+
+> config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch
+
+Then start the Qdrant server; see [here](https://qdrant.tech/documentation/quick-start/) for instructions.
+
+You will also need to create the Qdrant index once by running `mix pleroma.search.indexer create_index`. Running `mix pleroma.search.indexer index` will retroactively index the last 100_000 activities.
+
+### Indexing and model options
+
+To see the available configuration options, check out the QdrantSearch section in `config/config.exs`.
+
+The default indexing options work for the default model (`snowflake-arctic-embed-xs`). To optimize for a low memory footprint, adjust the index configuration as described in the [Qdrant docs](https://qdrant.tech/documentation/guides/optimize/). See also [this blog post](https://qdrant.tech/articles/memory-consumption/), which goes into more detail.
+
+Different embedding models will need different vector size settings. You can see a list of the models supported by the fastembed server [here](https://qdrant.github.io/fastembed/examples/Supported_Models), including their vector dimensions. These vector dimensions need to be set in the `qdrant_index_configuration`.
+
+For example, if you want to use `sentence-transformers/all-MiniLM-L6-v2` as a model, you will not need to adjust anything, because it and `snowflake-arctic-embed-xs` are both 384-dimensional models. If you want to use `snowflake/snowflake-arctic-embed-l`, you will need to adjust the `size` parameter in the `qdrant_index_configuration` to 1024, as that model has a dimension of 1024.
+
+When switching to a different model, you will need to drop the index and recreate it (`mix pleroma.search.indexer drop_index` and `mix pleroma.search.indexer create_index`), as embeddings from different models are not compatible with each other.
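+
+A sketch of what that adjustment could look like, reusing the keys from the defaults in `config/config.exs` (only the model name and the vector `size` change):
+
+```elixir
+config :pleroma, Pleroma.Search.QdrantSearch,
+ openai_model: "snowflake/snowflake-arctic-embed-l",
+ qdrant_index_configuration: %{
+ vectors: %{size: 1024, distance: "Cosine"}
+ }
+```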
+
## Meilisearch
Note that it's quite a bit more memory hungry than PostgreSQL (around 4-5G for ~1.2 million
diff --git a/docs/development/API/admin_api.md b/docs/development/API/admin_api.md
index e50ececa16..f1a8f23b30 100644
--- a/docs/development/API/admin_api.md
+++ b/docs/development/API/admin_api.md
@@ -1907,3 +1907,52 @@ Note that this differs from the Mastodon API variant: Mastodon API only returns
```json
{}
```
+
+## `GET /api/v1/pleroma/admin/rules`
+
+### List rules
+
+- Response: JSON, list of rules
+
+```json
+[
+ {
+ "id": "1",
+ "priority": 1,
+ "text": "There are no rules",
+ "hint": null
+ }
+]
+```
+
+## `POST /api/v1/pleroma/admin/rules`
+
+### Create a rule
+
+- Params:
+ - `text`: string, required, rule content
+ - `hint`: string, optional, rule description
+ - `priority`: integer, optional, rule ordering priority
+
+- Response: JSON, a single rule
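+
+The shape mirrors the entries in the list example above, e.g.:
+
+```json
+{
+ "id": "1",
+ "priority": 1,
+ "text": "There are no rules",
+ "hint": null
+}
+```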
+
+## `PATCH /api/v1/pleroma/admin/rules/:id`
+
+### Update a rule
+
+- Params:
+ - `text`: string, optional, rule content
+ - `hint`: string, optional, rule description
+ - `priority`: integer, optional, rule ordering priority
+
+- Response: JSON, a single rule
+
+## `DELETE /api/v1/pleroma/admin/rules/:id`
+
+### Delete a rule
+
+- Response: JSON, empty object
+
+```json
+{}
+```
diff --git a/docs/development/API/differences_in_mastoapi_responses.md b/docs/development/API/differences_in_mastoapi_responses.md
index 61a32cd6c4..0f7877be36 100644
--- a/docs/development/API/differences_in_mastoapi_responses.md
+++ b/docs/development/API/differences_in_mastoapi_responses.md
@@ -40,6 +40,7 @@ Has these additional fields under the `pleroma` object:
- `parent_visible`: If the parent of this post is visible to the user or not.
- `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise.
- `quotes_count`: the count of status quotes.
- `non_anonymous`: true if the source post specifies the poll results are not anonymous. Currently only implemented by Smithereen.
- `bookmark_folder`: the ID of the folder bookmark is stored within (if any).
- `event`: event information if the post is an event, `null` otherwise.
diff --git a/docs/development/API/pleroma_api.md b/docs/development/API/pleroma_api.md
index f9e3021f16..947cb600c1 100644
--- a/docs/development/API/pleroma_api.md
+++ b/docs/development/API/pleroma_api.md
@@ -406,9 +406,7 @@ See [Admin-API](admin_api.md)
"id": "9umDrYheeY451cQnEe",
"name": "Read later",
"emoji": "🕓",
- "source": {
- "emoji": "🕓"
- }
+ "emoji_url": null
}
]
```
diff --git a/docs/development/API/prometheus.md b/docs/development/API/prometheus.md
index a5158d9052..140291fe03 100644
--- a/docs/development/API/prometheus.md
+++ b/docs/development/API/prometheus.md
@@ -1,44 +1,47 @@
-# Prometheus Metrics
+# Prometheus / OpenTelemetry Metrics
-Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library.
+Pleroma includes support for exporting metrics via the [prom_ex](https://github.com/akoutmos/prom_ex) library.
+The metrics are exposed by a dedicated webserver/port to improve privacy and security.
Config example:
```
-config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
- enabled: true,
- auth: {:basic, "myusername", "mypassword"},
- ip_whitelist: ["127.0.0.1"],
- path: "/api/pleroma/app_metrics",
- format: :text
-```
-
-* `enabled` (Pleroma extension) enables the endpoint
-* `ip_whitelist` (Pleroma extension) could be used to restrict access only to specified IPs
-* `auth` sets the authentication (`false` for no auth; configurable to HTTP Basic Auth, see [prometheus-plugs](https://github.com/deadtrickster/prometheus-plugs#exporting) documentation)
-* `format` sets the output format (`:text` or `:protobuf`)
-* `path` sets the path to app metrics page
-
-
-## `/api/pleroma/app_metrics`
-
-### Exports Prometheus application metrics
-
-* Method: `GET`
-* Authentication: not required by default (see configuration options above)
-* Params: none
-* Response: text
-
-## Grafana
-
-### Config example
-
-The following is a config example to use with [Grafana](https://grafana.com)
+config :pleroma, Pleroma.PromEx,
+ disabled: false,
+ manual_metrics_start_delay: :no_delay,
+ drop_metrics_groups: [],
+ grafana: [
+ host: System.get_env("GRAFANA_HOST", "http://localhost:3000"),
+ auth_token: System.get_env("GRAFANA_TOKEN"),
+ upload_dashboards_on_start: false,
+ folder_name: "BEAM",
+ annotate_app_lifecycle: true
+ ],
+ metrics_server: [
+ port: 4021,
+ path: "/metrics",
+ protocol: :http,
+ pool_size: 5,
+ cowboy_opts: [],
+ auth_strategy: :none
+ ],
+ datasource: "Prometheus"
```
- - job_name: 'beam'
- metrics_path: /api/pleroma/app_metrics
- scheme: https
+
+PromEx can automatically publish dashboards to your Grafana server and register annotations. If you do not wish to configure this capability, you must generate the dashboard JSON files and import them directly. You can find the mix commands in the upstream [documentation](https://hexdocs.pm/prom_ex/Mix.Tasks.PromEx.Dashboard.Export.html), and you can find the list of modules enabled in Pleroma for which you should generate dashboards by examining the contents of the `lib/pleroma/prom_ex.ex` module.
+
+## prometheus.yml
+
+The following is a bare minimum config example to use with [Prometheus](https://prometheus.io) or Prometheus-compatible software like [VictoriaMetrics](https://victoriametrics.com).
+
+```
+global:
+ scrape_interval: 15s
+
+scrape_configs:
+ - job_name: 'pleroma'
+ scheme: http
static_configs:
- - targets: ['pleroma.soykaf.com']
+ - targets: ['pleroma.soykaf.com:4021']
```
diff --git a/docs/installation/debian_based_jp.md b/docs/installation/debian_based_jp.md
index b6b5c9c078..5a0823a634 100644
--- a/docs/installation/debian_based_jp.md
+++ b/docs/installation/debian_based_jp.md
@@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have
- PostgreSQL 11.0以上 (Ubuntu16.04では9.5しか提供されていないので,[](https://www.postgresql.org/download/linux/ubuntu/)こちらから新しいバージョンを入手してください)
- `postgresql-contrib` 11.0以上 (同上)
-- Elixir 1.8 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください)
+- Elixir 1.13 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください)
- `erlang-dev`
- `erlang-nox`
- `git`
diff --git a/docs/installation/generic_dependencies.include b/docs/installation/generic_dependencies.include
index 6572716ed0..666f49fbbf 100644
--- a/docs/installation/generic_dependencies.include
+++ b/docs/installation/generic_dependencies.include
@@ -1,7 +1,7 @@
## Required dependencies
* PostgreSQL >=11.0
-* Elixir >=1.11.0 <1.15
+* Elixir >=1.13.0 <1.15
* Erlang OTP >=22.2.0 (supported: <27)
* git
* file / libmagic
diff --git a/lib/mix/tasks/pleroma/search/indexer.ex b/lib/mix/tasks/pleroma/search/indexer.ex
new file mode 100644
index 0000000000..81a9fced63
--- /dev/null
+++ b/lib/mix/tasks/pleroma/search/indexer.ex
@@ -0,0 +1,80 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Mix.Tasks.Pleroma.Search.Indexer do
+ import Mix.Pleroma
+ import Ecto.Query
+
+ alias Pleroma.Workers.SearchIndexingWorker
+
+ def run(["create_index"]) do
+ start_pleroma()
+
+ with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).create_index() do
+ IO.puts("Index created")
+ else
+ e -> IO.puts("Could not create index: #{inspect(e)}")
+ end
+ end
+
+ def run(["drop_index"]) do
+ start_pleroma()
+
+ with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).drop_index() do
+ IO.puts("Index dropped")
+ else
+ e -> IO.puts("Could not drop index: #{inspect(e)}")
+ end
+ end
+
+ def run(["index" | options]) do
+ {options, [], []} =
+ OptionParser.parse(
+ options,
+ strict: [
+ limit: :integer
+ ]
+ )
+
+ start_pleroma()
+
+ limit = Keyword.get(options, :limit, 100_000)
+
+ per_step = 1000
+ chunks = max(div(limit, per_step), 1)
+
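+ # Walk the activities in pages of `per_step` (newest first), then hand the
+ # ids to the indexing queue in chunks of 100.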
+ 1..chunks
+ |> Enum.each(fn step ->
+ q =
+ from(a in Pleroma.Activity,
+ limit: ^per_step,
+ offset: ^per_step * (^step - 1),
+ select: [:id],
+ order_by: [desc: :id]
+ )
+
+ {:ok, ids} =
+ Pleroma.Repo.transaction(fn ->
+ Pleroma.Repo.stream(q, timeout: :infinity)
+ |> Enum.map(fn a ->
+ a.id
+ end)
+ end)
+
+ IO.puts("Got #{length(ids)} activities, adding to indexer")
+
+ ids
+ |> Enum.chunk_every(100)
+ |> Enum.each(fn chunk ->
+ IO.puts("Adding #{length(chunk)} activities to indexing queue")
+
+ chunk
+ |> Enum.map(fn id ->
+ SearchIndexingWorker.new(%{"op" => "add_to_index", "activity" => id})
+ end)
+ |> Oban.insert_all()
+ end)
+ end)
+ end
+end
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index 10f5413868..723576a13f 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -15,6 +15,7 @@ defmodule Pleroma.Application do
@compat_name Mix.Project.config()[:compat_name]
@version Mix.Project.config()[:version]
@repository Mix.Project.config()[:source_url]
+ @compile_env Mix.env()
def name, do: @name
def compat_name, do: @compat_name
@@ -54,7 +55,11 @@ def start(_type, _args) do
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.Oban.warn()
Config.DeprecationWarnings.warn()
- Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
+
+ if @compile_env != :test do
+ Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
+ end
+
Pleroma.ApplicationRequirements.verify!()
load_custom_modules()
Pleroma.Docs.JSON.compile()
@@ -112,7 +117,8 @@ def start(_type, _args) do
task_children() ++
streamer_registry() ++
background_migrators() ++
- [Pleroma.Gopher.Server]
+ [Pleroma.Gopher.Server] ++
+ [Pleroma.Search.Healthcheck]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
@@ -159,15 +165,16 @@ defp cachex_children do
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
+ build_cachex("failed_media_helper_url", default_ttl: :timer.minutes(15), limit: 2_500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("chat_message_id_idempotency_key",
expiration: chat_message_id_idempotency_key_expiration(),
limit: 500_000
),
- build_cachex("anti_duplication_mrf", limit: 5_000),
- build_cachex("translations", default_ttl: :timer.hours(24), limit: 5_000),
build_cachex("rel_me", default_ttl: :timer.minutes(30), limit: 2_500),
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000),
+ build_cachex("anti_duplication_mrf", limit: 5_000),
+ build_cachex("translations", default_ttl: :timer.hours(24), limit: 5_000),
build_cachex("domain", limit: 2500)
]
end
diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application_requirements.ex
index b3d9762ed0..db1bfc133a 100644
--- a/lib/pleroma/application_requirements.ex
+++ b/lib/pleroma/application_requirements.ex
@@ -28,6 +28,7 @@ def verify! do
|> check_welcome_message_config!()
|> check_rum!()
|> check_repo_pool_size!()
+ |> check_mrfs()
|> handle_result()
end
@@ -268,4 +269,25 @@ defp check_filter(filter, command_required) do
true
end
end
+
+ defp check_mrfs(:ok) do
+ mrfs = Config.get!([:mrf, :policies])
+
+ missing_mrfs =
+ Enum.reduce(mrfs, [], fn x, acc ->
+ if match?({:module, _}, Code.ensure_compiled(x)) do
+ acc
+ else
+ acc ++ [x]
+ end
+ end)
+
+ if Enum.empty?(missing_mrfs) do
+ :ok
+ else
+ {:error, "The following MRF modules are configured but missing: #{inspect(missing_mrfs)}"}
+ end
+ end
+
+ defp check_mrfs(result), do: result
end
diff --git a/lib/pleroma/constants.ex b/lib/pleroma/constants.ex
index 09687360f2..7d435eca27 100644
--- a/lib/pleroma/constants.ex
+++ b/lib/pleroma/constants.ex
@@ -20,6 +20,7 @@ defmodule Pleroma.Constants do
"deleted_activity_id",
"pleroma_internal",
"generator",
+ "rules",
"assigned_account",
"rules",
"content_type",
diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex
index 2cc0fda003..0271c0b7e3 100644
--- a/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex
+++ b/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex
@@ -5,13 +5,13 @@
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap do
use Ecto.Type
- import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
- only: [is_good_locale_code?: 1]
+ alias Pleroma.MultiLanguage
def type, do: :map
def cast(%{} = object) do
- with {status, %{} = data} when status in [:modified, :ok] <- validate_map(object) do
+ with {status, %{} = data} when status in [:modified, :ok] <-
+ MultiLanguage.validate_map(object) do
{:ok, data}
else
{_, nil} -> {:ok, nil}
@@ -24,26 +24,4 @@ def cast(_), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
-
- defp validate_map(%{} = object) do
- {status, data} =
- object
- |> Enum.reduce({:ok, %{}}, fn
- {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) ->
- if is_good_locale_code?(lang) do
- {status, Map.put(acc, lang, value)}
- else
- {:modified, acc}
- end
-
- _, {_status, acc} ->
- {:modified, acc}
- end)
-
- if data == %{} do
- {status, nil}
- else
- {status, data}
- end
- end
end
diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex
index b15e9ec5e0..ce91b42acc 100644
--- a/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex
+++ b/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex
@@ -5,10 +5,12 @@
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode do
use Ecto.Type
+ alias Pleroma.MultiLanguage
+
def type, do: :string
def cast(language) when is_binary(language) do
- if is_good_locale_code?(language) do
+ if MultiLanguage.good_locale_code?(language) do
{:ok, language}
else
{:error, :invalid_language}
@@ -20,8 +22,4 @@ def cast(_), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
-
- def is_good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+$>
-
- def is_good_locale_code?(_code), do: false
end
diff --git a/lib/pleroma/emoji/formatter.ex b/lib/pleroma/emoji/formatter.ex
index 87fd35f138..b87cdc00b2 100644
--- a/lib/pleroma/emoji/formatter.ex
+++ b/lib/pleroma/emoji/formatter.ex
@@ -48,5 +48,9 @@ def get_emoji_map(text) when is_binary(text) do
end)
end
+ def get_emoji_map(%{} = map) do
+ Enum.reduce(map, %{}, fn {_, content}, acc -> Map.merge(acc, get_emoji_map(content)) end)
+ end
+
def get_emoji_map(_), do: %{}
end
diff --git a/lib/pleroma/helpers/inet_helper.ex b/lib/pleroma/helpers/inet_helper.ex
index 704d37f8a7..00e18649ec 100644
--- a/lib/pleroma/helpers/inet_helper.ex
+++ b/lib/pleroma/helpers/inet_helper.ex
@@ -16,4 +16,15 @@ def parse_address(ip) when is_binary(ip) do
def parse_address(ip) do
:inet.parse_address(ip)
end
+
+ def parse_cidr(proxy) when is_binary(proxy) do
+ proxy =
+ cond do
+ "/" in String.codepoints(proxy) -> proxy
+ InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
+ InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
+ end
+
+ InetCidr.parse_cidr!(proxy, true)
+ end
end
diff --git a/lib/pleroma/helpers/media_helper.ex b/lib/pleroma/helpers/media_helper.ex
index 7864296fac..e44114d9da 100644
--- a/lib/pleroma/helpers/media_helper.ex
+++ b/lib/pleroma/helpers/media_helper.ex
@@ -12,6 +12,8 @@ defmodule Pleroma.Helpers.MediaHelper do
require Logger
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+
def missing_dependencies do
Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
if Pleroma.Utils.command_available?(executable) do
@@ -43,29 +45,40 @@ def image_resize(url, options) do
@spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
def video_framegrab(url) do
with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
+ false <- @cachex.exists?(:failed_media_helper_cache, url),
{:ok, env} <- HTTP.get(url, [], pool: :media),
{:ok, pid} <- StringIO.open(env.body) do
body_stream = IO.binstream(pid, 1)
- result =
- Exile.stream!(
- [
- executable,
- "-i",
- "pipe:0",
- "-vframes",
- "1",
- "-f",
- "mjpeg",
- "pipe:1"
- ],
- input: body_stream,
- ignore_epipe: true,
- stderr: :disable
- )
- |> Enum.into(<<>>)
+ task =
+ Task.async(fn ->
+ Exile.stream!(
+ [
+ executable,
+ "-i",
+ "pipe:0",
+ "-vframes",
+ "1",
+ "-f",
+ "mjpeg",
+ "pipe:1"
+ ],
+ input: body_stream,
+ ignore_epipe: true,
+ stderr: :disable
+ )
+ |> Enum.into(<<>>)
+ end)
- {:ok, result}
+ case Task.yield(task, 5_000) do
+ nil ->
+ Task.shutdown(task)
+ @cachex.put(:failed_media_helper_cache, url, nil)
+ {:error, {:ffmpeg, :timeout}}
+
+ result ->
+ {:ok, result}
+ end
else
nil -> {:error, {:ffmpeg, :command_not_found}}
{:error, _} = error -> error
diff --git a/lib/pleroma/html.ex b/lib/pleroma/html.ex
index 84ff2f1297..4de7cbb76d 100644
--- a/lib/pleroma/html.ex
+++ b/lib/pleroma/html.ex
@@ -65,20 +65,16 @@ def ensure_scrubbed_html(
end
end
- @spec extract_first_external_url_from_object(Pleroma.Object.t()) ::
- {:ok, String.t()} | {:error, :no_content}
+ @spec extract_first_external_url_from_object(Pleroma.Object.t()) :: String.t() | nil
def extract_first_external_url_from_object(%{data: %{"content" => content}})
when is_binary(content) do
- url =
- content
- |> Floki.parse_fragment!()
- |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
- |> Enum.take(1)
- |> Floki.attribute("href")
- |> Enum.at(0)
-
- {:ok, url}
+ content
+ |> Floki.parse_fragment!()
+ |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+ |> Enum.take(1)
+ |> Floki.attribute("href")
+ |> Enum.at(0)
end
- def extract_first_external_url_from_object(_), do: {:error, :no_content}
+ def extract_first_external_url_from_object(_), do: nil
end
diff --git a/lib/pleroma/http_signatures_api.ex b/lib/pleroma/http_signatures_api.ex
new file mode 100644
index 0000000000..8e73dc98ef
--- /dev/null
+++ b/lib/pleroma/http_signatures_api.ex
@@ -0,0 +1,4 @@
+defmodule Pleroma.HTTPSignaturesAPI do
+ @callback validate_conn(conn :: Plug.Conn.t()) :: boolean
+ @callback signature_for_conn(conn :: Plug.Conn.t()) :: map
+end
diff --git a/lib/pleroma/multi_language.ex b/lib/pleroma/multi_language.ex
new file mode 100644
index 0000000000..ad6a28b39a
--- /dev/null
+++ b/lib/pleroma/multi_language.ex
@@ -0,0 +1,43 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.MultiLanguage do
+ def good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+$>
+
+ def good_locale_code?(_code), do: false
+
+ def validate_map(%{} = object) do
+ {status, data} =
+ object
+ |> Enum.reduce({:ok, %{}}, fn
+ {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) ->
+ if good_locale_code?(lang) do
+ {status, Map.put(acc, lang, value)}
+ else
+ {:modified, acc}
+ end
+
+ _, {_status, acc} ->
+ {:modified, acc}
+ end)
+
+ if data == %{} do
+ {status, nil}
+ else
+ {status, data}
+ end
+ end
+
+ def validate_map(_), do: {:error, nil}
+
+ def str_to_map(data, opts \\ []) do
+ with lang when is_binary(lang) <- opts[:lang],
+ true <- good_locale_code?(lang) do
+ %{lang => data}
+ else
+ _ ->
+ %{"und" => data}
+ end
+ end
+end
diff --git a/lib/pleroma/notification.ex b/lib/pleroma/notification.ex
index 288558db79..b7c45f886b 100644
--- a/lib/pleroma/notification.ex
+++ b/lib/pleroma/notification.ex
@@ -74,6 +74,7 @@ def unread_notifications_count(%User{id: user_id}) do
reblog
poll
status
+ update
pleroma:participation_accepted
pleroma:participation_request
pleroma:event_reminder
@@ -285,7 +286,7 @@ def set_read_up_to(%{id: user_id} = user, id) do
select: n.id
)
- {:ok, %{ids: {_, notification_ids}, marker: marker}} =
+ {:ok, %{marker: marker}} =
Multi.new()
|> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
|> Marker.multi_set_last_read_id(user, "notifications")
@@ -293,23 +294,21 @@ def set_read_up_to(%{id: user_id} = user, id) do
Streamer.stream(["user", "user:notification"], marker)
- for_user_query(user)
- |> where([n], n.id in ^notification_ids)
- |> Repo.all()
+ {:ok, %{marker: marker}}
end
@spec read_one(User.t(), String.t()) ::
{:ok, Notification.t()} | {:error, Ecto.Changeset.t()} | nil
def read_one(%User{} = user, notification_id) do
- with {:ok, %Notification{} = notification} <- get(user, notification_id) do
- Multi.new()
- |> Multi.update(:update, changeset(notification, %{seen: true}))
- |> Marker.multi_set_last_read_id(user, "notifications")
- |> Repo.transaction()
- |> case do
- {:ok, %{update: notification}} -> {:ok, notification}
- {:error, :update, changeset, _} -> {:error, changeset}
- end
+ with {:ok, %Notification{} = notification} <- get(user, notification_id),
+ {:ok, %{marker: marker}} <-
+ Multi.new()
+ |> Multi.update(:update, changeset(notification, %{seen: true}))
+ |> Marker.multi_set_last_read_id(user, "notifications")
+ |> Repo.transaction() do
+ Streamer.stream(["user", "user:notification"], marker)
+
+ {:ok, %{marker: marker}}
end
end
@@ -368,20 +367,20 @@ def dismiss(%{id: user_id} = _user, id) do
end
end
- @spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
- def create_notifications(activity, options \\ [])
+ @spec create_notifications(Activity.t()) :: {:ok, [Notification.t()] | []}
+ def create_notifications(activity)
- def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
+ def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
object = Object.normalize(activity, fetch: false)
if object && object.data["type"] == "Answer" do
{:ok, []}
else
- do_create_notifications(activity, options)
+ do_create_notifications(activity)
end
end
- def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
+ def create_notifications(%Activity{data: %{"type" => type}} = activity)
when type in [
"Follow",
"Like",
@@ -393,41 +392,29 @@ def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
"Accept",
"Join"
] do
- do_create_notifications(activity, options)
+ do_create_notifications(activity)
end
- def create_notifications(_, _), do: {:ok, []}
+ def create_notifications(_), do: {:ok, []}
- defp do_create_notifications(%Activity{} = activity, options) do
- do_send = Keyword.get(options, :do_send, true)
+ defp do_create_notifications(%Activity{} = activity) do
+ enabled_participants = get_notified_participants_from_activity(activity)
- {enabled_participants, disabled_participants} =
- get_notified_participants_from_activity(activity)
+ enabled_receivers = get_notified_from_activity(activity) -- enabled_participants
- potential_participants = enabled_participants ++ disabled_participants
-
- {enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
-
- potential_receivers = (enabled_receivers ++ disabled_receivers) -- potential_participants
-
- {enabled_subscribers, disabled_subscribers} = get_notified_subscribers_from_activity(activity)
-
- potential_subscribers =
- (enabled_subscribers ++ disabled_subscribers) --
- (potential_participants ++ potential_receivers)
+ enabled_subscribers =
+ get_notified_subscribers_from_activity(activity) --
+ (enabled_participants ++ enabled_receivers)
notifications =
- (Enum.map(potential_receivers, fn user ->
- do_send = do_send && user in enabled_receivers
- create_notification(activity, user, do_send: do_send)
+ (Enum.map(enabled_receivers, fn user ->
+ create_notification(activity, user)
end) ++
- Enum.map(potential_subscribers, fn user ->
- do_send = do_send && user in enabled_subscribers
- create_notification(activity, user, do_send: do_send, type: "status")
+ Enum.map(enabled_subscribers, fn user ->
+ create_notification(activity, user, type: "status")
end) ++
- Enum.map(potential_participants, fn user ->
- do_send = do_send && user in enabled_participants
- create_notification(activity, user, do_send: do_send, type: "pleroma:event_update")
+ Enum.map(enabled_participants, fn user ->
+ create_notification(activity, user, type: "pleroma:event_update")
end))
|> Enum.reject(&is_nil/1)
@@ -493,7 +480,6 @@ defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do
# TODO move to sql, too.
def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
- do_send = Keyword.get(opts, :do_send, true)
type = Keyword.get(opts, :type, type_from_activity(activity))
unless skip?(activity, user, opts) do
@@ -508,11 +494,6 @@ def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
- if do_send do
- Streamer.stream(["user", "user:notification"], notification)
- Push.send(notification)
- end
-
notification
end
end
@@ -594,13 +575,10 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
|> exclude_relationship_restricted_ap_ids(activity)
|> exclude_thread_muter_ap_ids(activity)
- notification_enabled_users =
- Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
- {notification_enabled_users, potential_receivers -- notification_enabled_users}
+ Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
- def get_notified_from_activity(_, _local_only), do: {[], []}
+ def get_notified_from_activity(_, _local_only), do: []
def get_notified_subscribers_from_activity(activity, local_only \\ true)
@@ -615,13 +593,10 @@ def get_notified_subscribers_from_activity(
potential_receivers =
User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
- notification_enabled_users =
- Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
- {notification_enabled_users, potential_receivers -- notification_enabled_users}
+ Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
- def get_notified_subscribers_from_activity(_, _), do: {[], []}
+ def get_notified_subscribers_from_activity(_, _), do: []
def get_notified_participants_from_activity(activity, local_only \\ true)
@@ -636,13 +611,10 @@ def get_notified_participants_from_activity(
potential_receivers =
User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
- notification_enabled_users =
- Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
- {notification_enabled_users, potential_receivers -- notification_enabled_users}
+ Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
- def get_notified_participants_from_activity(_, _), do: {[], []}
+ def get_notified_participants_from_activity(_, _), do: []
# For some activities, only notify the author of the object
def get_potential_receiver_ap_ids(%{data: %{"type" => type, "object" => object_id}})
@@ -780,6 +752,7 @@ def skip?(activity, user, opts \\ [])
def skip?(%Activity{} = activity, %User{} = user, opts) do
[
:self,
+ :internal,
:invisible,
:block_from_strangers,
:recently_followed,
@@ -799,6 +772,12 @@ def skip?(:self, %Activity{} = activity, %User{} = user, opts) do
end
end
+ def skip?(:internal, %Activity{} = activity, _user, _opts) do
+ actor = activity.data["actor"]
+ user = User.get_cached_by_ap_id(actor)
+ User.internal?(user)
+ end
+
def skip?(:invisible, %Activity{} = activity, _user, _opts) do
actor = activity.data["actor"]
user = User.get_cached_by_ap_id(actor)
@@ -885,4 +864,12 @@ def mark_context_as_read(%User{id: id}, context) do
)
|> Repo.update_all(set: [seen: true])
end
+
+ @spec send(list(Notification.t())) :: :ok
+ def send(notifications) do
+ Enum.each(notifications, fn notification ->
+ Streamer.stream(["user", "user:notification"], notification)
+ Push.send(notification)
+ end)
+ end
end
diff --git a/lib/pleroma/rule.ex b/lib/pleroma/rule.ex
index 067d071112..f59294a006 100644
--- a/lib/pleroma/rule.ex
+++ b/lib/pleroma/rule.ex
@@ -16,13 +16,14 @@ defmodule Pleroma.Rule do
schema "rules" do
field(:priority, :integer, default: 0)
field(:text, :string)
+ field(:hint, :string)
timestamps()
end
def changeset(%Rule{} = rule, params \\ %{}) do
rule
- |> cast(params, [:priority, :text])
+ |> cast(params, [:priority, :text, :hint])
|> validate_required([:text])
end
@@ -39,6 +40,11 @@ def get(ids) when is_list(ids) do
def get(id), do: Repo.get(__MODULE__, id)
+ def exists?(id) do
+ from(r in __MODULE__, where: r.id == ^id)
+ |> Repo.exists?()
+ end
+
def create(params) do
{:ok, rule} =
%Rule{}
diff --git a/lib/pleroma/scheduled_activity.ex b/lib/pleroma/scheduled_activity.ex
index 63c6cb45b1..c361d7d893 100644
--- a/lib/pleroma/scheduled_activity.ex
+++ b/lib/pleroma/scheduled_activity.ex
@@ -204,7 +204,7 @@ def due_activities(offset \\ 0) do
def job_query(scheduled_activity_id) do
from(j in Oban.Job,
- where: j.queue == "scheduled_activities",
+ where: j.queue == "federator_outgoing",
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
)
end
diff --git a/lib/pleroma/search.ex b/lib/pleroma/search.ex
index 3b266e59bb..fd0218cb8e 100644
--- a/lib/pleroma/search.ex
+++ b/lib/pleroma/search.ex
@@ -10,8 +10,12 @@ def remove_from_index(%Pleroma.Object{id: object_id}) do
end
def search(query, options) do
- search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
-
+ search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.search(options[:for_user], query, options)
end
+
+ def healthcheck_endpoints do
+ search_module = Pleroma.Config.get([Pleroma.Search, :module])
+ search_module.healthcheck_endpoints
+ end
end
diff --git a/lib/pleroma/search/database_search.ex b/lib/pleroma/search/database_search.ex
index 31bfc7e338..c6fe8a9bd8 100644
--- a/lib/pleroma/search/database_search.ex
+++ b/lib/pleroma/search/database_search.ex
@@ -48,6 +48,15 @@ def add_to_index(_activity), do: :ok
@impl true
def remove_from_index(_object), do: :ok
+ @impl true
+ def create_index, do: :ok
+
+ @impl true
+ def drop_index, do: :ok
+
+ @impl true
+ def healthcheck_endpoints, do: nil
+
def maybe_restrict_author(query, %User{} = author) do
Activity.Queries.by_author(query, author)
end
diff --git a/lib/pleroma/search/healthcheck.ex b/lib/pleroma/search/healthcheck.ex
new file mode 100644
index 0000000000..e562c84785
--- /dev/null
+++ b/lib/pleroma/search/healthcheck.ex
@@ -0,0 +1,86 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+defmodule Pleroma.Search.Healthcheck do
+ @doc """
+ Monitors health of search backend to control processing of events based on health and availability.
+ """
+ use GenServer
+ require Logger
+
+ @queue :search_indexing
+ @tick :timer.seconds(5)
+ @timeout :timer.seconds(2)
+
+ def start_link(_) do
+ GenServer.start_link(__MODULE__, [], name: __MODULE__)
+ end
+
+ @impl true
+ def init(_) do
+ state = %{healthy: false}
+ {:ok, state, {:continue, :start}}
+ end
+
+ @impl true
+ def handle_continue(:start, state) do
+ tick()
+ {:noreply, state}
+ end
+
+ @impl true
+ def handle_info(:check, state) do
+ urls = Pleroma.Search.healthcheck_endpoints()
+
+ new_state =
+ if check(urls) do
+ Oban.resume_queue(queue: @queue)
+ Map.put(state, :healthy, true)
+ else
+ Oban.pause_queue(queue: @queue)
+ Map.put(state, :healthy, false)
+ end
+
+ maybe_log_state_change(state, new_state)
+
+ tick()
+ {:noreply, new_state}
+ end
+
+ @impl true
+ def handle_call(:state, _from, state) do
+ {:reply, state, state, :hibernate}
+ end
+
+ def state, do: GenServer.call(__MODULE__, :state)
+
+ def check([]), do: true
+
+ def check(urls) when is_list(urls) do
+ Enum.all?(
+ urls,
+ fn url ->
+ case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
+ {:ok, %{status: 200}} -> true
+ _ -> false
+ end
+ end
+ )
+ end
+
+ def check(_), do: true
+
+ defp tick do
+ Process.send_after(self(), :check, @tick)
+ end
+
+ defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
+ Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
+ end
+
+ defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
+ Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
+ end
+
+ defp maybe_log_state_change(_, _), do: :ok
+end
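
A brief usage sketch, assuming the healthcheck GenServer has been started under the application's supervision tree (not shown in this diff): its current view of backend health can be inspected via state/0.

    # Returns the state map maintained by handle_info(:check, ...) above.
    Pleroma.Search.Healthcheck.state()
    # => %{healthy: true}
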
diff --git a/lib/pleroma/search/meilisearch.ex b/lib/pleroma/search/meilisearch.ex
index 2bff663e88..9bba5b30f8 100644
--- a/lib/pleroma/search/meilisearch.ex
+++ b/lib/pleroma/search/meilisearch.ex
@@ -10,6 +10,12 @@ defmodule Pleroma.Search.Meilisearch do
@behaviour Pleroma.Search.SearchBackend
+ @impl true
+ def create_index, do: :ok
+
+ @impl true
+ def drop_index, do: :ok
+
defp meili_headers do
private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])
@@ -178,4 +184,15 @@ def add_to_index(activity) do
def remove_from_index(object) do
meili_delete("/indexes/objects/documents/#{object.id}")
end
+
+ @impl true
+ def healthcheck_endpoints do
+ endpoint =
+ Config.get([Pleroma.Search.Meilisearch, :url])
+ |> URI.parse()
+ |> Map.put(:path, "/health")
+ |> URI.to_string()
+
+ [endpoint]
+ end
end
diff --git a/lib/pleroma/search/qdrant_search.ex b/lib/pleroma/search/qdrant_search.ex
new file mode 100644
index 0000000000..b659bb682c
--- /dev/null
+++ b/lib/pleroma/search/qdrant_search.ex
@@ -0,0 +1,182 @@
+defmodule Pleroma.Search.QdrantSearch do
+ @behaviour Pleroma.Search.SearchBackend
+ import Ecto.Query
+
+ alias Pleroma.Activity
+ alias Pleroma.Config.Getting, as: Config
+
+ alias __MODULE__.OpenAIClient
+ alias __MODULE__.QdrantClient
+
+ import Pleroma.Search.Meilisearch, only: [object_to_search_data: 1]
+ import Pleroma.Search.DatabaseSearch, only: [maybe_fetch: 3]
+
+ @impl true
+ def create_index do
+ payload = Config.get([Pleroma.Search.QdrantSearch, :qdrant_index_configuration])
+
+ with {:ok, %{status: 200}} <- QdrantClient.put("/collections/posts", payload) do
+ :ok
+ else
+ e -> {:error, e}
+ end
+ end
+
+ @impl true
+ def drop_index do
+ with {:ok, %{status: 200}} <- QdrantClient.delete("/collections/posts") do
+ :ok
+ else
+ e -> {:error, e}
+ end
+ end
+
+ def get_embedding(text) do
+ with {:ok, %{body: %{"data" => [%{"embedding" => embedding}]}}} <-
+ OpenAIClient.post("/v1/embeddings", %{
+ input: text,
+ model: Config.get([Pleroma.Search.QdrantSearch, :openai_model])
+ }) do
+ {:ok, embedding}
+ else
+ _ ->
+ {:error, "Failed to get embedding"}
+ end
+ end
+
+ defp actor_from_activity(%{data: %{"actor" => actor}}) do
+ actor
+ end
+
+ defp actor_from_activity(_), do: nil
+
+ defp build_index_payload(activity, embedding) do
+ actor = actor_from_activity(activity)
+ published_at = activity.data["published"]
+
+ %{
+ points: [
+ %{
+ id: activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!(),
+ vector: embedding,
+ payload: %{actor: actor, published_at: published_at}
+ }
+ ]
+ }
+ end
+
+ defp build_search_payload(embedding, options) do
+ base = %{
+ vector: embedding,
+ limit: options[:limit] || 20,
+ offset: options[:offset] || 0
+ }
+
+ if author = options[:author] do
+ Map.put(base, :filter, %{
+ must: [%{key: "actor", match: %{value: author.ap_id}}]
+ })
+ else
+ base
+ end
+ end
+
+ @impl true
+ def add_to_index(activity) do
+ # This will only index public or unlisted notes
+ maybe_search_data = object_to_search_data(activity.object)
+
+ if activity.data["type"] == "Create" and maybe_search_data do
+ with {:ok, embedding} <- get_embedding(maybe_search_data.content),
+ {:ok, %{status: 200}} <-
+ QdrantClient.put(
+ "/collections/posts/points",
+ build_index_payload(activity, embedding)
+ ) do
+ :ok
+ else
+ e -> {:error, e}
+ end
+ else
+ :ok
+ end
+ end
+
+ @impl true
+ def remove_from_index(object) do
+ activity = Activity.get_by_object_ap_id_with_object(object.data["id"])
+ id = activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()
+
+ with {:ok, %{status: 200}} <-
+ QdrantClient.post("/collections/posts/points/delete", %{"points" => [id]}) do
+ :ok
+ else
+ e -> {:error, e}
+ end
+ end
+
+ @impl true
+ def search(user, original_query, options) do
+ query = "Represent this sentence for searching relevant passages: #{original_query}"
+
+ with {:ok, embedding} <- get_embedding(query),
+ {:ok, %{body: %{"result" => result}}} <-
+ QdrantClient.post(
+ "/collections/posts/points/search",
+ build_search_payload(embedding, options)
+ ) do
+ ids =
+ Enum.map(result, fn %{"id" => id} ->
+ Ecto.UUID.dump!(id)
+ end)
+
+ from(a in Activity, where: a.id in ^ids)
+ |> Activity.with_preloaded_object()
+ |> Activity.restrict_deactivated_users()
+ |> Ecto.Query.order_by([a], fragment("array_position(?, ?)", ^ids, a.id))
+ |> Pleroma.Repo.all()
+ |> maybe_fetch(user, original_query)
+ else
+ _ ->
+ []
+ end
+ end
+
+ @impl true
+ def healthcheck_endpoints do
+ qdrant_health =
+ Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])
+ |> URI.parse()
+ |> Map.put(:path, "/healthz")
+ |> URI.to_string()
+
+ openai_health = Config.get([Pleroma.Search.QdrantSearch, :openai_healthcheck_url])
+
+ [qdrant_health, openai_health] |> Enum.filter(& &1)
+ end
+end
+
+defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
+ use Tesla
+ alias Pleroma.Config.Getting, as: Config
+
+ plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
+ plug(Tesla.Middleware.JSON)
+
+ plug(Tesla.Middleware.Headers, [
+ {"Authorization",
+ "Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
+ ])
+end
+
+defmodule Pleroma.Search.QdrantSearch.QdrantClient do
+ use Tesla
+ alias Pleroma.Config.Getting, as: Config
+
+ plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
+ plug(Tesla.Middleware.JSON)
+
+ plug(Tesla.Middleware.Headers, [
+ {"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
+ ])
+end
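
A hedged configuration sketch for the new backend; the option keys are the ones read above, while the URLs, model name, and index settings are placeholders that must match your own Qdrant and embedding setup.

    config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch

    config :pleroma, Pleroma.Search.QdrantSearch,
      qdrant_url: "http://127.0.0.1:6333",
      qdrant_api_key: "",
      openai_url: "http://127.0.0.1:11345",
      openai_healthcheck_url: "http://127.0.0.1:11345/health",
      openai_api_key: "",
      openai_model: "snowflake/snowflake-arctic-embed-xs",
      # Collection settings passed verbatim to Qdrant by create_index/0;
      # the vector size must match the embedding model in use.
      qdrant_index_configuration: %{
        vectors: %{size: 384, distance: "Cosine"}
      }
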
diff --git a/lib/pleroma/search/search_backend.ex b/lib/pleroma/search/search_backend.ex
index 68bc48cec2..f4ed13c362 100644
--- a/lib/pleroma/search/search_backend.ex
+++ b/lib/pleroma/search/search_backend.ex
@@ -21,4 +21,22 @@ defmodule Pleroma.Search.SearchBackend do
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}
+
+ @doc """
+ Create the index
+ """
+ @callback create_index() :: :ok | {:error, any()}
+
+ @doc """
+ Drop the index
+ """
+ @callback drop_index() :: :ok | {:error, any()}
+
+ @doc """
+ Healthcheck endpoints of the search backend infrastructure, monitored to control
+ processing of jobs in the Oban queue.
+
+ A 200 response is considered healthy; any other response is considered unhealthy.
+ """
+ @callback healthcheck_endpoints :: list() | nil
end
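
A minimal sketch of a module satisfying the extended behaviour, assuming the pre-existing search/add_to_index/remove_from_index callbacks keep the signatures used by DatabaseSearch above; the module name and the no-op bodies are illustrative only.

    defmodule MyApp.Search.NullBackend do
      @behaviour Pleroma.Search.SearchBackend

      @impl true
      def search(_user, _query, _options), do: []

      @impl true
      def add_to_index(_activity), do: :ok

      @impl true
      def remove_from_index(_object), do: :ok

      @impl true
      def create_index, do: :ok

      @impl true
      def drop_index, do: :ok

      @impl true
      def healthcheck_endpoints, do: nil
    end
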
diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex
index e6c4845482..87290c6c21 100644
--- a/lib/pleroma/upload.ex
+++ b/lib/pleroma/upload.ex
@@ -62,6 +62,7 @@ defmodule Pleroma.Upload do
height: integer(),
blurhash: String.t(),
description: String.t(),
+ description_map: map(),
path: String.t()
}
defstruct [
@@ -73,21 +74,44 @@ defmodule Pleroma.Upload do
:height,
:blurhash,
:description,
+ :description_map,
:path
]
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
defp get_description(upload) do
- case {upload.description, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
- {description, _} when is_binary(description) -> description
+ case {upload, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
+ {%{description_map: %{} = description_map}, _} -> description_map
+ {%{description: description}, _} when is_binary(description) -> description
{_, :filename} -> upload.name
{_, str} when is_binary(str) -> str
_ -> ""
end
end
- @spec store(source, options :: [option()]) :: {:ok, map()} | {:error, any()}
+ defp validate_description_limit(%{} = description) do
+ Enum.all?(description, fn {_, content} ->
+ String.length(content) <= Pleroma.Config.get([:instance, :description_limit])
+ end)
+ end
+
+ defp validate_description_limit(description) when is_binary(description) do
+ String.length(description) <= Pleroma.Config.get([:instance, :description_limit])
+ end
+
+ defp description_fields(%{} = description, language) do
+ %{
+ "name" => Map.get(description, language),
+ "nameMap" => description
+ }
+ end
+
+ defp description_fields(description, _) when is_binary(description) do
+ %{"name" => description}
+ end
+
+ @spec store(source, options :: [option()]) :: {:ok, map()} | {:error, any()}
@doc "Store a file. If using a `Plug.Upload{}` as the source, be sure to use `Majic.Plug` to ensure its content_type and filename is correct."
def store(upload, opts \\ []) do
opts = get_opts(opts)
@@ -96,9 +120,10 @@ def store(upload, opts \\ []) do
upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"},
{:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload),
description = get_description(upload),
+ {_, true} <- {:description_limit, validate_description_limit(description)},
{_, true} <-
- {:description_limit,
- String.length(description) <= Pleroma.Config.get([:instance, :description_limit])},
+ {:valid_locale,
+ opts[:language] == nil or Pleroma.MultiLanguage.good_locale_code?(opts[:language])},
{:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do
{:ok,
%{
@@ -113,10 +138,11 @@ def store(upload, opts \\ []) do
}
|> Maps.put_if_present("width", upload.width)
|> Maps.put_if_present("height", upload.height)
- ],
- "name" => description
+ ]
}
- |> Maps.put_if_present("blurhash", upload.blurhash)}
+ |> Map.merge(description_fields(description, opts[:language]))
+ |> Maps.put_if_present("blurhash", upload.blurhash)
+ |> Maps.put_if_present("language", opts[:language])}
else
{:description_limit, _} ->
{:error, :description_too_long}
@@ -156,6 +182,7 @@ defp get_opts(opts) do
uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])),
filters: Keyword.get(opts, :filters, Pleroma.Config.get([__MODULE__, :filters])),
description: Keyword.get(opts, :description),
+ description_map: Keyword.get(opts, :description_map),
base_url: base_url()
}
end
@@ -168,7 +195,8 @@ defp prepare_upload(%Plug.Upload{} = file, opts) do
name: file.filename,
tempfile: file.path,
content_type: file.content_type,
- description: opts.description
+ description: opts.description,
+ description_map: opts.description_map
}}
end
end
@@ -239,8 +267,12 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
""
end
- [base_url, path]
- |> Path.join()
+ if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
+ String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
+ else
+ [base_url, path]
+ |> Path.join()
+ end
end
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
@@ -277,6 +309,9 @@ def base_url do
Path.join([upload_base_url, bucket_with_namespace])
end
+ Pleroma.Uploaders.IPFS ->
+ @config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
+
_ ->
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
end
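
For illustration, when a description map is supplied the attachment data produced by store/2 now carries both the singular and the map form (values below are invented):

    # description_fields(%{"en" => "A dog", "de" => "Ein Hund"}, "en") merges into the data:
    %{
      "name" => "A dog",
      "nameMap" => %{"en" => "A dog", "de" => "Ein Hund"}
    }
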
diff --git a/lib/pleroma/uploaders/ipfs.ex b/lib/pleroma/uploaders/ipfs.ex
new file mode 100644
index 0000000000..d171e46525
--- /dev/null
+++ b/lib/pleroma/uploaders/ipfs.ex
@@ -0,0 +1,77 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Uploaders.IPFS do
+ @behaviour Pleroma.Uploaders.Uploader
+ require Logger
+
+ alias Tesla.Multipart
+
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
+ defp get_final_url(method) do
+ config = @config_impl.get([__MODULE__])
+ post_base_url = Keyword.get(config, :post_gateway_url)
+
+ Path.join([post_base_url, method])
+ end
+
+ def put_file_endpoint do
+ get_final_url("/api/v0/add")
+ end
+
+ def delete_file_endpoint do
+ get_final_url("/api/v0/files/rm")
+ end
+
+ @placeholder "{CID}"
+ def placeholder, do: @placeholder
+
+ @impl true
+ def get_file(file) do
+ b_url = Pleroma.Upload.base_url()
+
+ if String.contains?(b_url, @placeholder) do
+ {:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
+ else
+ {:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
+ end
+ end
+
+ @impl true
+ def put_file(%Pleroma.Upload{} = upload) do
+ mp =
+ Multipart.new()
+ |> Multipart.add_content_type_param("charset=utf-8")
+ |> Multipart.add_file(upload.tempfile)
+
+ case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do
+ {:ok, ret} ->
+ case Jason.decode(ret.body) do
+ {:ok, ret} ->
+ if Map.has_key?(ret, "Hash") do
+ {:ok, {:file, ret["Hash"]}}
+ else
+ {:error, "JSON doesn't contain Hash key"}
+ end
+
+ error ->
+ Logger.error("#{__MODULE__}: #{inspect(error)}")
+ {:error, "JSON decode failed"}
+ end
+
+ error ->
+ Logger.error("#{__MODULE__}: #{inspect(error)}")
+ {:error, "IPFS Gateway upload failed"}
+ end
+ end
+
+ @impl true
+ def delete_file(file) do
+ case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do
+ {:ok, %{status: 204}} -> :ok
+ error -> {:error, inspect(error)}
+ end
+ end
+end
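
An illustrative configuration for the new uploader; the URLs are placeholders. :post_gateway_url is read by get_final_url/1 above, :get_gateway_url is read by Pleroma.Upload.base_url/0, and the {CID} placeholder is replaced with the hash returned by the IPFS API.

    config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.IPFS

    config :pleroma, Pleroma.Uploaders.IPFS,
      post_gateway_url: "http://localhost:5001",
      get_gateway_url: "https://ipfs.example.com/ipfs/{CID}"
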
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index 61953e4f04..558f3a181a 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -33,6 +33,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
import Ecto.Query
import Pleroma.Web.ActivityPub.Utils
import Pleroma.Web.ActivityPub.Visibility
+ import Pleroma.Web.Gettext
import Pleroma.Webhook.Notify, only: [trigger_webhooks: 2]
require Logger
@@ -170,9 +171,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
# Splice in the child object if we have one.
activity = Maps.put_if_present(activity, :object, object)
- ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
- Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
- end)
+ Pleroma.Web.RichMedia.Card.get_by_activity(activity)
# Add local posts to search index
if local, do: Pleroma.Search.add_to_index(activity)
@@ -200,7 +199,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
id: "pleroma:fakeid"
}
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+ Pleroma.Web.RichMedia.Card.get_by_activity(activity)
{:ok, activity}
{:remote_limit_pass, _} ->
@@ -1362,6 +1361,15 @@ defp restrict_unauthenticated(query, nil) do
defp restrict_unauthenticated(query, _), do: query
+ defp restrict_rule(query, %{rule_id: rule_id}) do
+ from(
+ activity in query,
+ where: fragment("(?)->'rules' \\? (?)", activity.data, ^rule_id)
+ )
+ end
+
+ defp restrict_rule(query, _), do: query
+
defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
defp exclude_poll_votes(query, _) do
@@ -1540,7 +1548,9 @@ def fetch_activities_query(recipients, opts \\ %{}) do
|> restrict_announce_object_actor(opts)
|> restrict_object(opts)
|> restrict_filtered(opts)
+ |> restrict_rule(opts)
|> restrict_quote_url(opts)
|> maybe_restrict_deactivated_users(opts)
|> exclude_poll_votes(opts)
|> exclude_chat_messages(opts)
@@ -1615,12 +1625,37 @@ def fetch_activities_bounded(
|> Enum.reverse()
end
+ defp validate_media_description_map(%{} = map, language) do
+ with {:ok, %{}} <- Pleroma.MultiLanguage.validate_map(map),
+ true <- Pleroma.MultiLanguage.good_locale_code?(language) do
+ :ok
+ else
+ false -> :invalid_language
+ _ -> :error
+ end
+ end
+
+ defp validate_media_description_map(nil, _), do: :ok
+ defp validate_media_description_map(_, _), do: :error
+
@spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
def upload(file, opts \\ []) do
- with {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
+ with {_, :ok} <-
+ {:description_map,
+ validate_media_description_map(opts[:description_map], opts[:language])},
+ {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
obj_data = Maps.put_if_present(data, "actor", opts[:actor])
Repo.insert(%Object{data: obj_data})
+ else
+ {:description_map, :invalid_language} ->
+ {:error, dgettext("errors", "valid language must be provided with description_map")}
+
+ {:description_map, _} ->
+ {:error, dgettext("errors", "description_map invalid")}
+
+ e ->
+ e
end
end
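
A hypothetical call exercising the new options (the upload struct and user are placeholders); a description_map must be accompanied by a valid language, otherwise the :invalid_language branch above applies.

    ActivityPub.upload(
      %Plug.Upload{path: "/tmp/photo.jpg", filename: "photo.jpg", content_type: "image/jpeg"},
      actor: user.ap_id,
      language: "en",
      description_map: %{"en" => "A sunset", "pl" => "Zachód słońca"}
    )
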
diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex
index 5865119311..7516f20fc7 100644
--- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex
@@ -62,6 +62,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
when action in [:following, :followers, :pinned, :inbox, :outbox, :update_outbox]
)
+ plug(:log_inbox_metadata when action in [:inbox])
plug(:set_requester_reachable when action in [:inbox])
plug(:relay_active? when action in [:relay])
@@ -531,6 +532,13 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do
conn
end
+ defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do
+ Logger.metadata(actor: actor, type: type)
+ conn
+ end
+
+ defp log_inbox_metadata(conn, _), do: conn
+
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
with {:ok, object} <-
ActivityPub.upload(
diff --git a/lib/pleroma/web/activity_pub/builder.ex b/lib/pleroma/web/activity_pub/builder.ex
index de1777f360..0901c7d580 100644
--- a/lib/pleroma/web/activity_pub/builder.ex
+++ b/lib/pleroma/web/activity_pub/builder.ex
@@ -216,19 +216,51 @@ def create(actor, object, recipients) do
@spec note(ActivityDraft.t()) :: {:ok, map(), keyword()}
def note(%ActivityDraft{} = draft) do
+ content_fields =
+ if draft.content_html_map do
+ case Map.keys(draft.content_html_map) do
+ ["und"] ->
+ %{"content" => Map.get(draft.content_html_map, "und")}
+
+ _ ->
+ %{
+ "contentMap" => draft.content_html_map,
+ "content" => Map.get(draft.content_html_map, draft.language)
+ }
+ end
+ else
+ %{"content" => draft.content_html}
+ end
+
+ summary_fields =
+ if draft.summary_map do
+ case Map.keys(draft.summary_map) do
+ ["und"] ->
+ %{"summary" => Map.get(draft.summary_map, "und")}
+
+ _ ->
+ %{
+ "summaryMap" => draft.summary_map,
+ "summary" => Map.get(draft.summary_map, draft.language)
+ }
+ end
+ else
+ %{"summary" => draft.summary}
+ end
+
data =
%{
"type" => "Note",
"to" => draft.to,
"cc" => draft.cc,
- "content" => draft.content_html,
- "summary" => draft.summary,
"sensitive" => draft.sensitive,
"context" => draft.context,
"attachment" => draft.attachments,
"actor" => draft.user.ap_id,
"tag" => Keyword.values(draft.tags) |> Enum.uniq()
}
+ |> Map.merge(content_fields)
+ |> Map.merge(summary_fields)
|> add_in_reply_to(draft.in_reply_to)
|> add_quote(draft.quote_post)
|> Map.merge(draft.extra)
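
To illustrate the shape this produces (values invented): a draft with content_html_map %{"en" => "Hello", "pl" => "Cześć"} and language "en" yields both fields, while a map with only the "und" key falls back to a plain "content".

    # Fragment of the resulting note data; other fields omitted.
    %{
      "type" => "Note",
      "content" => "Hello",
      "contentMap" => %{"en" => "Hello", "pl" => "Cześć"}
    }
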
diff --git a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex
index 9cdb2077f2..531e75ce80 100644
--- a/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/anti_mention_spam_policy.ex
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
+ alias Pleroma.Config
alias Pleroma.User
require Pleroma.Constants
@@ -11,8 +12,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
defp user_has_posted?(%User{} = u), do: u.note_count > 0
defp user_has_age?(%User{} = u) do
- diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :second)
- diff >= :timer.seconds(30)
+ user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000)
+ diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond)
+ diff >= user_age_limit
end
defp good_reputation?(%User{} = u) do
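
The minimum account age is now configurable; an illustrative override (in milliseconds, 30_000 being the default used by Config.get/2 above):

    config :pleroma, :mrf_antimentionspam, user_age_limit: 300_000
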
diff --git a/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex
new file mode 100644
index 0000000000..9543cc5453
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex
@@ -0,0 +1,142 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
+ @moduledoc """
+ Dynamic activity filtering based on an RBL database
+
+ This MRF makes queries to a custom DNS server which will
+ respond with values indicating the classification of the domain
+ the activity originated from. This method has been widely used
+ in the email anti-spam industry for very fast reputation checks.
+
+ e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK.
+ Other values such as 127.0.0.2 may be used for specific classifications.
+
+ Information about why the host is blocked can be stored in a corresponding TXT record.
+
+ This method is fail-open, so if the queries fail, the activities are accepted.
+
+ An example of software meant for this purpose is rbldnsd, which can be found
+ at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
+ https://git.pleroma.social/feld/rbldnsd
+
+ It is highly recommended that you run your own copy of rbldnsd and use an
+ external mechanism to sync/share the contents of the zone file. This keeps
+ query latency as low as possible and prevents your DNS server from being
+ attacked in a way that makes it fail and allows content through.
+ """
+
+ @behaviour Pleroma.Web.ActivityPub.MRF.Policy
+
+ alias Pleroma.Config
+
+ require Logger
+
+ @query_retries 1
+ @query_timeout 500
+
+ @impl true
+ def filter(%{"actor" => actor} = object) do
+ actor_info = URI.parse(actor)
+
+ with {:ok, object} <- check_rbl(actor_info, object) do
+ {:ok, object}
+ else
+ _ -> {:reject, "[DNSRBLPolicy]"}
+ end
+ end
+
+ @impl true
+ def filter(object), do: {:ok, object}
+
+ @impl true
+ def describe do
+ mrf_dnsrbl =
+ Config.get(:mrf_dnsrbl)
+ |> Enum.into(%{})
+
+ {:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
+ end
+
+ @impl true
+ def config_description do
+ %{
+ key: :mrf_dnsrbl,
+ related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
+ label: "MRF DNSRBL",
+ description: "DNS RealTime Blackhole Policy",
+ children: [
+ %{
+ key: :nameserver,
+ type: {:string},
+ description: "DNSRBL Nameserver to Query (IP or hostame)",
+ suggestions: ["127.0.0.1"]
+ },
+ %{
+ key: :port,
+ type: {:string},
+ description: "Nameserver port",
+ suggestions: ["53"]
+ },
+ %{
+ key: :zone,
+ type: {:string},
+ description: "Root zone for querying",
+ suggestions: ["bl.pleroma.com"]
+ }
+ ]
+ }
+ end
+
+ defp check_rbl(%{host: actor_host}, object) do
+ with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
+ zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
+ query =
+ Enum.join([actor_host, zone], ".")
+ |> String.to_charlist()
+
+ rbl_response = rblquery(query)
+
+ if Enum.empty?(rbl_response) do
+ {:ok, object}
+ else
+ Task.start(fn ->
+ reason = rblquery(query, :txt) || "undefined"
+
+ Logger.warning(
+ "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
+ )
+ end)
+
+ :error
+ end
+ else
+ _ -> {:ok, object}
+ end
+ end
+
+ defp get_rblhost_ip(rblhost) do
+ case rblhost |> String.to_charlist() |> :inet_parse.address() do
+ {:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
+ _ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
+ end
+ end
+
+ defp rblquery(query, type \\ :a) do
+ config = Config.get([:mrf_dnsrbl])
+
+ case get_rblhost_ip(config[:nameserver]) do
+ {:ok, rblnsip} ->
+ :inet_res.lookup(query, :in, type,
+ nameservers: [{rblnsip, config[:port]}],
+ timeout: @query_timeout,
+ retry: @query_retries
+ )
+
+ _ ->
+ []
+ end
+ end
+end
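
A hedged setup sketch: the option keys mirror config_description/0 above, the values are examples only, and enabling the policy through the :mrf policy list follows the usual MRF convention.

    config :pleroma, :mrf,
      policies: [Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy]

    config :pleroma, :mrf_dnsrbl,
      nameserver: "127.0.0.1",
      port: 53,
      zone: "bl.pleroma.com"
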
diff --git a/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex b/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex
index a148cc1e7f..c02c49fb9e 100644
--- a/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex
+++ b/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex
@@ -12,18 +12,41 @@ defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrepended do
def history_awareness, do: :auto
+ def filter_by_summary(
+ %{data: %{"summaryMap" => %{} = parent_summary_map} = parent},
+ %{"summaryMap" => %{} = child_summary_map} = child
+ ) do
+ fixed_summary_map =
+ Enum.reduce(child_summary_map, %{}, fn {lang, cur}, acc ->
+ with {:ok, fixed_cur} <- fix_one(cur, parent_summary_map[lang]) do
+ Map.put(acc, lang, fixed_cur)
+ else
+ _ -> Map.put(acc, lang, cur)
+ end
+ end)
+
+ fixed_summary =
+ with {:ok, fixed} <- fix_one(child["summary"], parent["summary"]) do
+ fixed
+ else
+ _ -> child["summary"]
+ end
+
+ child
+ |> Map.put("summaryMap", fixed_summary_map)
+ |> Map.put("summary", fixed_summary)
+ end
+
def filter_by_summary(
%{data: %{"summary" => parent_summary}} = _in_reply_to,
%{"summary" => child_summary} = child
)
when not is_nil(child_summary) and byte_size(child_summary) > 0 and
not is_nil(parent_summary) and byte_size(parent_summary) > 0 do
- if (child_summary == parent_summary and not Regex.match?(@reply_prefix, child_summary)) or
- (Regex.match?(@reply_prefix, parent_summary) &&
- Regex.replace(@reply_prefix, parent_summary, "") == child_summary) do
- Map.put(child, "summary", "re: " <> child_summary)
+ with {:ok, fixed_child_summary} <- fix_one(child_summary, parent_summary) do
+ Map.put(child, "summary", fixed_child_summary)
else
- child
+ _ -> child
end
end
@@ -44,4 +67,20 @@ def filter(%{"type" => type, "object" => child_object} = object)
def filter(object), do: {:ok, object}
def describe, do: {:ok, %{}}
+
+ defp fix_one(child_summary, parent_summary)
+ when is_binary(child_summary) and child_summary != "" and is_binary(parent_summary) and
+ parent_summary != "" do
+ if (child_summary == parent_summary and not Regex.match?(@reply_prefix, child_summary)) or
+ (Regex.match?(@reply_prefix, parent_summary) &&
+ Regex.replace(@reply_prefix, parent_summary, "") == child_summary) do
+ {:ok, "re: " <> child_summary}
+ else
+ {:nochange, nil}
+ end
+ end
+
+ defp fix_one(_, _) do
+ {:nochange, nil}
+ end
end
diff --git a/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex b/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex
index 5532093cb7..eae12b0952 100644
--- a/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex
+++ b/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex
@@ -78,23 +78,52 @@ defp clean_recipients(recipients, object) do
def filter(
%{
"type" => type,
- "object" => %{"type" => "Note", "to" => to, "inReplyTo" => in_reply_to}
- } = object
+ "object" => %{"type" => "Note", "to" => to, "inReplyTo" => in_reply_to} = object
+ } = activity
)
when type in ["Create", "Update"] and is_list(to) and is_binary(in_reply_to) do
- # image-only posts from pleroma apparently reach this MRF without the content field
- content = object["object"]["content"] || ""
-
# Get the replied-to user for sorting
- replied_to_user = get_replied_to_user(object["object"])
+ replied_to_user = get_replied_to_user(object)
mention_users =
to
- |> clean_recipients(object)
+ |> clean_recipients(activity)
|> Enum.map(&User.get_cached_by_ap_id/1)
|> Enum.reject(&is_nil/1)
|> sort_replied_user(replied_to_user)
+ fixed_object =
+ with %{} = content_map <- object["contentMap"] do
+ fixed_content_map =
+ Enum.reduce(content_map, %{}, fn {lang, content}, acc ->
+ fixed_content = fix_content(content, mention_users)
+
+ Map.put(acc, lang, fixed_content)
+ end)
+
+ object
+ |> Map.put("contentMap", fixed_content_map)
+ |> Map.put("content", fix_content(object["content"] || "", mention_users))
+ else
+ _ ->
+ # image-only posts from pleroma apparently reach this MRF without the content field
+ content = object["content"] || ""
+
+ fixed_content = fix_content(content, mention_users)
+
+ Map.put(object, "content", fixed_content)
+ end
+
+ {:ok, put_in(activity["object"], fixed_object)}
+ end
+
+ @impl true
+ def filter(object), do: {:ok, object}
+
+ @impl true
+ def describe, do: {:ok, %{}}
+
+ defp fix_content(content, mention_users) do
explicitly_mentioned_uris =
extract_mention_uris_from_content(content)
|> MapSet.new()
@@ -113,25 +142,16 @@ def filter(
do: "#{added_mentions}",
else: ""
- content =
- cond do
- # For Markdown posts, insert the mentions inside the first <p> tag
- recipients_inline != "" && String.starts_with?(content, "<p>") ->
- "<p>" <> recipients_inline <> String.trim_leading(content, "<p>")
+ cond do
+ # For Markdown posts, insert the mentions inside the first <p> tag
+ recipients_inline != "" && String.starts_with?(content, "<p>") ->
+ "<p>" <> recipients_inline <> String.trim_leading(content, "<p>")
- recipients_inline != "" ->
- recipients_inline <> content
+ recipients_inline != "" ->
+ recipients_inline <> content
- true ->
- content
- end
-
- {:ok, put_in(object["object"]["content"], content)}
+ true ->
+ content
+ end
end
-
- @impl true
- def filter(object), do: {:ok, object}
-
- @impl true
- def describe, do: {:ok, %{}}
end
diff --git a/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex b/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex
index 729da4e9c9..1ee04a099a 100644
--- a/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex
@@ -85,18 +85,32 @@ defp check_ftl_removal(message) do
end
defp check_replace(%{"object" => %{} = object} = message) do
+ config = Pleroma.Config.get([:mrf_keyword, :replace])
+
replace_kw = fn object ->
["content", "name", "summary"]
- |> Enum.filter(fn field -> Map.has_key?(object, field) && object[field] end)
+ |> Enum.filter(fn field ->
+ is_map(object[field <> "Map"]) or
+ (Map.has_key?(object, field) && object[field])
+ end)
|> Enum.reduce(object, fn field, object ->
- data =
- Enum.reduce(
- Pleroma.Config.get([:mrf_keyword, :replace]),
- object[field],
- fn {pat, repl}, acc -> String.replace(acc, pat, repl) end
- )
+ field_name_map = field <> "Map"
- Map.put(object, field, data)
+ with %{} = data_map <- object[field_name_map] do
+ fixed_data_map =
+ Enum.reduce(data_map, %{}, fn {lang, content}, acc ->
+ Map.put(acc, lang, replace_keyword(content, config))
+ end)
+
+ object
+ |> Map.put(field_name_map, fixed_data_map)
+ |> Map.put(field, replace_keyword(object[field], config))
+ else
+ _ ->
+ data = replace_keyword(object[field], config)
+
+ Map.put(object, field, data)
+ end
end)
|> (fn object -> {:ok, object} end).()
end
@@ -108,6 +122,14 @@ defp check_replace(%{"object" => %{} = object} = message) do
{:ok, message}
end
+ defp replace_keyword(data, config) do
+ Enum.reduce(
+ config,
+ data,
+ fn {pat, repl}, acc -> String.replace(acc, pat, repl) end
+ )
+ end
+
@impl true
def filter(%{"type" => type, "object" => %{"content" => _content}} = message)
when type in ["Create", "Update"] do
diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
index c95d35bb91..0c5b53def0 100644
--- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
@@ -11,11 +11,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
- @adapter_options [
- pool: :media,
- recv_timeout: 10_000
- ]
-
@impl true
def history_awareness, do: :auto
@@ -27,17 +22,14 @@ defp prefetch(url) do
Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}")
- if Pleroma.Config.get(:env) == :test do
- fetch(prefetch_url)
- else
- ConcurrentLimiter.limit(__MODULE__, fn ->
- Task.start(fn -> fetch(prefetch_url) end)
- end)
- end
+ fetch(prefetch_url)
end
end
- defp fetch(url), do: HTTP.get(url, [], @adapter_options)
+ defp fetch(url) do
+ http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
+ HTTP.get(url, [], http_client_opts)
+ end
defp preload(%{"object" => %{"attachment" => attachments}} = _message) do
Enum.each(attachments, fn
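
Prefetch options are now taken from the media proxy configuration instead of a hardcoded module attribute; an illustrative setting (the code falls back to pool: :media when unset, and recv_timeout matches the value previously hardcoded above):

    config :pleroma, :media_proxy,
      proxy_opts: [
        http: [pool: :media, recv_timeout: 10_000]
      ]
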
diff --git a/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex b/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex
index 12bf4ddd27..ee08a48bc2 100644
--- a/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex
@@ -43,8 +43,16 @@ defp has_attachment?(_), do: false
defp only_mentions?(%{"object" => %{"type" => "Note", "source" => source}}) do
source =
case source do
- %{"content" => text} -> text
- _ -> source
+ %{"contentMap" => %{} = text_map} ->
+ text_map
+ |> Enum.map(fn {_, content} -> content end)
+ |> Enum.join("\n")
+
+ %{"content" => text} ->
+ text
+
+ _ ->
+ source
end
non_mentions =
diff --git a/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex b/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex
index f81e9e52a0..23b5324aed 100644
--- a/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex
@@ -6,9 +6,53 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
@moduledoc "Ensure no content placeholder is present (such as the dot from mastodon)"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
+ @placeholders [".", "<p>.</p>"]
+
@impl true
def history_awareness, do: :auto
+ @impl true
+ def filter(
+ %{
+ "type" => type,
+ "object" => %{"contentMap" => %{} = content_map, "attachment" => _} = child_object
+ } = object
+ )
+ when type in ["Create", "Update"] do
+ fixed_content_map =
+ Enum.reduce(content_map, %{}, fn {lang, content}, acc ->
+ if content in @placeholders do
+ acc
+ else
+ Map.put(acc, lang, content)
+ end
+ end)
+
+ fixed_content =
+ if child_object["content"] in @placeholders do
+ ""
+ else
+ child_object["content"]
+ end
+
+ fixed_object =
+ if fixed_content_map == %{} do
+ Map.put(
+ object,
+ "object",
+ object["object"]
+ |> Map.drop(["contentMap"])
+ |> Map.put("content", "")
+ )
+ else
+ object
+ |> put_in(["object", "contentMap"], fixed_content_map)
+ |> put_in(["object", "content"], fixed_content)
+ end
+
+ {:ok, fixed_object}
+ end
+
@impl true
def filter(
%{
@@ -16,7 +60,7 @@ def filter(
"object" => %{"content" => content, "attachment" => _} = _child_object
} = object
)
- when type in ["Create", "Update"] and content in [".", "<p>.</p>"] do
+ when type in ["Create", "Update"] and content in @placeholders do
{:ok, put_in(object, ["object", "content"], "")}
end
diff --git a/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex b/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex
index 2dfc9a901c..2493f4574c 100644
--- a/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex
+++ b/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex
@@ -16,11 +16,24 @@ def filter(%{"type" => type, "object" => child_object} = object)
when type in ["Create", "Update"] do
scrub_policy = Pleroma.Config.get([:mrf_normalize_markup, :scrub_policy])
- content =
- child_object["content"]
- |> HTML.filter_tags(scrub_policy)
+ object =
+ with %{} = content_map <- child_object["contentMap"] do
+ fixed_content_map =
+ Enum.reduce(content_map, %{}, fn {lang, content}, acc ->
+ Map.put(acc, lang, HTML.filter_tags(content, scrub_policy))
+ end)
- object = put_in(object, ["object", "content"], content)
+ object
+ |> put_in(["object", "contentMap"], fixed_content_map)
+ |> put_in(["object", "content"], HTML.filter_tags(child_object["content"], scrub_policy))
+ else
+ _ ->
+ content =
+ child_object["content"]
+ |> HTML.filter_tags(scrub_policy)
+
+ put_in(object, ["object", "content"], content)
+ end
{:ok, object}
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
index 5309e1bf14..f5ca255acd 100644
--- a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
@@ -71,27 +71,6 @@ defp fix_replies(%{"replies" => replies} = data) when not is_list(replies),
defp fix_replies(data), do: data
- defp fix_quote_url(%{"quoteUrl" => _quote_url} = data), do: data
-
- # Fedibird
- # https://github.com/fedibird/mastodon/commit/dbd7ae6cf58a92ec67c512296b4daaea0d01e6ac
- defp fix_quote_url(%{"quoteUri" => quote_url} = data) do
- Map.put(data, "quoteUrl", quote_url)
- end
-
- # Old Fedibird (bug)
- # https://github.com/fedibird/mastodon/issues/9
- defp fix_quote_url(%{"quoteURL" => quote_url} = data) do
- Map.put(data, "quoteUrl", quote_url)
- end
-
- # Misskey fallback
- defp fix_quote_url(%{"_misskey_quote" => quote_url} = data) do
- Map.put(data, "quoteUrl", quote_url)
- end
-
- defp fix_quote_url(data), do: data
-
def fix_attachments(%{"attachment" => attachment} = data) when is_map(attachment),
do: Map.put(data, "attachment", [attachment])
@@ -104,7 +83,6 @@ defp fix(data, meta) do
|> fix_url()
|> fix_tag()
|> fix_replies()
- |> fix_quote_url()
|> fix_attachments()
|> CommonFixes.fix_quote_url()
|> Transmogrifier.fix_emoji()
diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex
index 01960da831..d3902f689b 100644
--- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex
@@ -15,6 +15,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
field(:type, :string, default: "Link")
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:name, :string)
+ field(:nameMap, ObjectValidators.ContentLanguageMap)
+ field(:summary, :string)
field(:blurhash, :string)
embeds_many :url, UrlObjectValidator, primary_key: false do
@@ -44,7 +46,7 @@ def changeset(struct, data) do
|> fix_url()
struct
- |> cast(data, [:id, :type, :mediaType, :name, :blurhash])
+ |> cast(data, [:id, :type, :mediaType, :name, :nameMap, :summary, :blurhash])
|> cast_embed(:url, with: &url_changeset/2, required: true)
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex
index 22cf0cc05b..881ab35a68 100644
--- a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex
@@ -50,7 +50,9 @@ defmacro status_object_fields do
embeds_many(:tag, TagValidator)
field(:name, :string)
+ field(:nameMap, ObjectValidators.ContentLanguageMap)
field(:summary, :string)
+ field(:summaryMap, ObjectValidators.ContentLanguageMap)
field(:context, :string)
diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex
index eb56414c6b..934222cef2 100644
--- a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Language.LanguageDetector
alias Pleroma.Maps
+ alias Pleroma.MultiLanguage
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.User
@@ -14,9 +15,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
require Pleroma.Constants
- import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
- only: [is_good_locale_code?: 1]
-
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do
@@ -163,7 +161,7 @@ def maybe_add_language(object) do
get_language_from_content_map(object),
get_language_from_content(object)
]
- |> Enum.find(&is_good_locale_code?(&1))
+ |> Enum.find(&MultiLanguage.good_locale_code?(&1))
if language do
Map.put(object, "language", language)
@@ -202,6 +200,8 @@ defp get_language_from_content(%{"content" => content}) do
defp get_language_from_content(_), do: nil
+ def maybe_add_content_map(%{"contentMap" => %{}} = object), do: object
+
def maybe_add_content_map(%{"language" => language, "content" => content} = object)
when not_empty_string(language) do
Map.put(object, "contentMap", Map.put(%{}, language, content))
diff --git a/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex
index 8d7f7b9fa6..eaddebb400 100644
--- a/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex
@@ -7,10 +7,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator do
import Ecto.Changeset
+ alias Pleroma.EctoType.ActivityPub.ObjectValidators
+
@primary_key false
embedded_schema do
field(:name, :string)
+ field(:nameMap, ObjectValidators.ContentLanguageMap)
embeds_one :replies, Replies, primary_key: false do
field(:totalItems, :integer)
@@ -22,7 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator do
def changeset(struct, data) do
struct
- |> cast(data, [:name, :type])
+ |> cast(data, [:name, :nameMap, :type])
|> cast_embed(:replies, with: &replies_changeset/2)
|> validate_inclusion(:type, ["Note"])
|> validate_required([:name, :type])
diff --git a/lib/pleroma/web/activity_pub/side_effects.ex b/lib/pleroma/web/activity_pub/side_effects.ex
index fdb76fd250..c02d4e3692 100644
--- a/lib/pleroma/web/activity_pub/side_effects.ex
+++ b/lib/pleroma/web/activity_pub/side_effects.ex
@@ -22,7 +22,6 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
- alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
alias Pleroma.Workers.EventReminderWorker
alias Pleroma.Workers.PollWorker
@@ -116,7 +115,7 @@ def handle(
nil
end
- {:ok, notifications} = Notification.create_notifications(object, do_send: false)
+ {:ok, notifications} = Notification.create_notifications(object)
meta =
meta
@@ -175,7 +174,11 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
liked_object = Object.get_by_ap_id(object.data["object"])
Utils.add_like_to_object(object, liked_object)
- Notification.create_notifications(object)
+ {:ok, notifications} = Notification.create_notifications(object)
+
+ meta =
+ meta
+ |> add_notifications(notifications)
{:ok, object, meta}
end
@@ -193,7 +196,7 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
def handle(%{data: %{"type" => "Create"}} = activity, meta) do
with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
%User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
- {:ok, notifications} = Notification.create_notifications(activity, do_send: false)
+ {:ok, notifications} = Notification.create_notifications(activity)
{:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
{:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)
@@ -218,9 +221,7 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
end
end
- ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
- Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
- end)
+ Pleroma.Web.RichMedia.Card.get_by_activity(activity)
Pleroma.Search.add_to_index(Map.put(activity, :object, object))
@@ -253,11 +254,13 @@ def handle(%{data: %{"type" => "Announce"}} = object, meta) do
Utils.add_announce_to_object(object, announced_object)
- if !User.internal?(user) do
- Notification.create_notifications(object)
+ {:ok, notifications} = Notification.create_notifications(object)
- ap_streamer().stream_out(object)
- end
+ if !User.internal?(user), do: ap_streamer().stream_out(object)
+
+ meta =
+ meta
+ |> add_notifications(notifications)
{:ok, object, meta}
end
@@ -278,7 +281,11 @@ def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
reacted_object = Object.get_by_ap_id(object.data["object"])
Utils.add_emoji_reaction_to_object(object, reacted_object)
- Notification.create_notifications(object)
+ {:ok, notifications} = Notification.create_notifications(object)
+
+ meta =
+ meta
+ |> add_notifications(notifications)
{:ok, object, meta}
end
@@ -689,10 +696,7 @@ defp delete_object(object) do
defp send_notifications(meta) do
Keyword.get(meta, :notifications, [])
- |> Enum.each(fn notification ->
- Streamer.stream(["user", "user:notification"], notification)
- Push.send(notification)
- end)
+ |> Notification.send()
meta
end
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index c5871d2fc9..bb8ba6f046 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -215,7 +215,7 @@ def fix_context(object) do
def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
attachments =
attachment
- |> Enum.filter(fn data -> Map.has_key?(data, "url") end)
+ |> Enum.filter(fn data -> Map.has_key?(data, "url") or Map.has_key?(data, "href") end)
|> Enum.map(fn data ->
url =
cond do
@@ -342,10 +342,12 @@ def fix_tag(%{"tag" => %{} = tag} = object) do
def fix_tag(object), do: object
+ # prefer content over contentMap
def fix_content_map(%{"content" => content} = object) when not_empty_string(content), do: object
# content map usually only has one language so this will do for now.
- def fix_content_map(%{"contentMap" => content_map} = object) do
+ def fix_content_map(%{"contentMap" => content_map} = object)
+ when is_map(content_map) and content_map != %{} do
content_groups = Map.to_list(content_map)
{_, content} = Enum.at(content_groups, 0)
diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex
index 0f884fa460..1261ba4cf3 100644
--- a/lib/pleroma/web/activity_pub/views/user_view.ex
+++ b/lib/pleroma/web/activity_pub/views/user_view.ex
@@ -69,7 +69,12 @@ def render("user.json", %{user: %User{nickname: nil} = user}),
def render("user.json", %{user: %User{nickname: "internal." <> _} = user}) do
nickname = user.nickname |> String.split("@") |> List.first()
- render("service.json", %{user: user}) |> Map.put("preferredUsername", nickname)
+
+ render("service.json", %{user: user})
+ |> Map.merge(%{
+ "preferredUsername" => nickname,
+ "webfinger" => "acct:#{User.full_nickname(user)}"
+ })
end
def render("user.json", %{user: user}) do
@@ -124,6 +129,7 @@ def render("user.json", %{user: user}) do
"capabilities" => capabilities,
"alsoKnownAs" => user.also_known_as,
"vcard:bday" => birthday,
+ "webfinger" => "acct:#{User.full_nickname(user)}",
"vcard:Address" => user.location
}
|> Map.merge(maybe_make_image(&User.avatar_url/2, "icon", user))
diff --git a/lib/pleroma/web/admin_api/views/report_view.ex b/lib/pleroma/web/admin_api/views/report_view.ex
index 9b386fa0c0..da61660509 100644
--- a/lib/pleroma/web/admin_api/views/report_view.ex
+++ b/lib/pleroma/web/admin_api/views/report_view.ex
@@ -10,8 +10,8 @@ defmodule Pleroma.Web.AdminAPI.ReportView do
alias Pleroma.User
alias Pleroma.Web.AdminAPI
alias Pleroma.Web.AdminAPI.Report
+ alias Pleroma.Web.AdminAPI.RuleView
alias Pleroma.Web.CommonAPI.Utils
- alias Pleroma.Web.MastodonAPI.InstanceView
alias Pleroma.Web.MastodonAPI.StatusView
defdelegate merge_account_views(user), to: AdminAPI.AccountView
@@ -60,8 +60,8 @@ def render("show.json", %{
}),
state: report.data["state"],
notes: render(__MODULE__, "index_notes.json", %{notes: report.report_notes}),
- assigned_account: assigned_account,
- rules: rules(Map.get(report.data, "rules", nil))
+ rules: rules(Map.get(report.data, "rules", nil)),
+ assigned_account: assigned_account
}
end
@@ -92,8 +92,10 @@ defp rules(nil) do
end
defp rules(rule_ids) do
- rule_ids
- |> Rule.get()
- |> render_many(InstanceView, "rule.json", as: :rule)
+ rules =
+ rule_ids
+ |> Rule.get()
+
+ render(RuleView, "index.json", rules: rules)
end
end
diff --git a/lib/pleroma/web/admin_api/views/rule_view.ex b/lib/pleroma/web/admin_api/views/rule_view.ex
index f291452483..606443f051 100644
--- a/lib/pleroma/web/admin_api/views/rule_view.ex
+++ b/lib/pleroma/web/admin_api/views/rule_view.ex
@@ -13,9 +13,10 @@ def render("index.json", %{rules: rules} = _opts) do
def render("show.json", %{rule: rule} = _opts) do
%{
- id: rule.id,
+ id: to_string(rule.id),
priority: rule.priority,
- text: rule.text
+ text: rule.text,
+ hint: rule.hint
}
end
end
diff --git a/lib/pleroma/web/api_spec.ex b/lib/pleroma/web/api_spec.ex
index e4b2ba7e9a..f29af0090d 100644
--- a/lib/pleroma/web/api_spec.ex
+++ b/lib/pleroma/web/api_spec.ex
@@ -97,6 +97,7 @@ def spec(opts \\ []) do
"Frontend management",
"Instance configuration",
"Instance documents",
+ "Instance rule managment",
"Invites",
"MediaProxy cache",
"OAuth application management",
diff --git a/lib/pleroma/web/api_spec/helpers.ex b/lib/pleroma/web/api_spec/helpers.ex
index 7257253ba1..1d1a42bbdd 100644
--- a/lib/pleroma/web/api_spec/helpers.ex
+++ b/lib/pleroma/web/api_spec/helpers.ex
@@ -82,4 +82,21 @@ def empty_array_response do
def no_content_response do
Operation.response("No Content", "application/json", %Schema{type: :string, example: ""})
end
+
+ def multilang_map_of(embedded_schema, opts \\ []) do
+ struct(
+ %Schema{
+ type: :object,
+ title:
+ if embedded_schema.title do
+ "MultiLang map of #{embedded_schema.title}"
+ else
+ "MultiLang map"
+ end,
+ additionalProperties: embedded_schema,
+ description: "Map from a BCP47 language tag to a string in that language."
+ },
+ opts
+ )
+ end
end
diff --git a/lib/pleroma/web/api_spec/operations/account_operation.ex b/lib/pleroma/web/api_spec/operations/account_operation.ex
index e5ddd95538..10ed654f19 100644
--- a/lib/pleroma/web/api_spec/operations/account_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/account_operation.ex
@@ -524,9 +524,10 @@ def identity_proofs_operation do
def familiar_followers_operation do
%Operation{
tags: ["Retrieve account information"],
- summary: "Followers you know",
- operationId: "AccountController.relationships",
- description: "Returns followers of given account you know.",
+ summary: "Followers that you follow",
+ operationId: "AccountController.familiar_followers",
+ description:
+ "Obtain a list of all accounts that follow a given account, filtered for accounts you follow.",
security: [%{"oAuth" => ["read:follows"]}],
parameters: [
Operation.parameter(
diff --git a/lib/pleroma/web/api_spec/operations/admin/report_operation.ex b/lib/pleroma/web/api_spec/operations/admin/report_operation.ex
index 47d3bb9360..58669a1fc1 100644
--- a/lib/pleroma/web/api_spec/operations/admin/report_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/report_operation.ex
@@ -30,6 +30,12 @@ def index_operation do
report_state(),
"Filter by report state"
),
+ Operation.parameter(
+ :rule_id,
+ :query,
+ %Schema{type: :string},
+ "Filter by selected rule id"
+ ),
Operation.parameter(
:limit,
:query,
@@ -192,19 +198,20 @@ defp report do
}
}
},
- assigned_account:
- account_admin()
- |> Map.put(:nullable, true),
rules: %Schema{
type: :array,
items: %Schema{
type: :object,
properties: %{
- id: %Schema{type: :integer},
- text: %Schema{type: :string}
+ id: %Schema{type: :string},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string, nullable: true}
}
}
- }
+ },
+ assigned_account:
+ account_admin()
+ |> Map.put(:nullable, true)
}
}
end
diff --git a/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex b/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex
index 0f2689cc61..6d06728f4c 100644
--- a/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex
@@ -84,7 +84,8 @@ defp create_request do
required: [:text],
properties: %{
priority: %Schema{type: :integer},
- text: %Schema{type: :string}
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string}
}
}
end
@@ -94,7 +95,8 @@ defp update_request do
type: :object,
properties: %{
priority: %Schema{type: :integer},
- text: %Schema{type: :string}
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string}
}
}
end
@@ -103,10 +105,10 @@ defp rule do
%Schema{
type: :object,
properties: %{
- id: %Schema{type: :integer},
+ id: %Schema{type: :string},
priority: %Schema{type: :integer},
text: %Schema{type: :string},
- created_at: %Schema{type: :string, format: :"date-time"}
+ hint: %Schema{type: :string, nullable: true}
}
}
end
diff --git a/lib/pleroma/web/api_spec/operations/instance_operation.ex b/lib/pleroma/web/api_spec/operations/instance_operation.ex
index 9fa61db595..c83cbfd1de 100644
--- a/lib/pleroma/web/api_spec/operations/instance_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/instance_operation.ex
@@ -100,6 +100,15 @@ defp instance do
%Schema{
type: :object,
properties: %{
+ accounts: %Schema{
+ type: :object,
+ properties: %{
+ max_featured_tags: %Schema{
+ type: :integer,
+ description: "The maximum number of featured tags allowed for each account."
+ }
+ }
+ },
uri: %Schema{type: :string, description: "The domain name of the instance"},
title: %Schema{type: :string, description: "The title of the website"},
description: %Schema{
@@ -379,6 +388,19 @@ defp instance2 do
type: :object,
description: "Instance configuration",
properties: %{
+ accounts: %Schema{
+ type: :object,
+ properties: %{
+ max_featured_tags: %Schema{
+ type: :integer,
+ description: "The maximum number of featured tags allowed for each account."
+ },
+ max_pinned_statuses: %Schema{
+ type: :integer,
+ description: "The maximum number of pinned statuses for each account."
+ }
+ }
+ },
urls: %Schema{
type: :object,
properties: %{
@@ -392,6 +414,11 @@ defp instance2 do
type: :object,
description: "A map with poll limits for local statuses",
properties: %{
+ characters_reserved_per_url: %Schema{
+ type: :integer,
+ description:
+ "Each URL in a status will be assumed to be exactly this many characters."
+ },
max_characters: %Schema{
type: :integer,
description: "Posts character limit (CW/Subject included in the counter)"
@@ -458,8 +485,9 @@ defp array_of_rules do
items: %Schema{
type: :object,
properties: %{
- id: %Schema{type: :integer},
- text: %Schema{type: :string}
+ id: %Schema{type: :string},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string}
}
}
}
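Taken together with the view changes later in this diff, the extended configuration block of `GET /api/v2/instance` would presumably render roughly like this (placeholder values, not taken from this diff):

    %{
      configuration: %{
        accounts: %{
          max_featured_tags: 10,
          max_pinned_statuses: 5
        },
        statuses: %{
          max_characters: 5000,
          characters_reserved_per_url: 0
        }
      }
    }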
diff --git a/lib/pleroma/web/api_spec/operations/media_operation.ex b/lib/pleroma/web/api_spec/operations/media_operation.ex
index e6df212467..9bfdeaca90 100644
--- a/lib/pleroma/web/api_spec/operations/media_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/media_operation.ex
@@ -47,6 +47,16 @@ defp create_request do
type: :string,
description: "A plain-text description of the media, for accessibility purposes."
},
+ description_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description: "A plain-text description of the media, for accessibility purposes."
+ }),
+ language: %Schema{
+ type: :string,
+ nullable: true,
+ description: "ISO 639 language code for this status."
+ },
focus: %Schema{
type: :string,
description: "Two floating points (x,y), comma-delimited, ranging from -1.0 to 1.0."
@@ -88,6 +98,16 @@ defp update_request do
type: :string,
description: "A plain-text description of the media, for accessibility purposes."
},
+ description_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description: "A plain-text description of the media, for accessibility purposes."
+ }),
+ language: %Schema{
+ type: :string,
+ nullable: true,
+ description: "ISO 639 language code for this status."
+ },
focus: %Schema{
type: :string,
description: "Two floating points (x,y), comma-delimited, ranging from -1.0 to 1.0."
diff --git a/lib/pleroma/web/api_spec/operations/notification_operation.ex b/lib/pleroma/web/api_spec/operations/notification_operation.ex
index a6aa3b7f2d..993ea08a90 100644
--- a/lib/pleroma/web/api_spec/operations/notification_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/notification_operation.ex
@@ -208,6 +208,7 @@ defp notification_type do
"follow_request",
"poll",
"status",
+ "update",
"pleroma:participation_accepted",
"pleroma:participation_request",
"pleroma:event_reminder",
@@ -227,6 +228,7 @@ defp notification_type do
- `pleroma:chat_mention` - Someone mentioned you in a chat message
- `pleroma:report` - Someone was reported
- `status` - Someone you are subscribed to created a status
+ - `update` - A status you interacted with was updated
- `pleroma:event_reminder` – An event you are participating in or created is taking place soon
- `pleroma:event_update` – An event you are participating in was edited
- `pleroma:participation_request` - Someone wants to participate in your event
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex
index a994345dbd..0e28651914 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex
@@ -5,7 +5,6 @@
defmodule Pleroma.Web.ApiSpec.PleromaNotificationOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
- alias Pleroma.Web.ApiSpec.NotificationOperation
alias Pleroma.Web.ApiSpec.Schemas.ApiError
import Pleroma.Web.ApiSpec.Helpers
@@ -35,12 +34,7 @@ def mark_as_read_operation do
Operation.response(
"A Notification or array of Notifications",
"application/json",
- %Schema{
- anyOf: [
- %Schema{type: :array, items: NotificationOperation.notification()},
- NotificationOperation.notification()
- ]
- }
+ %Schema{type: :string}
),
400 => Operation.response("Bad Request", "application/json", ApiError)
}
diff --git a/lib/pleroma/web/api_spec/operations/report_operation.ex b/lib/pleroma/web/api_spec/operations/report_operation.ex
index fd68f67a2e..f5f88974c6 100644
--- a/lib/pleroma/web/api_spec/operations/report_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/report_operation.ex
@@ -57,7 +57,7 @@ defp create_request do
rule_ids: %Schema{
type: :array,
nullable: true,
- items: %Schema{type: :number},
+ items: %Schema{type: :string},
description: "Array of rules"
}
},
@@ -67,7 +67,7 @@ defp create_request do
"status_ids" => ["1337"],
"comment" => "bad status!",
"forward" => "false",
- "rule_ids" => [3]
+ "rule_ids" => ["3"]
}
}
end
diff --git a/lib/pleroma/web/api_spec/operations/status_operation.ex b/lib/pleroma/web/api_spec/operations/status_operation.ex
index 662036ba3b..a123fcc695 100644
--- a/lib/pleroma/web/api_spec/operations/status_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/status_operation.ex
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ApiSpec.StatusOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
alias Pleroma.Web.ApiSpec.AccountOperation
+ alias Pleroma.Web.ApiSpec.Helpers
alias Pleroma.Web.ApiSpec.Schemas.Account
alias Pleroma.Web.ApiSpec.Schemas.ApiError
alias Pleroma.Web.ApiSpec.Schemas.Attachment
@@ -600,6 +601,12 @@ defp create_request do
description:
"Text content of the status. If `media_ids` is provided, this becomes optional. Attaching a `poll` is optional while `status` is provided."
},
+ status_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description:
+ "Text content of the status. If `media_ids` is provided, this becomes optional. Attaching a `poll` is optional while `status` is provided."
+ }),
media_ids: %Schema{
nullable: true,
type: :array,
@@ -623,6 +630,12 @@ defp create_request do
description:
"Text to be shown as a warning or subject before the actual content. Statuses are generally collapsed behind this field."
},
+ spoiler_text_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description:
+ "Text to be shown as a warning or subject before the actual content. Statuses are generally collapsed behind this field."
+ }),
scheduled_at: %Schema{
type: :string,
format: :"date-time",
@@ -631,9 +644,20 @@ defp create_request do
"ISO 8601 Datetime at which to schedule a status. Providing this parameter will cause ScheduledStatus to be returned instead of Status. Must be at least 5 minutes in the future."
},
language: %Schema{
- type: :string,
- nullable: true,
- description: "ISO 639 language code for this status."
+ oneOf: [
+ %Schema{
+ type: :string,
+ nullable: true,
+ description: "ISO 639 language code for this status."
+ },
+ %Schema{
+ type: :array,
+ items: %Schema{
+ type: :string,
+ description: "ISO 639 language code for this status."
+ }
+ }
+ ]
},
# Pleroma-specific properties:
preview: %Schema{
@@ -751,18 +775,23 @@ def poll_params do
%Schema{
nullable: true,
type: :object,
- required: [:options, :expires_in],
+ required: [:expires_in],
properties: %{
options: %Schema{
type: :array,
items: %Schema{type: :string},
description: "Array of possible answers. Must be provided with `poll[expires_in]`."
},
+ options_map: %Schema{
+ type: :array,
+ items: Helpers.multilang_map_of(%Schema{type: :string}),
+ description: "Array of possible answers. Must be provided with `poll[expires_in]`."
+ },
expires_in: %Schema{
type: :integer,
nullable: true,
description:
- "Duration the poll should be open, in seconds. Must be provided with `poll[options]`"
+ "Duration the poll should be open, in seconds. Must be provided with `poll[options]` or `poll[options_map]`"
},
multiple: %Schema{
allOf: [BooleanLike],
@@ -809,6 +838,12 @@ defp status_history_response do
format: :html,
description: "HTML-encoded status content"
},
+ content_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ format: :html,
+ description: "HTML-encoded status content"
+ }),
sensitive: %Schema{
type: :boolean,
description: "Is this status marked as sensitive content?"
@@ -818,6 +853,12 @@ defp status_history_response do
description:
"Subject or summary line, below which status content is collapsed until expanded"
},
+ spoiler_text_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description:
+ "Subject or summary line, below which status content is collapsed until expanded"
+ }),
created_at: %Schema{
type: :string,
format: "date-time",
@@ -856,11 +897,22 @@ defp status_source_response do
type: :string,
description: "Raw source of status content"
},
+ text_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description: "Raw source of status content"
+ }),
spoiler_text: %Schema{
type: :string,
description:
"Subject or summary line, below which status content is collapsed until expanded"
},
+ spoiler_text_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description:
+ "Subject or summary line, below which status content is collapsed until expanded"
+ }),
content_type: %Schema{
type: :string,
description: "The content type of the source"
diff --git a/lib/pleroma/web/api_spec/schemas/attachment.ex b/lib/pleroma/web/api_spec/schemas/attachment.ex
index 2871b5f999..dfa6b440f2 100644
--- a/lib/pleroma/web/api_spec/schemas/attachment.ex
+++ b/lib/pleroma/web/api_spec/schemas/attachment.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.Helpers
require OpenApiSpex
@@ -42,6 +43,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
description:
"Alternate text that describes what is in the media attachment, to be used for the visually impaired or when media attachments do not load"
},
+ description_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ nullable: true,
+ description:
+ "Alternate text that describes what is in the media attachment, to be used for the visually impaired or when media attachments do not load"
+ }),
type: %Schema{
type: :string,
enum: ["image", "video", "audio", "unknown"],
@@ -50,7 +58,11 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
pleroma: %Schema{
type: :object,
properties: %{
- mime_type: %Schema{type: :string, description: "mime type of the attachment"}
+ mime_type: %Schema{type: :string, description: "mime type of the attachment"},
+ name: %Schema{
+ type: :string,
+ description: "Name of the attachment, typically the filename"
+ }
}
}
},
@@ -62,6 +74,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
preview_url: "someurl",
text_url: "someurl",
description: nil,
+ description_map: %{},
pleroma: %{mime_type: "image/png"}
}
})
diff --git a/lib/pleroma/web/api_spec/schemas/poll.ex b/lib/pleroma/web/api_spec/schemas/poll.ex
index 20cf5b061b..584bb48626 100644
--- a/lib/pleroma/web/api_spec/schemas/poll.ex
+++ b/lib/pleroma/web/api_spec/schemas/poll.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.ApiSpec.Schemas.Poll do
alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.Helpers
alias Pleroma.Web.ApiSpec.Schemas.Emoji
alias Pleroma.Web.ApiSpec.Schemas.FlakeID
@@ -52,6 +53,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Poll do
type: :object,
properties: %{
title: %Schema{type: :string},
+ title_map: Helpers.multilang_map_of(%Schema{type: :string}),
votes_count: %Schema{type: :integer}
}
},
@@ -81,10 +83,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Poll do
options: [
%{
title: "accept",
+ title_map: %{"en" => "accept", "cmn" => "接受"},
votes_count: 6
},
%{
title: "deny",
+ title_map: %{"en" => "deny", "cmn" => "拒绝"},
votes_count: 4
}
],
diff --git a/lib/pleroma/web/api_spec/schemas/status.ex b/lib/pleroma/web/api_spec/schemas/status.ex
index de25770706..6f7f35cdf5 100644
--- a/lib/pleroma/web/api_spec/schemas/status.ex
+++ b/lib/pleroma/web/api_spec/schemas/status.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.ApiSpec.Schemas.Status do
alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.Helpers
alias Pleroma.Web.ApiSpec.Schemas.Account
alias Pleroma.Web.ApiSpec.Schemas.Attachment
alias Pleroma.Web.ApiSpec.Schemas.Emoji
@@ -59,16 +60,34 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
format: :uri,
description: "Preview thumbnail"
},
+ image_description: %Schema{
+ type: :string,
+ description: "Alternate text that describes what is in the thumbnail"
+ },
title: %Schema{type: :string, description: "Title of linked resource"},
description: %Schema{type: :string, description: "Description of preview"}
}
},
content: %Schema{type: :string, format: :html, description: "HTML-encoded status content"},
+ content_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ format: :html,
+ description: "HTML-encoded status content"
+ }),
text: %Schema{
type: :string,
description: "Original unformatted content in plain text",
nullable: true
},
+ text_map:
+ Helpers.multilang_map_of(
+ %Schema{
+ type: :string,
+ description: "Original unformatted content in plain text"
+ },
+ nullable: true
+ ),
created_at: %Schema{
type: :string,
format: "date-time",
@@ -153,6 +172,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
description:
"A map consisting of alternate representations of the `content` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`"
},
+ content_map: %Schema{
+ type: :object,
+ additionalProperties: Helpers.multilang_map_of(%Schema{type: :string}),
+ description:
+ "A map consisting of alternate representations of the `content_map` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`"
+ },
content_type: %Schema{
type: :string,
nullable: true,
@@ -242,6 +267,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
description:
"A map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`."
},
+ spoiler_text_map: %Schema{
+ type: :object,
+ additionalProperties: Helpers.multilang_map_of(%Schema{type: :string}),
+ description:
+ "A map consisting of alternate representations of the `spoiler_text_map` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`."
+ },
thread_muted: %Schema{
type: :boolean,
description: "`true` if the thread the post belongs to is muted"
@@ -283,6 +314,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
description:
"Subject or summary line, below which status content is collapsed until expanded"
},
+ spoiler_text_map:
+ Helpers.multilang_map_of(%Schema{
+ type: :string,
+ description:
+ "Subject or summary line, below which status content is collapsed until expanded"
+ }),
tags: %Schema{type: :array, items: Tag},
uri: %Schema{
type: :string,
@@ -360,6 +397,10 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
"bookmarked" => false,
"card" => nil,
"content" => "foobar",
+ "content_map" => %{
+ "en" => "mew mew",
+ "cmn" => "喵喵"
+ },
"created_at" => "2020-04-07T19:48:51.000Z",
"emojis" => [],
"favourited" => false,
@@ -374,6 +415,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
"pinned" => false,
"pleroma" => %{
"content" => %{"text/plain" => "foobar"},
+ "content_map" => %{
+ "text/plain" => %{
+ "en" => "mew mew",
+ "cmn" => "喵喵"
+ }
+ },
"context" => "http://localhost:4001/objects/8b4c0c80-6a37-4d2a-b1b9-05a19e3875aa",
"conversation_id" => 345_972,
"direct_conversation_id" => nil,
@@ -382,6 +429,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
"in_reply_to_account_acct" => nil,
"local" => true,
"spoiler_text" => %{"text/plain" => ""},
+ "spoiler_text_map" => %{
+ "text/plain" => %{
+ "en" => "",
+ "cmn" => ""
+ }
+ },
"thread_muted" => false,
"quotes_count" => 0
},
@@ -392,6 +445,10 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
"replies_count" => 0,
"sensitive" => false,
"spoiler_text" => "",
+ "spoiler_text_map" => %{
+ "en" => "",
+ "cmn" => ""
+ },
"tags" => [],
"uri" => "http://localhost:4001/objects/0f5dad44-0e9e-4610-b377-a2631e499190",
"url" => "http://localhost:4001/notice/9toJCu5YZW7O7gfvH6",
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex
index 3df2a7bdd7..02490c0563 100644
--- a/lib/pleroma/web/common_api.ex
+++ b/lib/pleroma/web/common_api.ex
@@ -675,8 +675,7 @@ defp get_report_rules(nil) do
defp get_report_rules(rule_ids) do
rule_ids
- |> Rule.get()
- |> Enum.map(& &1.id)
+ |> Enum.filter(&Rule.exists?/1)
end
def update_report_state(activity_ids, state) when is_list(activity_ids) do
diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex
index a77f415b4b..ffd48a9720 100644
--- a/lib/pleroma/web/common_api/activity_draft.ex
+++ b/lib/pleroma/web/common_api/activity_draft.ex
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
alias Pleroma.Activity
alias Pleroma.Conversation.Participation
alias Pleroma.Language.LanguageDetector
+ alias Pleroma.MultiLanguage
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.Web.ActivityPub.Builder
@@ -13,9 +14,6 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils
- import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
- only: [is_good_locale_code?: 1]
-
import Pleroma.Web.Gettext
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
@@ -25,8 +23,11 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
errors: [],
user: nil,
params: %{},
+ language: nil,
status: nil,
+ status_map: nil,
summary: nil,
+ summary_map: nil,
full_payload: nil,
attachments: [],
in_reply_to: nil,
@@ -37,13 +38,13 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
extra: nil,
emoji: %{},
content_html: nil,
+ content_html_map: nil,
mentions: [],
tags: [],
to: [],
cc: [],
context: nil,
sensitive: false,
- language: nil,
object: nil,
preview?: false,
changes: %{},
@@ -64,7 +65,8 @@ def create(user, params) do
|> status()
|> summary()
|> with_valid(&attachments/1)
- |> full_payload()
+ |> with_valid(&language/1)
+ |> with_valid(&full_payload/1)
|> expires_at()
|> poll()
|> with_valid(&in_reply_to/1)
@@ -72,10 +74,9 @@ def create(user, params) do
|> with_valid("e_post/1)
|> with_valid(&visibility/1)
|> with_valid("ing_visibility/1)
- |> content()
+ |> with_valid(&content/1)
|> with_valid(&to_and_cc/1)
|> with_valid(&context/1)
- |> with_valid(&language/1)
|> sensitive()
|> with_valid(&object/1)
|> preview?()
@@ -149,15 +150,80 @@ defp put_params(draft, params) do
%__MODULE__{draft | params: params}
end
+ defp language(%{params: %{language: language}} = draft) when not is_nil(language) do
+ if MultiLanguage.good_locale_code?(language) do
+ %__MODULE__{draft | language: language}
+ else
+ add_error(
+ draft,
+ dgettext("errors", "language \"%{language}\" is invalid", language: language)
+ )
+ end
+ end
+
+ defp language(%{status: status} = draft) when is_binary(status) do
+ detected_language =
+ LanguageDetector.detect(
+ draft.status <> " " <> (draft.summary || draft.params[:summary] || draft.params[:name])
+ )
+
+ if MultiLanguage.good_locale_code?(detected_language) do
+ %__MODULE__{
+ draft
+ | params: Map.put(draft.params, :language, detected_language),
+ language: detected_language
+ }
+ else
+ draft
+ end
+ end
+
+ defp language(draft), do: draft
+
+ defp status(%{params: %{status_map: %{} = status_map}} = draft) do
+ with {:ok, %{}} <- MultiLanguage.validate_map(status_map) do
+ %__MODULE__{draft | status_map: status_map}
+ else
+ _ -> add_error(draft, dgettext("errors", "status_map is not a valid multilang map"))
+ end
+ end
+
defp status(%{params: %{status: status}} = draft) do
%__MODULE__{draft | status: String.trim(status)}
end
+ defp summary(%{params: %{spoiler_text_map: %{} = spoiler_text_map}} = draft) do
+ with {:ok, %{}} <- MultiLanguage.validate_map(spoiler_text_map) do
+ %__MODULE__{draft | summary_map: spoiler_text_map}
+ else
+ _ -> add_error(draft, dgettext("errors", "spoiler_text_map is not a valid multilang map"))
+ end
+ end
+
defp summary(%{params: params} = draft) do
%__MODULE__{draft | summary: Map.get(params, :spoiler_text, "")}
end
- defp full_payload(%{status: status, summary: summary} = draft) do
+ defp full_payload(%{status: status, status_map: nil} = draft) do
+ full_payload(%__MODULE__{
+ draft
+ | status_map: MultiLanguage.str_to_map(status, lang: draft.language)
+ })
+ end
+
+ defp full_payload(%{summary: summary, summary_map: nil} = draft) do
+ full_payload(%__MODULE__{
+ draft
+ | summary_map: MultiLanguage.str_to_map(summary, lang: draft.language)
+ })
+ end
+
+ defp full_payload(%{status_map: %{} = status_map, summary_map: %{} = summary_map} = draft) do
+ status = status_map |> Enum.reduce("", fn {_lang, content}, acc -> acc <> " " <> content end)
+
+ summary =
+ summary_map |> Enum.reduce("", fn {_lang, content}, acc -> acc <> " " <> content end)
+
full_payload = String.trim(status <> summary)
case Utils.validate_character_limit(full_payload, draft.attachments) do
@@ -178,14 +244,22 @@ defp attachments(%{params: params} = draft) do
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
- defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do
- case Activity.get_by_id(id) do
- %Activity{} = activity ->
- %__MODULE__{draft | in_reply_to: activity}
+ defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do
+ add_error(draft, dgettext("errors", "Cannot reply to a deleted status"))
+ end
- _ ->
- add_error(draft, dgettext("errors", "The post being replied to was deleted"))
- end
+ defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do
+ activity = Activity.get_by_id(id)
+
+ params =
+ if is_nil(activity) do
+ # Deleted activities are returned as nil
+ Map.put(params, :in_reply_to_status_id, :deleted)
+ else
+ Map.put(params, :in_reply_to_status_id, activity)
+ end
+
+ in_reply_to(%{draft | params: params})
end
defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do
@@ -265,7 +339,9 @@ defp poll(draft) do
end
defp content(%{mentions: mentions} = draft) do
- {content_html, mentioned_users, tags} = Utils.make_content_html(draft)
+ {content_html_or_map, mentioned_users, tags} = Utils.make_content_html(draft)
+
+ {content_html, content_html_map} = differentiate_string_map(content_html_or_map)
mentioned_ap_ids =
Enum.map(mentioned_users, fn {_, mentioned_user} -> mentioned_user.ap_id end)
@@ -275,7 +351,13 @@ defp content(%{mentions: mentions} = draft) do
|> Kernel.++(mentioned_ap_ids)
|> Utils.get_addressed_users(draft.params[:to])
- %__MODULE__{draft | content_html: content_html, mentions: mentions, tags: tags}
+ %__MODULE__{
+ draft
+ | content_html: content_html,
+ content_html_map: content_html_map,
+ mentions: mentions,
+ tags: tags
+ }
end
defp to_and_cc(draft) do
@@ -293,20 +375,6 @@ defp sensitive(draft) do
%__MODULE__{draft | sensitive: sensitive}
end
- defp language(draft) do
- language =
- draft.params[:language] ||
- LanguageDetector.detect(
- draft.content_html <> " " <> (draft.summary || draft.params[:name])
- )
-
- if is_good_locale_code?(language) do
- %__MODULE__{draft | language: language}
- else
- draft
- end
- end
-
defp object(draft) do
emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji)
@@ -341,10 +409,12 @@ defp object(draft) do
object =
note_data
|> Map.put("emoji", emoji)
- |> Map.put("source", %{
- "content" => draft.status,
- "mediaType" => Utils.get_content_type(draft.params[:content_type])
- })
+ |> Map.put(
+ "source",
+ Map.merge(get_source_map(draft), %{
+ "mediaType" => Utils.get_content_type(draft.params[:content_type])
+ })
+ )
|> Map.put("generator", draft.params[:generator])
|> Map.put("content_type", draft.params[:content_type])
|> Map.put("language", draft.language)
@@ -461,4 +531,18 @@ defp add_error(draft, message) do
defp validate(%{valid?: true} = draft), do: {:ok, draft}
defp validate(%{errors: [message | _]}), do: {:error, message}
+
+ defp differentiate_string_map(%{} = map), do: {nil, map}
+ defp differentiate_string_map(str) when is_binary(str), do: {str, nil}
+
+ defp get_source_map(%{status_map: %{} = status_map} = draft) do
+ %{
+ "content" => Map.get(draft, :status),
+ "contentMap" => status_map
+ }
+ end
+
+ defp get_source_map(%{status: status} = _draft) do
+ %{"content" => status}
+ end
end
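The draft pipeline above leans on `MultiLanguage.str_to_map/2`; judging from its call sites it presumably wraps a plain string under the given language code, falling back to `"und"` (an assumption, the helper itself is not part of this hunk):

    # assumed behaviour, inferred from full_payload/1 above
    MultiLanguage.str_to_map("hello", lang: "en")
    #=> %{"en" => "hello"}

    MultiLanguage.str_to_map("hello", lang: nil)
    #=> %{"und" => "hello"}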
diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex
index accbe4a72c..23159d2d3a 100644
--- a/lib/pleroma/web/common_api/utils.ex
+++ b/lib/pleroma/web/common_api/utils.ex
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.CommonAPI.Utils do
alias Pleroma.Config
alias Pleroma.Conversation.Participation
alias Pleroma.Formatter
+ alias Pleroma.MultiLanguage
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
@@ -147,22 +148,36 @@ def make_poll_data(%{"poll" => %{"expires_in" => expires_in}} = data)
|> make_poll_data()
end
- def make_poll_data(%{poll: %{options: options, expires_in: expires_in}} = data)
- when is_list(options) do
+ def make_poll_data(%{poll: %{options_map: options_map, expires_in: expires_in}} = data)
+ when is_list(options_map) do
limits = Config.get([:instance, :poll_limits])
- options = options |> Enum.uniq()
+ options_map = options_map |> Enum.uniq()
with :ok <- validate_poll_expiration(expires_in, limits),
- :ok <- validate_poll_options_amount(options, limits),
- :ok <- validate_poll_options_length(options, limits) do
+ :ok <- validate_poll_options_map(options_map),
+ :ok <- validate_poll_options_amount(options_map, limits),
+ :ok <- validate_poll_options_length(options_map, limits) do
{option_notes, emoji} =
- Enum.map_reduce(options, %{}, fn option, emoji ->
- note = %{
- "name" => option,
- "type" => "Note",
- "replies" => %{"type" => "Collection", "totalItems" => 0}
- }
+ Enum.map_reduce(options_map, %{}, fn option, emoji ->
+ is_single_language = Map.keys(option) == ["und"]
+
+ name_attrs =
+ if is_single_language do
+ %{"name" => option["und"]}
+ else
+ %{
+ "name" => Map.get(option, data.language),
+ "nameMap" => option
+ }
+ end
+
+ note =
+ %{
+ "type" => "Note",
+ "replies" => %{"type" => "Collection", "totalItems" => 0}
+ }
+ |> Map.merge(name_attrs)
{note, Map.merge(emoji, Pleroma.Emoji.Formatter.get_emoji_map(option))}
end)
@@ -179,6 +194,19 @@ def make_poll_data(%{poll: %{options: options, expires_in: expires_in}} = data)
end
end
+ def make_poll_data(%{poll: %{options: options}} = data) when is_list(options) do
+ new_poll =
+ data.poll
+ |> Map.put(
+ :options_map,
+ Enum.map(options, &MultiLanguage.str_to_map(&1, lang: data[:language]))
+ )
+
+ data
+ |> Map.put(:poll, new_poll)
+ |> make_poll_data()
+ end
+
def make_poll_data(%{"poll" => poll}) when is_map(poll) do
{:error, "Invalid poll"}
end
@@ -187,6 +215,20 @@ def make_poll_data(_data) do
{:ok, {%{}, %{}}}
end
+ defp validate_poll_options_map(options) do
+ if Enum.all?(options, fn opt ->
+ with {:ok, %{}} <- MultiLanguage.validate_map(opt) do
+ true
+ else
+ _ -> false
+ end
+ end) do
+ :ok
+ else
+ {:error, dgettext("errors", "Poll option map not valid")}
+ end
+ end
+
defp validate_poll_options_amount(options, %{max_options: max_options}) do
cond do
Enum.count(options) < 2 ->
@@ -200,8 +242,11 @@ defp validate_poll_options_amount(options, %{max_options: max_options}) do
end
end
- defp validate_poll_options_length(options, %{max_option_chars: max_option_chars}) do
- if Enum.any?(options, &(String.length(&1) > max_option_chars)) do
+ defp validate_poll_options_length(options_map, %{max_option_chars: max_option_chars}) do
+ if Enum.any?(options_map, fn option ->
+ Enum.reduce(option, 0, fn {_lang, cur}, acc -> acc + String.length(cur) end)
+ |> Kernel.>(max_option_chars)
+ end) do
{:error, "Poll options cannot be longer than #{max_option_chars} characters each"}
else
:ok
@@ -231,7 +276,7 @@ def make_content_html(%ActivityDraft{} = draft) do
[]
end
- draft.status
+ draft
|> format_input(content_type, options)
|> maybe_add_attachments(draft.attachments, attachment_links)
end
@@ -253,6 +298,15 @@ def make_context(_, _), do: Utils.generate_context_id()
def maybe_add_attachments(parsed, _attachments, false = _no_links), do: parsed
+ def maybe_add_attachments({%{} = text_map, mentions, tags}, attachments, _no_links) do
+ text_map =
+ Enum.reduce(text_map, %{}, fn {lang, text}, acc ->
+ Map.put(acc, lang, add_attachments(text, attachments))
+ end)
+
+ {text_map, mentions, tags}
+ end
+
def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
text = add_attachments(text, attachments)
{text, mentions, tags}
@@ -273,6 +327,31 @@ defp build_attachment_link(_), do: ""
def format_input(text, format, options \\ [])
+ def format_input(%ActivityDraft{status_map: status_map} = _draft, format, options)
+ when is_map(status_map) do
+ {content_map, mentions, tags} =
+ Enum.reduce(
+ status_map,
+ {%{}, [], []},
+ fn {lang, status}, {content_map, mentions, tags} ->
+ {cur_content, cur_mentions, cur_tags} = format_input(status, format, options)
+
+ {
+ Map.put(content_map, lang, cur_content),
+ mentions ++ cur_mentions,
+ tags ++ cur_tags
+ }
+ end
+ )
+
+ {content_map, Enum.uniq(mentions), Enum.uniq(tags)}
+ end
+
+ def format_input(%ActivityDraft{status: status} = _draft, format, options)
+ when is_binary(status) do
+ format_input(status, format, options)
+ end
+
@doc """
Formatting text to plain text, BBCode, HTML, or Markdown
"""
diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex
index b7f7414f65..6402b6741b 100644
--- a/lib/pleroma/web/endpoint.ex
+++ b/lib/pleroma/web/endpoint.ex
@@ -23,6 +23,8 @@ defmodule Pleroma.Web.Endpoint do
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
+ plug(Pleroma.Web.Plugs.LoggerMetadataPath)
+
plug(Pleroma.Web.Plugs.SetLocalePlug)
plug(CORSPlug)
plug(Pleroma.Web.Plugs.HTTPSecurityPlug)
diff --git a/lib/pleroma/web/federator.ex b/lib/pleroma/web/federator.ex
index 1f2c3835a8..4b30fd21d2 100644
--- a/lib/pleroma/web/federator.ex
+++ b/lib/pleroma/web/federator.ex
@@ -44,7 +44,7 @@ def incoming_ap_doc(%{params: params, req_headers: req_headers}) do
end
def incoming_ap_doc(%{"type" => "Delete"} = params) do
- ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3)
+ ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3, queue: :slow)
end
def incoming_ap_doc(params) do
diff --git a/lib/pleroma/web/mastodon_api/controllers/account_controller.ex b/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
index bc8e895f77..b93cf6987b 100644
--- a/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
@@ -643,7 +643,7 @@ def endorsements(%{assigns: %{user: user}} = conn, params) do
@doc "GET /api/v1/accounts/familiar_followers"
def familiar_followers(
%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
- _
+ _id
) do
users =
User.get_all_by_ids(List.wrap(id))
diff --git a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
index a861273a8a..831e67e1d2 100644
--- a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
@@ -5,6 +5,7 @@
defmodule Pleroma.Web.MastodonAPI.MediaController do
use Pleroma.Web, :controller
+ alias Pleroma.MultiLanguage
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.Plugs.OAuthScopesPlug
@@ -24,15 +25,36 @@ def create(
conn,
_
) do
- with {:ok, object} <-
+ with language <- Map.get(data, :language),
+ {_, true} <-
+ {:valid_locale,
+ Map.get(data, :description_map) == nil or MultiLanguage.good_locale_code?(language)},
+ {_, true} <-
+ {:locale_included,
+ Map.get(data, :description_map) == nil or
+ Map.has_key?(Map.get(data, :description_map), language)},
+ {:ok, object} <-
ActivityPub.upload(
file,
actor: user.ap_id,
- description: Map.get(data, :description)
+ description: Map.get(data, :description),
+ description_map: Map.get(data, :description_map),
+ language: language
) do
attachment_data = Map.put(object.data, "id", object.id)
render(conn, "attachment.json", %{attachment: attachment_data})
+ else
+ {:valid_locale, _} ->
+ render_error(conn, 422, "valid language must be provided with description_map")
+
+ {:locale_included, _} ->
+ render_error(conn, 422, "the provided language must be included in description_map")
+
+ {:error, e} ->
+ conn
+ |> put_status(:unprocessable_entity)
+ |> json(%{error: e})
end
end
@@ -44,23 +66,82 @@ def create2(
conn,
_
) do
- with {:ok, object} <-
+ with language <- Map.get(data, :language),
+ {_, true} <-
+ {:valid_locale,
+ Map.get(data, :description_map) == nil or MultiLanguage.good_locale_code?(language)},
+ {_, true} <-
+ {:locale_included,
+ Map.get(data, :description_map) == nil or
+ Map.has_key?(Map.get(data, :description_map), language)},
+ {:ok, object} <-
ActivityPub.upload(
file,
actor: user.ap_id,
- description: Map.get(data, :description)
+ description: Map.get(data, :description),
+ description_map: Map.get(data, :description_map),
+ language: language
) do
attachment_data = Map.put(object.data, "id", object.id)
conn
|> put_status(202)
|> render("attachment.json", %{attachment: attachment_data})
+ else
+ {:valid_locale, _} ->
+ render_error(conn, 422, "valid language must be provided with description_map")
+
+ {:locale_included, _} ->
+ render_error(conn, 422, "the provided language must be included in description_map")
+
+ {:error, e} ->
+ conn
+ |> put_status(:unprocessable_entity)
+ |> json(%{error: e})
end
end
def create2(_conn, _data), do: {:error, :bad_request}
@doc "PUT /api/v1/media/:id"
+ def update(
+ %{
+ assigns: %{user: user},
+ private: %{
+ open_api_spex: %{
+ body_params: %{description_map: %{} = description_map} = body_params,
+ params: %{id: id}
+ }
+ }
+ } = conn,
+ _
+ ) do
+ with %Object{} = object <- Object.get_by_id(id),
+ :ok <- Object.authorize_access(object, user),
+ language = Map.get(body_params, :language, object.data["language"]),
+ {_, true} <- {:valid_locale, MultiLanguage.good_locale_code?(language)},
+ {_, true} <- {:locale_included, Map.has_key?(description_map, language)},
+ {_, {:ok, %{}}} <- {:description_map, MultiLanguage.validate_map(description_map)},
+ {:ok, %Object{data: data}} <-
+ Object.update_data(object, %{
+ "name" => Map.get(description_map, language),
+ "nameMap" => description_map
+ }) do
+ attachment_data = Map.put(data, "id", object.id)
+
+ render(conn, "attachment.json", %{attachment: attachment_data})
+ else
+ {:valid_locale, _} ->
+ render_error(conn, 422, "valid language must be provided with description_map")
+
+ {:locale_included, _} ->
+ render_error(conn, 422, "the provided language must be included in description_map")
+
+ {:description_map, _} ->
+ render_error(conn, 422, "description_map not valid")
+ end
+ end
+
def update(
%{
assigns: %{user: user},
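The new guards in this controller mean an upload that supplies `description_map` must also carry a `language` that is a valid locale and is present as a key in that map; a hypothetical parameter set that passes both checks:

    %{
      # placeholder for a %Plug.Upload{} struct
      file: upload,
      language: "en",
      description_map: %{"en" => "a black cat", "cmn" => "一只黑猫"}
    }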
diff --git a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
index 4a50c9acc8..319dded66c 100644
--- a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
@@ -38,7 +38,6 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
when action in [
:index,
:show,
- :card,
:context,
:show_history,
:show_source,
@@ -152,8 +151,7 @@ def create(
)
when not is_nil(scheduled_at) do
params =
- params
- |> Map.put(:in_reply_to_status_id, params[:in_reply_to_id])
+ Map.put(params, :in_reply_to_status_id, params[:in_reply_to_id])
|> Map.put(:generator, conn.assigns.application)
attrs = %{
@@ -209,12 +207,45 @@ def create(
|> do_create
end
+ def create(
+ %{
+ assigns: %{user: _user},
+ private: %{open_api_spex: %{body_params: %{status_map: _}}}
+ } = conn,
+ _
+ ) do
+ create(
+ put_in(
+ conn,
+ [Access.key(:private), Access.key(:open_api_spex), Access.key(:body_params), :status],
+ ""
+ ),
+ %{}
+ )
+ end
+
+ def create(
+ %{
+ assigns: %{user: _user},
+ private: %{open_api_spex: %{body_params: %{media_ids: _}}}
+ } = conn,
+ _
+ ) do
+ create(
+ put_in(
+ conn,
+ [Access.key(:private), Access.key(:open_api_spex), Access.key(:body_params), :status],
+ ""
+ ),
+ %{}
+ )
+ end
+
defp do_create(
%{assigns: %{user: user}, private: %{open_api_spex: %{body_params: params}}} = conn
) do
params =
- params
- |> Map.put(:in_reply_to_status_id, params[:in_reply_to_id])
+ Map.put(params, :in_reply_to_status_id, params[:in_reply_to_id])
|> Map.put(:generator, conn.assigns.application)
with {:ok, activity} <- CommonAPI.post(user, params) do
@@ -483,21 +514,6 @@ def unmute_conversation(
end
end
- @doc "GET /api/v1/statuses/:id/card"
- @deprecated "https://github.com/tootsuite/mastodon/pull/11213"
- def card(
- %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: status_id}}}} = conn,
- _
- ) do
- with %Activity{} = activity <- Activity.get_by_id(status_id),
- true <- Visibility.visible_for_user?(activity, user) do
- data = Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- render(conn, "card.json", %{embed: data})
- else
- _ -> render_error(conn, :not_found, "Record not found")
- end
- end
-
@doc "GET /api/v1/statuses/:id/favourited_by"
def favourited_by(
%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
diff --git a/lib/pleroma/web/mastodon_api/views/instance_view.ex b/lib/pleroma/web/mastodon_api/views/instance_view.ex
index a1532c70c5..ef2de7816a 100644
--- a/lib/pleroma/web/mastodon_api/views/instance_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/instance_view.ex
@@ -7,10 +7,8 @@ defmodule Pleroma.Web.MastodonAPI.InstanceView do
alias Pleroma.Config
alias Pleroma.Domain
- alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.AdminAPI.DomainView
- alias Pleroma.Web.MastodonAPI
@mastodon_api_level "2.7.2"
@@ -37,8 +35,8 @@ def render("show.json", _) do
|> to_string,
registrations: Keyword.get(instance, :registrations_open),
approval_required: Keyword.get(instance, :account_approval_required),
- configuration: configuration(),
contact_account: contact_account(Keyword.get(instance, :contact_username)),
+ configuration: configuration(),
rules: render(__MODULE__, "rules.json"),
# Extra (not present in Mastodon):
max_toot_chars: Keyword.get(instance, :limit),
@@ -96,8 +94,9 @@ def render("rules.json", _) do
def render("rule.json", %{rule: rule}) do
%{
- id: rule.id,
- text: rule.text
+ id: to_string(rule.id),
+ text: rule.text,
+ hint: rule.hint || ""
}
end
@@ -183,6 +182,7 @@ def features do
end,
"events",
"multitenancy",
+ "pleroma:multi_language",
"bigbuffet"
]
|> Enum.filter(& &1)
@@ -190,18 +190,16 @@ def features do
defp common_information(instance) do
%{
- title: Keyword.get(instance, :name),
- version: "#{@mastodon_api_level} (compatible; #{Pleroma.Application.compat_version()})",
languages: Keyword.get(instance, :languages, ["en"]),
rules: render(__MODULE__, "rules.json"),
- soapbox: %{
- version: Soapbox.version()
- }
+ title: Keyword.get(instance, :name),
+ version: "#{@mastodon_api_level} (compatible; #{Pleroma.Application.compat_version()})"
}
end
def federation do
quarantined = Config.get([:instance, :quarantined_instances], [])
+ rejected = Config.get([:instance, :rejected_instances], [])
if Config.get([:mrf, :transparency]) do
{:ok, data} = MRF.describe()
@@ -221,6 +219,12 @@ def federation do
|> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end)
|> Map.new()
})
+ |> Map.put(
+ :rejected_instances,
+ rejected
+ |> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end)
+ |> Map.new()
+ )
else
%{}
end
@@ -243,10 +247,10 @@ defp contact_account("@" <> username) do
end
defp contact_account(username) do
- user = User.get_cached_by_nickname(username)
+ user = Pleroma.User.get_cached_by_nickname(username)
if user do
- MastodonAPI.AccountView.render("show.json", %{user: user, for: nil})
+ Pleroma.Web.MastodonAPI.AccountView.render("show.json", %{user: user, for: nil})
else
nil
end
@@ -277,6 +281,8 @@ defp configuration do
defp configuration2 do
configuration()
+ |> put_in([:accounts, :max_pinned_statuses], Config.get([:instance, :max_pinned_statuses], 0))
+ |> put_in([:statuses, :characters_reserved_per_url], 0)
|> Map.merge(%{
translation: %{enabled: Pleroma.Language.Translation.configured?()},
urls: %{
diff --git a/lib/pleroma/web/mastodon_api/views/notification_view.ex b/lib/pleroma/web/mastodon_api/views/notification_view.ex
index c067c62152..f53114cd99 100644
--- a/lib/pleroma/web/mastodon_api/views/notification_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/notification_view.ex
@@ -108,6 +108,9 @@ def render(
type when type in ["mention", "status", "poll", "pleroma:event_reminder"] ->
put_status(response, activity, reading_user, status_render_opts)
+ "status" ->
+ put_status(response, activity, reading_user, status_render_opts)
+
"favourite" ->
put_status(response, parent_activity_fn.(), reading_user, status_render_opts)
diff --git a/lib/pleroma/web/mastodon_api/views/poll_view.ex b/lib/pleroma/web/mastodon_api/views/poll_view.ex
index 5bc482c8d8..ddf78ac0a5 100644
--- a/lib/pleroma/web/mastodon_api/views/poll_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/poll_view.ex
@@ -66,6 +66,7 @@ defp options_and_votes_count(options) do
{%{
title: name,
+ title_map: option["nameMap"] || %{},
votes_count: current_count
}, current_count + count}
end)
diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex
index 0fb4067915..7de0741d67 100644
--- a/lib/pleroma/web/mastodon_api/views/status_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/status_view.ex
@@ -22,8 +22,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
alias Pleroma.Web.MastodonAPI.StatusView
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.PleromaAPI.EmojiReactionController
- alias Pleroma.Web.RichMedia.Parser.Card
- alias Pleroma.Web.RichMedia.Parser.Embed
+ alias Pleroma.Web.RichMedia.Card
import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
@@ -32,9 +31,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
# pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity ->
- spawn(fn ->
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- end)
+ spawn(fn -> Card.get_by_activity(activity) end)
end)
end
@@ -116,9 +113,7 @@ def render("index.json", opts) do
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
activities = Enum.filter(opts.activities, & &1)
- # Start fetching rich media before doing anything else, so that later calls to get the cards
- # only block for timeout in the worst case, as opposed to
- # length(activities_with_links) * timeout
+ # Start prefetching rich media before doing anything else
fetch_rich_media_for_activities(activities)
replied_to_activities = get_replied_to_activities(activities)
quoted_activities = get_quoted_activities(activities)
@@ -217,6 +212,7 @@ def render(
in_reply_to_account_id: nil,
reblog: reblogged,
content: reblogged[:content] || "",
+ content_map: reblogged[:content_map] || %{},
created_at: created_at,
reblogs_count: 0,
replies_count: 0,
@@ -233,7 +229,7 @@ def render(
mentions: mentions,
tags: reblogged[:tags] || [],
application: build_application(object.data["generator"]),
- language: get_language(object),
+ language: get_language(object.data),
emojis: [],
pleroma: %{
local: activity.local,
@@ -346,31 +342,32 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
nil
end
- content =
- object
- |> render_content()
+ {content_html, content_html_map} =
+ get_content_and_map(%{
+ type: :html,
+ user: opts[:for],
+ activity: activity,
+ object: object,
+ chrono_order: chrono_order
+ })
- content_html =
- content
- |> Activity.HTML.get_cached_scrubbed_html_for_activity(
- User.html_filter_policy(opts[:for]),
- activity,
- "mastoapi:content:#{chrono_order}"
- )
-
- content_plaintext =
- content
- |> Activity.HTML.get_cached_stripped_html_for_activity(
- activity,
- "mastoapi:content:#{chrono_order}"
- )
+ {content_plaintext, content_plaintext_map} =
+ get_content_and_map(%{
+ type: :plain,
+ user: opts[:for],
+ activity: activity,
+ object: object,
+ chrono_order: chrono_order
+ })
summary = object.data["summary"] || ""
+ summary_map = object.data["summaryMap"] || %{}
card =
- render("card.json", %{
- embed: Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- })
+ case Card.get_by_activity(activity) do
+ %Card{} = result -> render("card.json", result)
+ _ -> nil
+ end
url =
if user.local do
@@ -431,7 +428,9 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
reblog: nil,
card: card,
content: content_html,
+ content_map: content_html_map,
text: opts[:with_source] && get_source_text(object.data["source"]),
+ text_map: opts[:with_source] && get_source_text_map(Map.get(object.data, "source", "")),
created_at: created_at,
edited_at: edited_at,
reblogs_count: announcement_count,
@@ -444,13 +443,14 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
pinned: pinned?,
sensitive: sensitive,
spoiler_text: summary,
+ spoiler_text_map: summary_map,
visibility: get_visibility(object),
media_attachments: attachments,
poll: render(PollView, "show.json", object: object, for: opts[:for]),
mentions: mentions,
tags: build_tags(tags),
application: build_application(object.data["generator"]),
- language: get_language(object),
+ language: get_language(object.data),
emojis: build_emojis(object.data["emoji"]),
pleroma: %{
local: activity.local,
@@ -462,7 +462,9 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
quote_url: object.data["quoteUrl"],
quote_visible: visible_for_user?(quote_activity, opts[:for]),
content: %{"text/plain" => content_plaintext},
+ content_map: %{"text/plain" => content_plaintext_map},
spoiler_text: %{"text/plain" => summary},
+ spoiler_text_map: %{"text/plain" => summary_map},
expires_at: expires_at,
direct_conversation_id: direct_conversation_id,
thread_muted: thread_muted?,
@@ -535,19 +537,17 @@ def render(
created_at = Utils.to_masto_date(object.data["updated"] || object.data["published"])
- content =
- object
- |> render_content()
-
- content_html =
- content
- |> Activity.HTML.get_cached_scrubbed_html_for_activity(
- User.html_filter_policy(opts[:for]),
- activity,
- "mastoapi:content:#{chrono_order}"
- )
+ {content_html, content_html_map} =
+ get_content_and_map(%{
+ type: :html,
+ user: opts[:for],
+ activity: activity,
+ object: object,
+ chrono_order: chrono_order
+ })
summary = object.data["summary"] || ""
+ summary_map = object.data["summaryMap"] || %{}
%{
account:
@@ -556,8 +556,10 @@ def render(
for: opts[:for]
}),
content: content_html,
+ content_map: content_html_map,
sensitive: sensitive,
spoiler_text: summary,
+ spoiler_text_map: summary_map,
created_at: created_at,
media_attachments: attachments,
emojis: build_emojis(object.data["emoji"]),
@@ -571,29 +573,16 @@ def render("source.json", %{activity: %{data: %{"object" => _object}} = activity
%{
id: activity.id,
text: get_source_text(Map.get(object.data, "source", "")),
+ text_map: get_source_text_map(Map.get(object.data, "source", "")),
spoiler_text: Map.get(object.data, "summary", ""),
+ spoiler_text_map: Map.get(object.data, "summaryMap", %{}),
content_type: get_source_content_type(object.data["source"]),
location: build_source_location(object.data)
}
end
- def render("card.json", %{embed: %Embed{} = embed}) do
- with {:ok, %Card{} = card} <- Card.parse(embed) do
- Card.to_map(card)
- else
- _ -> nil
- end
- end
-
- def render("card.json", %{rich_media: rich_media, page_url: page_url}) do
- page_url_data = URI.parse(page_url)
-
- page_url_data =
- if is_binary(rich_media["url"]) do
- URI.merge(page_url_data, URI.parse(rich_media["url"]))
- else
- page_url_data
- end
+ def render("card.json", %Card{fields: rich_media}) do
+ page_url_data = URI.parse(rich_media["url"])
page_url = page_url_data |> to_string
@@ -607,6 +596,7 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do
provider_url: page_url_data.scheme <> "://" <> page_url_data.host,
url: page_url,
image: image_url,
+ image_description: rich_media["image:alt"] || "",
title: rich_media["title"] || "",
description: rich_media["description"] || "",
pleroma: %{
@@ -619,7 +609,6 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do
}
end
- def render("card.json", %{embed: %Card{} = card}), do: Card.to_map(card)
def render("card.json", _), do: nil
def render("attachment.json", %{attachment: attachment}) do
@@ -649,6 +638,19 @@ def render("attachment.json", %{attachment: attachment}) do
to_string(attachment["id"] || hash_id)
end
+ description =
+ if attachment["summary"] do
+ HTML.strip_tags(attachment["summary"])
+ else
+ attachment["name"]
+ end
+
+ name = if attachment["summary"], do: attachment["name"]
+
+ pleroma =
+ %{mime_type: media_type}
+ |> Maps.put_if_present(:name, name)
+
%{
id: attachment_id,
url: href,
@@ -656,8 +658,9 @@ def render("attachment.json", %{attachment: attachment}) do
preview_url: href_preview,
text_url: href,
type: type,
- description: attachment["name"],
- pleroma: %{mime_type: media_type},
+ description: description,
+ description_map: attachment["nameMap"] || %{},
+ pleroma: pleroma,
blurhash: attachment["blurhash"]
}
|> Maps.put_if_present(:meta, meta)
@@ -765,14 +768,18 @@ def get_quote(%{data: %{"object" => _object}} = activity, _) do
end
end
- def render_content(%{data: %{"name" => name, "type" => type}} = object)
+ def render_content(object) do
+ render_content(object, object.data["name"], object.data["content"])
+ end
+
+ def render_content(%{data: %{"type" => type}} = object, name, content)
when not is_nil(name) and name != "" and type != "Event" do
url = object.data["url"] || object.data["id"]
- "#{name}
#{object.data["content"]}"
+ "#{name}
#{content}"
end
- def render_content(object), do: object.data["content"] || ""
+ def render_content(_object, _name, content), do: content || ""
@doc """
Builds a dictionary tags.
@@ -935,6 +942,12 @@ defp get_source_text(_) do
""
end
+ defp get_source_text_map(%{"contentMap" => %{} = content_map} = _source) do
+ content_map
+ end
+
+ defp get_source_text_map(_), do: %{}
+
defp get_source_content_type(%{"mediaType" => type} = _source) do
type
end
@@ -963,7 +976,72 @@ def build_source_location(%{"location_id" => location_id}) when is_binary(locati
def build_source_location(_), do: nil
- defp get_language(%{data: %{"language" => "und"}}), do: nil
+ defp get_content(%{
+ type: type,
+ content: content,
+ user: user,
+ activity: activity,
+ chrono_order: chrono_order,
+ language: language
+ })
+ when type in [:html, :plain] do
+ language = language || "und"
+ cache_key = "mastoapi:content:#{chrono_order}:#{language}"
- defp get_language(object), do: object.data["language"]
+ if type == :html do
+ content
+ |> Activity.HTML.get_cached_scrubbed_html_for_activity(
+ User.html_filter_policy(user),
+ activity,
+ cache_key
+ )
+ else
+ content
+ |> Activity.HTML.get_cached_stripped_html_for_activity(
+ activity,
+ cache_key
+ )
+ end
+ end
+
+ defp get_content_and_map(%{
+ type: type,
+ user: user,
+ activity: activity,
+ object: object,
+ chrono_order: chrono_order
+ }) do
+ content_und =
+ get_content(%{
+ type: type,
+ user: user,
+ activity: activity,
+ content: render_content(object),
+ chrono_order: chrono_order,
+ language: "und"
+ })
+
+ content_map =
+ (object.data["contentMap"] || %{})
+ |> Enum.reduce(%{}, fn {lang, content}, acc ->
+ Map.put(
+ acc,
+ lang,
+ get_content(%{
+ type: type,
+ user: user,
+ activity: activity,
+ content: render_content(object, object.data["nameMap"][lang], content),
+ chrono_order: chrono_order,
+ language: lang
+ })
+ )
+ end)
+
+ {content_und, content_map}
+ end
+
+ defp get_language(%{"language" => "und"}), do: nil
+
+ defp get_language(data), do: data["language"]
end
diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
index c11484ecb5..0b446e0a60 100644
--- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
@@ -54,9 +54,10 @@ def preview(%Conn{} = conn, %{"sig" => sig64, "url" => url64}) do
defp handle_preview(conn, url) do
media_proxy_url = MediaProxy.url(url)
+ http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
with {:ok, %{status: status} = head_response} when status in 200..299 <-
- Pleroma.HTTP.request(:head, media_proxy_url, "", [], pool: :media) do
+ Pleroma.HTTP.request(:head, media_proxy_url, "", [], http_client_opts) do
content_type = Tesla.get_header(head_response, "content-type")
content_length = Tesla.get_header(head_response, "content-length")
content_length = content_length && String.to_integer(content_length)
diff --git a/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex b/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex
index f860eaf7e6..435ccfabe4 100644
--- a/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex
@@ -23,8 +23,9 @@ def mark_as_read(
} = conn,
_
) do
- with {:ok, notification} <- Notification.read_one(user, notification_id) do
- render(conn, "show.json", notification: notification, for: user)
+ with {:ok, _} <- Notification.read_one(user, notification_id) do
+ conn
+ |> json("ok")
else
{:error, message} ->
conn
@@ -38,11 +39,14 @@ def mark_as_read(
conn,
_
) do
- notifications =
- user
- |> Notification.set_read_up_to(max_id)
- |> Enum.take(80)
-
- render(conn, "index.json", notifications: notifications, for: user)
+ with {:ok, _} <- Notification.set_read_up_to(user, max_id) do
+ conn
+ |> json("ok")
+ else
+ {:error, message} ->
+ conn
+ |> put_status(:bad_request)
+ |> json(%{"error" => message})
+ end
end
end
diff --git a/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex b/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex
index e2babc66bd..a1c88d075a 100644
--- a/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex
+++ b/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.PleromaAPI.Chat.MessageReferenceView do
alias Pleroma.User
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.MastodonAPI.StatusView
+ alias Pleroma.Web.RichMedia.Card
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@@ -23,6 +24,12 @@ def render(
}
}
) do
+ card =
+ case Card.get_by_object(object) do
+ %Card{} = card_data -> StatusView.render("card.json", card_data)
+ _ -> nil
+ end
+
%{
id: id |> to_string(),
content: chat_message["content"],
@@ -34,11 +41,7 @@ def render(
chat_message["attachment"] &&
StatusView.render("attachment.json", attachment: chat_message["attachment"]),
unread: unread,
- card:
- StatusView.render(
- "card.json",
- %{embed: Pleroma.Web.RichMedia.Helpers.fetch_data_for_object(object)}
- )
+ card: card
}
|> put_idempotency_key()
end
diff --git a/lib/pleroma/web/plugs/http_security_plug.ex b/lib/pleroma/web/plugs/http_security_plug.ex
index 8d5686f77b..58f38b4b1f 100644
--- a/lib/pleroma/web/plugs/http_security_plug.ex
+++ b/lib/pleroma/web/plugs/http_security_plug.ex
@@ -3,26 +3,27 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
- alias Pleroma.Config
import Plug.Conn
require Logger
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
def init(opts), do: opts
def call(conn, _options) do
- if Config.get([:http_security, :enabled]) do
+ if @config_impl.get([:http_security, :enabled]) do
conn
|> merge_resp_headers(headers())
- |> maybe_send_sts_header(Config.get([:http_security, :sts]))
+ |> maybe_send_sts_header(@config_impl.get([:http_security, :sts]))
else
conn
end
end
def primary_frontend do
- with %{"name" => frontend} <- Config.get([:frontends, :primary]),
- available <- Config.get([:frontends, :available]),
+ with %{"name" => frontend} <- @config_impl.get([:frontends, :primary]),
+ available <- @config_impl.get([:frontends, :available]),
%{} = primary_frontend <- Map.get(available, frontend) do
{:ok, primary_frontend}
end
@@ -37,8 +38,8 @@ def custom_http_frontend_headers do
end
def headers do
- referrer_policy = Config.get([:http_security, :referrer_policy])
- report_uri = Config.get([:http_security, :report_uri])
+ referrer_policy = @config_impl.get([:http_security, :referrer_policy])
+ report_uri = @config_impl.get([:http_security, :report_uri])
custom_http_frontend_headers = custom_http_frontend_headers()
headers = [
@@ -87,11 +88,11 @@ def headers do
@csp_start [Enum.join(static_csp_rules, ";") <> ";"]
defp csp_string do
- scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme]
+ scheme = @config_impl.get([Pleroma.Web.Endpoint, :url])[:scheme]
static_url = Pleroma.Web.Endpoint.static_url()
websocket_url = Pleroma.Web.Endpoint.websocket_url()
- report_uri = Config.get([:http_security, :report_uri])
- sentry_dsn = Config.get([:frontend_configurations, :soapbox_fe, "sentryDsn"])
+ report_uri = @config_impl.get([:http_security, :report_uri])
+ sentry_dsn = @config_impl.get([:frontend_configurations, :soapbox_fe, "sentryDsn"])
img_src = "img-src 'self' data: blob:"
media_src = "media-src 'self'"
@@ -99,8 +100,8 @@ defp csp_string do
# Strict multimedia CSP enforcement only when MediaProxy is enabled
{img_src, media_src, connect_src} =
- if Config.get([:media_proxy, :enabled]) &&
- !Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
+ if @config_impl.get([:media_proxy, :enabled]) &&
+ !@config_impl.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
sources = build_csp_multimedia_source_list()
{
@@ -117,7 +118,7 @@ defp csp_string do
end
connect_src =
- if Config.get(:env) == :dev do
+ if @config_impl.get([:env]) == :dev do
[connect_src, " http://localhost:3035/"]
else
connect_src
@@ -131,10 +132,14 @@ defp csp_string do
end
script_src =
- if Config.get(:env) == :dev do
- "script-src 'self' 'unsafe-eval'"
+ if @config_impl.get([:http_security, :allow_unsafe_eval]) do
+ if @config_impl.get([:env]) == :dev do
+ "script-src 'self' 'unsafe-eval'"
+ else
+ "script-src 'self' 'wasm-unsafe-eval'"
+ end
else
- "script-src 'self' 'wasm-unsafe-eval'"
+ "script-src 'self'"
end
report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"]
@@ -170,11 +175,11 @@ defp build_csp_param_from_whitelist(url), do: url
defp build_csp_multimedia_source_list do
media_proxy_whitelist =
[:media_proxy, :whitelist]
- |> Config.get()
+ |> @config_impl.get()
|> build_csp_from_whitelist([])
- captcha_method = Config.get([Pleroma.Captcha, :method])
- captcha_endpoint = Config.get([captcha_method, :endpoint])
+ captcha_method = @config_impl.get([Pleroma.Captcha, :method])
+ captcha_endpoint = @config_impl.get([captcha_method, :endpoint])
map_tile_server_endpoint = map_tile_server()
@@ -184,7 +189,7 @@ defp build_csp_multimedia_source_list do
[Pleroma.Upload, :base_url],
[Pleroma.Uploaders.S3, :public_endpoint]
]
- |> Enum.map(&Config.get/1)
+ |> Enum.map(&@config_impl.get/1)
[captcha_endpoint | base_endpoints]
|> Enum.map(&build_csp_param/1)
@@ -195,7 +200,7 @@ defp build_csp_multimedia_source_list do
defp map_tile_server do
with tile_server when is_binary(tile_server) <-
- Config.get([:frontend_configurations, :soapbox_fe, "tileServer"]),
+ @config_impl.get([:frontend_configurations, :soapbox_fe, "tileServer"]),
%{host: host} <- URI.parse(tile_server) do
["*.#{host}"]
else
@@ -222,7 +227,7 @@ defp build_csp_param(url) when is_binary(url) do
end
def warn_if_disabled do
- unless Config.get([:http_security, :enabled]) do
+ unless @config_impl.get([:http_security, :enabled]) do
Logger.warning("
.i;;;;i.
iYcviii;vXY:
@@ -267,8 +272,8 @@ def warn_if_disabled do
end
defp maybe_send_sts_header(conn, true) do
- max_age_sts = Config.get([:http_security, :sts_max_age])
- max_age_ct = Config.get([:http_security, :ct_max_age])
+ max_age_sts = @config_impl.get([:http_security, :sts_max_age])
+ max_age_ct = @config_impl.get([:http_security, :ct_max_age])
merge_resp_headers(conn, [
{"strict-transport-security", "max-age=#{max_age_sts}; includeSubDomains"},
diff --git a/lib/pleroma/web/plugs/http_signature_plug.ex b/lib/pleroma/web/plugs/http_signature_plug.ex
index ed193225a4..6bf2dd432a 100644
--- a/lib/pleroma/web/plugs/http_signature_plug.ex
+++ b/lib/pleroma/web/plugs/http_signature_plug.ex
@@ -3,14 +3,22 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
+ alias Pleroma.Helpers.InetHelper
+
import Plug.Conn
import Phoenix.Controller, only: [get_format: 1, text: 2]
- alias Pleroma.Config
alias Pleroma.Web.ActivityPub.MRF
require Logger
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+ @http_signatures_impl Application.compile_env(
+ :pleroma,
+ [__MODULE__, :http_signatures_impl],
+ HTTPSignatures
+ )
+
def init(options) do
options
end
@@ -39,7 +47,7 @@ defp validate_signature(conn, request_target) do
|> put_req_header("(request-target)", request_target)
|> put_req_header("@request-target", request_target)
- HTTPSignatures.validate_conn(conn)
+ @http_signatures_impl.validate_conn(conn)
end
defp validate_signature(conn) do
@@ -105,28 +113,30 @@ defp has_signature_header?(conn) do
defp maybe_require_signature(%{assigns: %{valid_signature: true}} = conn), do: conn
- defp maybe_require_signature(conn) do
- cond do
- get_ip(conn) in Config.get([:instance, :trusted_unsigned], []) ->
- conn
- |> assign(:valid_signature, true)
- |> assign(:actor_id, Pleroma.Web.ActivityPub.Relay.ap_id())
+ defp maybe_require_signature(%{remote_ip: remote_ip} = conn) do
+ if @config_impl.get([:activitypub, :authorized_fetch_mode], false) do
+ exceptions =
+ @config_impl.get([:activitypub, :authorized_fetch_mode_exceptions], [])
+ |> Enum.map(&InetHelper.parse_cidr/1)
- Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) ->
+ if Enum.any?(exceptions, fn x -> InetCidr.contains?(x, remote_ip) end) do
+ conn
+ else
conn
|> put_status(:unauthorized)
|> text("Request not signed")
|> halt()
-
- true ->
- conn
+ end
+ else
+ conn
end
end
defp maybe_filter_requests(%{halted: true} = conn), do: conn
defp maybe_filter_requests(conn) do
- if Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do
+ if @config_impl.get([:activitypub, :authorized_fetch_mode], false) and
+ conn.assigns[:actor_id] do
%{host: host} = URI.parse(conn.assigns.actor_id)
if MRF.subdomain_match?(rejected_domains(), host) do
@@ -142,26 +152,8 @@ defp maybe_filter_requests(conn) do
end
defp rejected_domains do
- Config.get([:instance, :rejected_instances])
+ @config_impl.get([:instance, :rejected_instances])
|> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples()
|> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
end
-
- defp get_ip(conn) do
- forwarded_for =
- conn
- |> Plug.Conn.get_req_header("x-forwarded-for")
- |> List.first()
-
- if forwarded_for do
- forwarded_for
- |> String.split(",")
- |> Enum.map(&String.trim/1)
- |> List.first()
- else
- conn.remote_ip
- |> :inet_parse.ntoa()
- |> to_string()
- end
- end
end
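`maybe_require_signature/1` now checks `conn.remote_ip` against CIDR ranges from `authorized_fetch_mode_exceptions`, replacing the old `trusted_unsigned` list and the hand-rolled `X-Forwarded-For` parsing (the RemoteIp plug already resolves the client address earlier in the pipeline). A hedged config sketch:

```elixir
# Sketch: let specific networks fetch objects unsigned even with authorized
# fetch mode enabled. Entries are CIDR strings; bare addresses are widened
# to /32 or /128 by InetHelper.parse_cidr/1.
config :pleroma, :activitypub,
  authorized_fetch_mode: true,
  authorized_fetch_mode_exceptions: ["10.0.0.0/8", "2001:db8::/32"]
```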
diff --git a/lib/pleroma/web/plugs/logger_metadata_path.ex b/lib/pleroma/web/plugs/logger_metadata_path.ex
new file mode 100644
index 0000000000..a5553cfc8c
--- /dev/null
+++ b/lib/pleroma/web/plugs/logger_metadata_path.ex
@@ -0,0 +1,12 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Plugs.LoggerMetadataPath do
+ def init(opts), do: opts
+
+ def call(conn, _) do
+ Logger.metadata(path: conn.request_path)
+ conn
+ end
+end
diff --git a/lib/pleroma/web/plugs/logger_metadata_user.ex b/lib/pleroma/web/plugs/logger_metadata_user.ex
new file mode 100644
index 0000000000..6a5c0041de
--- /dev/null
+++ b/lib/pleroma/web/plugs/logger_metadata_user.ex
@@ -0,0 +1,18 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Plugs.LoggerMetadataUser do
+ alias Pleroma.User
+
+ def init(opts), do: opts
+
+ def call(%{assigns: %{user: user = %User{}}} = conn, _) do
+ Logger.metadata(user: user.nickname)
+ conn
+ end
+
+ def call(conn, _) do
+ conn
+ end
+end
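The two new plugs only attach `:path` and `:user` to the Logger metadata; they appear in log output once the backend whitelists those keys. A minimal sketch, assuming the default console backend:

```elixir
# config/config.exs (sketch)
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id, :path, :user]
```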
diff --git a/lib/pleroma/web/plugs/remote_ip.ex b/lib/pleroma/web/plugs/remote_ip.ex
index 9f733a96fe..3a4bffb50d 100644
--- a/lib/pleroma/web/plugs/remote_ip.ex
+++ b/lib/pleroma/web/plugs/remote_ip.ex
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.Plugs.RemoteIp do
"""
alias Pleroma.Config
+ alias Pleroma.Helpers.InetHelper
import Plug.Conn
@behaviour Plug
@@ -30,19 +31,8 @@ defp remote_ip_opts do
proxies =
Config.get([__MODULE__, :proxies], [])
|> Enum.concat(reserved)
- |> Enum.map(&maybe_add_cidr/1)
+ |> Enum.map(&InetHelper.parse_cidr/1)
{headers, proxies}
end
-
- defp maybe_add_cidr(proxy) when is_binary(proxy) do
- proxy =
- cond do
- "/" in String.codepoints(proxy) -> proxy
- InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
- InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
- end
-
- InetCidr.parse_cidr!(proxy, true)
- end
end
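The CIDR normalization that used to live in `maybe_add_cidr/1` is now shared with `HTTPSignaturePlug` via `Pleroma.Helpers.InetHelper.parse_cidr/1`, which is defined elsewhere in this changeset. A sketch of what the shared helper amounts to, reconstructed from the deleted code (the actual implementation may differ):

```elixir
defmodule InetHelperSketch do
  # Bare addresses get a host-sized mask before InetCidr parses them into a
  # range that InetCidr.contains?/2 can check.
  def parse_cidr(proxy) when is_binary(proxy) do
    proxy =
      cond do
        String.contains?(proxy, "/") -> proxy
        InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
        InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
      end

    InetCidr.parse_cidr!(proxy, true)
  end
end
```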
diff --git a/lib/pleroma/web/rich_media/backfill.ex b/lib/pleroma/web/rich_media/backfill.ex
new file mode 100644
index 0000000000..4ec50e1329
--- /dev/null
+++ b/lib/pleroma/web/rich_media/backfill.ex
@@ -0,0 +1,101 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.Backfill do
+ alias Pleroma.Web.RichMedia.Card
+ alias Pleroma.Web.RichMedia.Parser
+ alias Pleroma.Web.RichMedia.Parser.TTL
+ alias Pleroma.Workers.RichMediaExpirationWorker
+
+ require Logger
+
+ @backfiller Pleroma.Config.get([__MODULE__, :provider], Pleroma.Web.RichMedia.Backfill.Task)
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ @max_attempts 3
+ @retry 5_000
+
+ def start(%{url: url} = args) when is_binary(url) do
+ url_hash = Card.url_to_hash(url)
+
+ args =
+ args
+ |> Map.put(:attempt, 1)
+ |> Map.put(:url_hash, url_hash)
+
+ @backfiller.run(args)
+ end
+
+ def run(%{url: url, url_hash: url_hash, attempt: attempt} = args)
+ when attempt <= @max_attempts do
+ case Parser.parse(url) do
+ {:ok, fields} ->
+ {:ok, card} = Card.create(url, fields)
+
+ maybe_schedule_expiration(url, fields)
+
+ if Map.has_key?(args, :activity_id) do
+ stream_update(args)
+ end
+
+ warm_cache(url_hash, card)
+
+ {:error, {:invalid_metadata, fields}} ->
+ Logger.debug("Rich media incomplete or invalid metadata for #{url}: #{inspect(fields)}")
+ negative_cache(url_hash)
+
+ {:error, :body_too_large} ->
+ Logger.error("Rich media error for #{url}: :body_too_large")
+ negative_cache(url_hash)
+
+ {:error, {:content_type, type}} ->
+ Logger.debug("Rich media error for #{url}: :content_type is #{type}")
+ negative_cache(url_hash)
+
+ e ->
+ Logger.debug("Rich media error for #{url}: #{inspect(e)}")
+
+ :timer.sleep(@retry * attempt)
+
+ run(%{args | attempt: attempt + 1})
+ end
+ end
+
+ def run(%{url: url, url_hash: url_hash}) do
+ Logger.debug("Rich media failure for #{url}")
+
+ negative_cache(url_hash, :timer.minutes(15))
+ end
+
+ defp maybe_schedule_expiration(url, fields) do
+ case TTL.process(fields, url) do
+ {:ok, ttl} when is_number(ttl) ->
+ timestamp = DateTime.from_unix!(ttl)
+
+ RichMediaExpirationWorker.new(%{"url" => url}, scheduled_at: timestamp)
+ |> Oban.insert()
+
+ _ ->
+ :ok
+ end
+ end
+
+ defp stream_update(%{activity_id: activity_id}) do
+ Pleroma.Activity.get_by_id(activity_id)
+ |> Pleroma.Activity.normalize()
+ |> Pleroma.Web.ActivityPub.ActivityPub.stream_out()
+ end
+
+ defp warm_cache(key, val), do: @cachex.put(:rich_media_cache, key, val)
+ defp negative_cache(key, ttl \\ nil), do: @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
+end
+
+defmodule Pleroma.Web.RichMedia.Backfill.Task do
+ alias Pleroma.Web.RichMedia.Backfill
+
+ def run(args) do
+ Task.Supervisor.start_child(Pleroma.TaskSupervisor, Backfill, :run, [args],
+ name: {:global, {:rich_media, args.url_hash}}
+ )
+ end
+end
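`Backfill.start/1` hands the work to a provider resolved at compile time, defaulting to the `Task.Supervisor`-based runner above, so tests can run backfills inline. A sketch; because `@backfiller` is read at compile time, the override belongs in compile-time config:

```elixir
# config/test.exs (sketch): run backfills synchronously so a test can assert
# on the stored card right after Backfill.start/1 returns.
config :pleroma, Pleroma.Web.RichMedia.Backfill,
  provider: Pleroma.Web.RichMedia.Backfill
```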
diff --git a/lib/pleroma/web/rich_media/card.ex b/lib/pleroma/web/rich_media/card.ex
new file mode 100644
index 0000000000..36a1ae44ae
--- /dev/null
+++ b/lib/pleroma/web/rich_media/card.ex
@@ -0,0 +1,157 @@
+defmodule Pleroma.Web.RichMedia.Card do
+ use Ecto.Schema
+ import Ecto.Changeset
+ import Ecto.Query
+
+ alias Pleroma.Activity
+ alias Pleroma.HTML
+ alias Pleroma.Object
+ alias Pleroma.Repo
+ alias Pleroma.Web.RichMedia.Backfill
+ alias Pleroma.Web.RichMedia.Parser
+
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
+ @type t :: %__MODULE__{}
+
+ schema "rich_media_card" do
+ field(:url_hash, :binary)
+ field(:fields, :map)
+
+ timestamps()
+ end
+
+ @doc false
+ def changeset(card, attrs) do
+ card
+ |> cast(attrs, [:url_hash, :fields])
+ |> validate_required([:url_hash, :fields])
+ |> unique_constraint(:url_hash)
+ end
+
+ @spec create(String.t(), map()) :: {:ok, t()}
+ def create(url, fields) do
+ url_hash = url_to_hash(url)
+
+ fields = Map.put_new(fields, "url", url)
+
+ %__MODULE__{}
+ |> changeset(%{url_hash: url_hash, fields: fields})
+ |> Repo.insert(on_conflict: {:replace, [:fields]}, conflict_target: :url_hash)
+ end
+
+ @spec delete(String.t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()} | :ok
+ def delete(url) do
+ url_hash = url_to_hash(url)
+ @cachex.del(:rich_media_cache, url_hash)
+
+ case get_by_url(url) do
+ %__MODULE__{} = card -> Repo.delete(card)
+ nil -> :ok
+ end
+ end
+
+ @spec get_by_url(String.t() | nil) :: t() | nil | :error
+ def get_by_url(url) when is_binary(url) do
+ if @config_impl.get([:rich_media, :enabled]) do
+ url_hash = url_to_hash(url)
+
+ @cachex.fetch!(:rich_media_cache, url_hash, fn _ ->
+ result =
+ __MODULE__
+ |> where(url_hash: ^url_hash)
+ |> Repo.one()
+
+ case result do
+ %__MODULE__{} = card -> {:commit, card}
+ _ -> {:ignore, nil}
+ end
+ end)
+ else
+ :error
+ end
+ end
+
+ def get_by_url(nil), do: nil
+
+ @spec get_or_backfill_by_url(String.t(), map()) :: t() | nil
+ def get_or_backfill_by_url(url, backfill_opts \\ %{}) do
+ case get_by_url(url) do
+ %__MODULE__{} = card ->
+ card
+
+ nil ->
+ backfill_opts = Map.put(backfill_opts, :url, url)
+
+ Backfill.start(backfill_opts)
+
+ nil
+
+ :error ->
+ nil
+ end
+ end
+
+ @spec get_by_object(Object.t()) :: t() | nil | :error
+ def get_by_object(object) do
+ case HTML.extract_first_external_url_from_object(object) do
+ nil -> nil
+ url -> get_or_backfill_by_url(url)
+ end
+ end
+
+ @spec get_by_activity(Activity.t()) :: t() | nil | :error
+ # Fake/Draft activity
+ def get_by_activity(%Activity{id: "pleroma:fakeid"} = activity) do
+ with %Object{} = object <- Object.normalize(activity, fetch: false),
+ url when not is_nil(url) <- HTML.extract_first_external_url_from_object(object) do
+ case get_by_url(url) do
+ # Cache hit
+ %__MODULE__{} = card ->
+ card
+
+ # Cache miss, but fetch for rendering the Draft
+ _ ->
+ with {:ok, fields} <- Parser.parse(url),
+ {:ok, card} <- create(url, fields) do
+ card
+ else
+ _ -> nil
+ end
+ end
+ else
+ _ ->
+ nil
+ end
+ end
+
+ def get_by_activity(activity) do
+ with %Object{} = object <- Object.normalize(activity, fetch: false),
+ {_, nil} <- {:cached, get_cached_url(object, activity.id)} do
+ nil
+ else
+ {:cached, url} ->
+ get_or_backfill_by_url(url, %{activity_id: activity.id})
+
+ _ ->
+ :error
+ end
+ end
+
+ @spec url_to_hash(String.t()) :: String.t()
+ def url_to_hash(url) do
+ :crypto.hash(:sha256, url) |> Base.encode16(case: :lower)
+ end
+
+ defp get_cached_url(object, activity_id) do
+ key = "URL|#{activity_id}"
+
+ @cachex.fetch!(:scrubber_cache, key, fn _ ->
+ url = HTML.extract_first_external_url_from_object(object)
+ Activity.HTML.add_cache_key_for(activity_id, key)
+
+ {:commit, url}
+ end)
+ end
+end
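Typical read path for the new persistent cards, as the module above exposes it: the first call usually returns `nil` and kicks off an asynchronous backfill, while later calls hit the Cachex layer or the `rich_media_card` table.

```elixir
alias Pleroma.Web.RichMedia.Card

case Card.get_or_backfill_by_url("https://example.com/article") do
  # Found in cache or DB: fields is the string-keyed map stored by create/2.
  %Card{fields: fields} -> fields["title"]
  # Not fetched yet (backfill scheduled) or rich media disabled.
  nil -> nil
end
```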
diff --git a/lib/pleroma/web/rich_media/helpers.ex b/lib/pleroma/web/rich_media/helpers.ex
index e88778e863..b62c9749cd 100644
--- a/lib/pleroma/web/rich_media/helpers.ex
+++ b/lib/pleroma/web/rich_media/helpers.ex
@@ -3,21 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Helpers do
- alias Pleroma.Activity
- alias Pleroma.HTML
- alias Pleroma.Object
- alias Pleroma.Web.RichMedia.Parser
- alias Pleroma.Web.RichMedia.Parser.Embed
-
- @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
-
- @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
-
- @options [
- pool: :media,
- max_body: 2_000_000,
- recv_timeout: 2_000
- ]
+ alias Pleroma.Config
defp headers do
user_agent =
@@ -32,53 +18,11 @@ defp headers do
[{"user-agent", user_agent}]
end
- def fetch_data_for_object(object) do
- with true <- @config_impl.get([:rich_media, :enabled]),
- {:ok, page_url} <-
- HTML.extract_first_external_url_from_object(object),
- {:ok, %Embed{} = embed} <- Parser.parse(page_url) do
- embed
- else
- _ -> nil
- end
- end
-
- def fetch_data_for_activity(%Activity{data: %{"type" => "Create"}} = activity) do
- with true <- @config_impl.get([:rich_media, :enabled]),
- %Object{} = object <- Object.normalize(activity, fetch: false) do
- if object.data["fake"] do
- fetch_data_for_object(object)
- else
- key = "URL|#{activity.id}"
-
- @cachex.fetch!(:scrubber_cache, key, fn _ ->
- result = fetch_data_for_object(object)
-
- with %Embed{} <- result do
- Activity.HTML.add_cache_key_for(activity.id, key)
- {:commit, result}
- else
- _ ->
- {:ignore, nil}
- end
- end)
- end
- else
- _ -> nil
- end
- end
-
- def fetch_data_for_activity(_), do: %{}
-
- def oembed_get(url) do
- Pleroma.HTTP.get(url, headers(), @options)
- end
-
def rich_media_get(url) do
headers = headers()
head_check =
- case Pleroma.HTTP.head(url, headers, @options) do
+ case Pleroma.HTTP.head(url, headers, http_options()) do
# If the HEAD request didn't reach the server for whatever reason,
# we assume the GET that comes right after won't either
{:error, _} = e ->
@@ -93,7 +37,7 @@ def rich_media_get(url) do
:ok
end
- with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, @options)
+ with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, http_options())
end
defp check_content_type(headers) do
@@ -109,12 +53,13 @@ defp check_content_type(headers) do
end
end
- @max_body @options[:max_body]
defp check_content_length(headers) do
+ max_body = Keyword.get(http_options(), :max_body)
+
case List.keyfind(headers, "content-length", 0) do
{_, maybe_content_length} ->
case Integer.parse(maybe_content_length) do
- {content_length, ""} when content_length <= @max_body -> :ok
+ {content_length, ""} when content_length <= max_body -> :ok
{_, ""} -> {:error, :body_too_large}
_ -> :ok
end
@@ -123,4 +68,11 @@ defp check_content_length(headers) do
:ok
end
end
+
+ defp http_options do
+ [
+ pool: :rich_media,
+ max_body: Config.get([:rich_media, :max_body], 5_000_000)
+ ]
+ end
end
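The hardcoded `@options` are gone; requests now use the `:rich_media` pool and a `max_body` limit read from config at call time, defaulting to 5 MB. Raising the cap is a one-line config change (sketch):

```elixir
# config sketch: allow rich media pages up to 10 MB instead of the 5 MB default.
config :pleroma, :rich_media, max_body: 10_000_000
```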
diff --git a/lib/pleroma/web/rich_media/parser.ex b/lib/pleroma/web/rich_media/parser.ex
index f1fac44464..37cf29029b 100644
--- a/lib/pleroma/web/rich_media/parser.ex
+++ b/lib/pleroma/web/rich_media/parser.ex
@@ -4,170 +4,60 @@
defmodule Pleroma.Web.RichMedia.Parser do
require Logger
- alias Pleroma.Web.RichMedia.Parser.Card
- alias Pleroma.Web.RichMedia.Parser.Embed
- @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
defp parsers do
Pleroma.Config.get([:rich_media, :parsers])
end
- def parse(nil), do: {:error, "No URL provided"}
+ def parse(nil), do: nil
@spec parse(String.t()) :: {:ok, map()} | {:error, any()}
def parse(url) do
with :ok <- validate_page_url(url),
- {:ok, data} <- get_cached_or_parse(url),
- {:ok, _} <- set_ttl_based_on_image(data, url) do
+ {:ok, data} <- parse_url(url) do
+ data = Map.put(data, "url", url)
{:ok, data}
end
end
- defp get_cached_or_parse(url) do
- case @cachex.fetch(:rich_media_cache, url, fn ->
- case parse_url(url) do
- {:ok, _} = res ->
- {:commit, res}
-
- {:error, reason} = e ->
- # Unfortunately we have to log errors here, instead of doing that
- # along with ttl setting at the bottom. Otherwise we can get log spam
- # if more than one process was waiting for the rich media card
- # while it was generated. Ideally we would set ttl here as well,
- # so we don't override it number_of_waiters_on_generation
- # times, but one, obviously, can't set ttl for not-yet-created entry
- # and Cachex doesn't support returning ttl from the fetch callback.
- log_error(url, reason)
- {:commit, e}
- end
- end) do
- {action, res} when action in [:commit, :ok] ->
- case res do
- {:ok, _data} = res ->
- res
-
- {:error, reason} = e ->
- if action == :commit, do: set_error_ttl(url, reason)
- e
- end
-
- {:error, e} ->
- {:error, {:cachex_error, e}}
- end
- end
-
- defp set_error_ttl(_url, :body_too_large), do: :ok
- defp set_error_ttl(_url, {:content_type, _}), do: :ok
-
- # The TTL is not set for the errors above, since they are unlikely to change
- # with time
-
- defp set_error_ttl(url, _reason) do
- ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
- @cachex.expire(:rich_media_cache, url, ttl)
- :ok
- end
-
- defp log_error(url, {:invalid_metadata, data}) do
- Logger.debug(fn -> "Incomplete or invalid metadata for #{url}: #{inspect(data)}" end)
- end
-
- defp log_error(url, reason) do
- Logger.warning(fn -> "Rich media error for #{url}: #{inspect(reason)}" end)
- end
-
- @doc """
- Set the rich media cache based on the expiration time of image.
-
- Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`
-
- ## Example
-
- defmodule MyModule do
- @behaviour Pleroma.Web.RichMedia.Parser.TTL
- def ttl(data, url) do
- image_url = Map.get(data, :image)
- # do some parsing in the url and get the ttl of the image
- # and return ttl is unix time
- parse_ttl_from_url(image_url)
- end
- end
-
- Define the module in the config
-
- config :pleroma, :rich_media,
- ttl_setters: [MyModule]
- """
- @spec set_ttl_based_on_image(map(), String.t()) ::
- {:ok, integer() | :noop} | {:error, :no_key}
- def set_ttl_based_on_image(data, url) do
- case get_ttl_from_image(data, url) do
- ttl when is_number(ttl) ->
- ttl = ttl * 1000
-
- case @cachex.expire_at(:rich_media_cache, url, ttl) do
- {:ok, true} -> {:ok, ttl}
- {:ok, false} -> {:error, :no_key}
- end
-
- _ ->
- {:ok, :noop}
- end
- end
-
- defp get_ttl_from_image(data, url) do
- [:rich_media, :ttl_setters]
- |> Pleroma.Config.get()
- |> Enum.reduce({:ok, nil}, fn
- module, {:ok, _ttl} ->
- module.ttl(data, url)
-
- _, error ->
- error
- end)
- end
-
- def parse_url(url) do
- case maybe_fetch_oembed(url) do
- {:ok, %Embed{} = embed} -> {:ok, embed}
- _ -> fetch_document(url)
- end
- end
-
- defp maybe_fetch_oembed(url) do
- with true <- Pleroma.Config.get([:rich_media, :oembed_providers_enabled]),
- {:ok, oembed_url} <- OEmbedProviders.oembed_url(url),
- {:ok, %Tesla.Env{body: json}} <-
- Pleroma.Web.RichMedia.Helpers.oembed_get(oembed_url),
- {:ok, data} <- Jason.decode(json),
- embed <- %Embed{url: url, oembed: data},
- {:ok, %Card{}} <- Card.validate(embed) do
- {:ok, embed}
- else
- {:error, error} -> {:error, error}
- error -> {:error, error}
- end
- end
-
- defp fetch_document(url) do
+ defp parse_url(url) do
with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
- {:ok, html} <- Floki.parse_document(html),
- %Embed{} = embed <- parse_embed(html, url) do
- {:ok, embed}
- else
- {:error, error} -> {:error, error}
- error -> {:error, error}
+ {:ok, html} <- Floki.parse_document(html) do
+ html
+ |> maybe_parse()
+ |> clean_parsed_data()
+ |> check_parsed_data()
end
end
- defp parse_embed(html, url) do
- Enum.reduce(parsers(), %Embed{url: url}, fn parser, acc ->
- parser.parse(html, acc)
+ defp maybe_parse(html) do
+ Enum.reduce_while(parsers(), %{}, fn parser, acc ->
+ case parser.parse(html, acc) do
+ data when data != %{} -> {:halt, data}
+ _ -> {:cont, acc}
+ end
end)
end
+ defp check_parsed_data(%{"title" => title} = data)
+ when is_binary(title) and title != "" do
+ {:ok, data}
+ end
+
+ defp check_parsed_data(data) do
+ {:error, {:invalid_metadata, data}}
+ end
+
+ defp clean_parsed_data(data) do
+ data
+ |> Enum.reject(fn {key, val} ->
+ not match?({:ok, _}, Jason.encode(%{key => val}))
+ end)
+ |> Map.new()
+ end
+
@spec validate_page_url(URI.t() | binary()) :: :ok | :error
defp validate_page_url(page_url) when is_binary(page_url) do
validate_tld = @config_impl.get([Pleroma.Formatter, :validate_tld])
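With the `%Embed{}` struct and the oEmbed-provider shortcut removed, `parse/1` now simply fetches the page and folds it through the configured parsers, stopping at the first one that returns a non-empty, JSON-encodable, string-keyed map containing a `"title"`. The list below mirrors the default config, which this fork may adjust:

```elixir
# Sketch: parsers are tried in order by maybe_parse/1; the first non-empty
# result halts the reduce_while.
config :pleroma, :rich_media,
  parsers: [
    Pleroma.Web.RichMedia.Parsers.OEmbed,
    Pleroma.Web.RichMedia.Parsers.TwitterCard
  ]
```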
diff --git a/lib/pleroma/web/rich_media/parser/card.ex b/lib/pleroma/web/rich_media/parser/card.ex
deleted file mode 100644
index 5288376143..0000000000
--- a/lib/pleroma/web/rich_media/parser/card.ex
+++ /dev/null
@@ -1,148 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Parser.Card do
- alias Pleroma.Web.RichMedia.Parser.Card
- alias Pleroma.Web.RichMedia.Parser.Embed
-
- @types ["link", "photo", "video", "rich"]
-
- # https://docs.joinmastodon.org/entities/card/
- defstruct url: nil,
- title: nil,
- description: "",
- type: "link",
- author_name: "",
- author_url: "",
- provider_name: "",
- provider_url: "",
- html: "",
- width: 0,
- height: 0,
- image: nil,
- embed_url: "",
- blurhash: nil
-
- def parse(%Embed{url: url, oembed: %{"type" => type, "title" => title} = oembed} = embed)
- when type in @types and is_binary(url) do
- uri = URI.parse(url)
-
- %Card{
- url: url,
- title: title,
- description: get_description(embed),
- type: oembed["type"],
- author_name: oembed["author_name"],
- author_url: oembed["author_url"],
- provider_name: oembed["provider_name"] || uri.host,
- provider_url: oembed["provider_url"] || "#{uri.scheme}://#{uri.host}",
- html: sanitize_html(oembed["html"]),
- width: oembed["width"],
- height: oembed["height"],
- image: get_image(oembed) |> fix_uri(url) |> proxy(),
- embed_url: oembed["url"] |> fix_uri(url) |> proxy()
- }
- |> validate()
- end
-
- def parse(%Embed{url: url} = embed) when is_binary(url) do
- uri = URI.parse(url)
-
- %Card{
- url: url,
- title: get_title(embed),
- description: get_description(embed),
- type: "link",
- provider_name: uri.host,
- provider_url: "#{uri.scheme}://#{uri.host}",
- image: get_image(embed) |> fix_uri(url) |> proxy()
- }
- |> validate()
- end
-
- def parse(card), do: {:error, {:invalid_metadata, card}}
-
- defp get_title(embed) do
- case embed do
- %{meta: %{"twitter:title" => title}} when is_binary(title) and title != "" -> title
- %{meta: %{"og:title" => title}} when is_binary(title) and title != "" -> title
- %{title: title} when is_binary(title) and title != "" -> title
- _ -> nil
- end
- end
-
- defp get_description(%{meta: meta}) do
- case meta do
- %{"twitter:description" => desc} when is_binary(desc) and desc != "" -> desc
- %{"og:description" => desc} when is_binary(desc) and desc != "" -> desc
- %{"description" => desc} when is_binary(desc) and desc != "" -> desc
- _ -> ""
- end
- end
-
- defp get_image(%{meta: meta}) do
- case meta do
- %{"twitter:image" => image} when is_binary(image) and image != "" -> image
- %{"og:image" => image} when is_binary(image) and image != "" -> image
- _ -> ""
- end
- end
-
- defp get_image(%{"thumbnail_url" => image}) when is_binary(image) and image != "", do: image
- defp get_image(%{"type" => "photo", "url" => image}), do: image
- defp get_image(_), do: ""
-
- defp sanitize_html(html) do
- with {:ok, html} <- FastSanitize.Sanitizer.scrub(html, Pleroma.HTML.Scrubber.OEmbed),
- {:ok, [{"iframe", _, _}]} <- Floki.parse_fragment(html) do
- html
- else
- _ -> ""
- end
- end
-
- def to_map(%Card{} = card) do
- card
- |> Map.from_struct()
- |> stringify_keys()
- end
-
- def to_map(%{} = card), do: stringify_keys(card)
-
- defp stringify_keys(%{} = map), do: Map.new(map, fn {k, v} -> {Atom.to_string(k), v} end)
-
- def fix_uri("http://" <> _ = uri, _base_uri), do: uri
- def fix_uri("https://" <> _ = uri, _base_uri), do: uri
- def fix_uri("/" <> _ = uri, base_uri), do: URI.merge(base_uri, uri) |> URI.to_string()
- def fix_uri("", _base_uri), do: nil
-
- def fix_uri(uri, base_uri) when is_binary(uri),
- do: URI.merge(base_uri, "/#{uri}") |> URI.to_string()
-
- def fix_uri(_uri, _base_uri), do: nil
-
- defp proxy(url) when is_binary(url), do: Pleroma.Web.MediaProxy.url(url)
- defp proxy(_), do: nil
-
- def validate(%Card{type: type, html: html} = card)
- when type in ["video", "rich"] and (is_binary(html) == false or html == "") do
- card
- |> Map.put(:type, "link")
- |> validate()
- end
-
- def validate(%Card{type: type, title: title} = card)
- when type in @types and is_binary(title) and title != "" do
- {:ok, card}
- end
-
- def validate(%Embed{} = embed) do
- case Card.parse(embed) do
- {:ok, %Card{} = card} -> validate(card)
- card -> {:error, {:invalid_metadata, card}}
- end
- end
-
- def validate(card), do: {:error, {:invalid_metadata, card}}
-end
diff --git a/lib/pleroma/web/rich_media/parser/embed.ex b/lib/pleroma/web/rich_media/parser/embed.ex
deleted file mode 100644
index f06756b1e9..0000000000
--- a/lib/pleroma/web/rich_media/parser/embed.ex
+++ /dev/null
@@ -1,10 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Parser.Embed do
- @moduledoc """
- Represents embedded content, including scraped markup and OEmbed.
- """
- defstruct url: nil, title: nil, meta: nil, oembed: nil
-end
diff --git a/lib/pleroma/web/rich_media/parser/meta_tags.ex b/lib/pleroma/web/rich_media/parser/meta_tags.ex
deleted file mode 100644
index ea544b1100..0000000000
--- a/lib/pleroma/web/rich_media/parser/meta_tags.ex
+++ /dev/null
@@ -1,39 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Parser.MetaTags do
- @doc """
- Parses a `Floki.html_tree/0` and returns a map of raw `<meta>` tag values.
- """
- @spec parse(html_tree :: Floki.html_tree()) :: map()
- def parse(html_tree) do
- html_tree
- |> Floki.find("meta")
- |> Enum.reduce(%{}, fn html_node, acc ->
- case parse_node(html_node) do
- {:ok, {name, content}} -> Map.put(acc, name, content)
- _ -> acc
- end
- end)
- |> clean_data()
- end
-
- defp parse_node({_tag, attrs, _children}) when is_list(attrs) do
- case Map.new(attrs) do
- %{"name" => name, "content" => content} -> {:ok, {name, content}}
- %{"property" => name, "content" => content} -> {:ok, {name, content}}
- _ -> {:error, :invalid_meta_tag}
- end
- end
-
- defp parse_node(_), do: {:error, :invalid_meta_tag}
-
- defp clean_data(data) do
- data
- |> Enum.reject(fn {key, val} ->
- not match?({:ok, _}, Jason.encode(%{key => val}))
- end)
- |> Map.new()
- end
-end
diff --git a/lib/pleroma/web/rich_media/parser/ttl.ex b/lib/pleroma/web/rich_media/parser/ttl.ex
index b51298bd83..7e56375ff7 100644
--- a/lib/pleroma/web/rich_media/parser/ttl.ex
+++ b/lib/pleroma/web/rich_media/parser/ttl.ex
@@ -4,4 +4,17 @@
defmodule Pleroma.Web.RichMedia.Parser.TTL do
@callback ttl(map(), String.t()) :: integer() | nil
+
+ @spec process(map(), String.t()) :: {:ok, integer() | nil}
+ def process(data, url) do
+ [:rich_media, :ttl_setters]
+ |> Pleroma.Config.get()
+ |> Enum.reduce_while({:ok, nil}, fn
+ module, acc ->
+ case module.ttl(data, url) do
+ ttl when is_number(ttl) -> {:halt, {:ok, ttl}}
+ _ -> {:cont, acc}
+ end
+ end)
+ end
end
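`TTL.process/2` now walks the configured setters and takes the first numeric result, replacing the moduledoc example that was deleted from `Parser`. A sketch of a custom setter; the module name and its expiry heuristic are hypothetical, and `ttl/2` must return an absolute unix timestamp or `nil`:

```elixir
defmodule MyApp.RichMedia.ImageTTL do
  @behaviour Pleroma.Web.RichMedia.Parser.TTL

  @impl true
  def ttl(%{"image" => image_url}, _page_url) when is_binary(image_url) do
    # Hypothetical heuristic: read an "expires" query parameter from the image
    # URL; returning nil lets the next configured setter run instead.
    with %URI{query: query} when is_binary(query) <- URI.parse(image_url),
         %{"expires" => expires} <- URI.decode_query(query),
         {unix, ""} <- Integer.parse(expires) do
      unix
    else
      _ -> nil
    end
  end

  def ttl(_data, _page_url), do: nil
end

# config/config.exs (sketch): the first setter returning a number wins.
config :pleroma, :rich_media,
  ttl_setters: [MyApp.RichMedia.ImageTTL, Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
```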
diff --git a/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex b/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex
index a0d567c428..1172a120ad 100644
--- a/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex
+++ b/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
@impl true
def ttl(data, _url) do
- image = Map.get(data, :image)
+ image = Map.get(data, "image")
if aws_signed_url?(image) do
image
@@ -15,14 +15,15 @@ def ttl(data, _url) do
|> format_query_params()
|> get_expiration_timestamp()
else
- {:error, "Not aws signed url #{inspect(image)}"}
+ nil
end
end
defp aws_signed_url?(image) when is_binary(image) and image != "" do
%URI{host: host, query: query} = URI.parse(image)
- String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
+ is_binary(host) and String.contains?(host, "amazonaws.com") and
+ is_binary(query) and String.contains?(query, "X-Amz-Expires")
end
defp aws_signed_url?(_), do: nil
diff --git a/lib/pleroma/web/rich_media/parser/ttl/opengraph.ex b/lib/pleroma/web/rich_media/parser/ttl/opengraph.ex
new file mode 100644
index 0000000000..b068896694
--- /dev/null
+++ b/lib/pleroma/web/rich_media/parser/ttl/opengraph.ex
@@ -0,0 +1,20 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.Parser.TTL.Opengraph do
+ @behaviour Pleroma.Web.RichMedia.Parser.TTL
+
+ @impl true
+ def ttl(%{"ttl" => ttl_string}, _url) when is_binary(ttl_string) do
+ try do
+ ttl = String.to_integer(ttl_string)
+ now = DateTime.utc_now() |> DateTime.to_unix()
+ now + ttl
+ rescue
+ _ -> nil
+ end
+ end
+
+ def ttl(_, _), do: nil
+end
diff --git a/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex b/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex
new file mode 100644
index 0000000000..320a5f515d
--- /dev/null
+++ b/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex
@@ -0,0 +1,46 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.Parsers.MetaTagsParser do
+ def parse(data, html, prefix, key_name, value_name \\ "content") do
+ html
+ |> get_elements(key_name, prefix)
+ |> Enum.reduce(data, fn el, acc ->
+ attributes = normalize_attributes(el, prefix, key_name, value_name)
+
+ Map.merge(acc, attributes)
+ end)
+ |> maybe_put_title(html)
+ end
+
+ defp get_elements(html, key_name, prefix) do
+ html |> Floki.find("meta[#{key_name}^='#{prefix}:']")
+ end
+
+ defp normalize_attributes(html_node, prefix, key_name, value_name) do
+ {_tag, attributes, _children} = html_node
+
+ data =
+ Map.new(attributes, fn {name, value} ->
+ {name, String.trim_leading(value, "#{prefix}:")}
+ end)
+
+ %{data[key_name] => data[value_name]}
+ end
+
+ defp maybe_put_title(%{"title" => _} = meta, _), do: meta
+
+ defp maybe_put_title(meta, html) when meta != %{} do
+ case get_page_title(html) do
+ "" -> meta
+ title -> Map.put_new(meta, "title", title)
+ end
+ end
+
+ defp maybe_put_title(meta, _), do: meta
+
+ defp get_page_title(html) do
+ Floki.find(html, "html head title") |> List.first() |> Floki.text()
+ end
+end
diff --git a/lib/pleroma/web/rich_media/parsers/o_embed.ex b/lib/pleroma/web/rich_media/parsers/o_embed.ex
index 467711cbbe..0f303176ce 100644
--- a/lib/pleroma/web/rich_media/parsers/o_embed.ex
+++ b/lib/pleroma/web/rich_media/parsers/o_embed.ex
@@ -3,18 +3,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parsers.OEmbed do
- def parse(html, data) do
+ def parse(html, _data) do
with elements = [_ | _] <- get_discovery_data(html),
oembed_url when is_binary(oembed_url) <- get_oembed_url(elements),
{:ok, oembed_data = %{"html" => html}} <- get_oembed_data(oembed_url) do
- data
- |> Map.put(
- :oembed,
- oembed_data
- |> Map.put("html", Pleroma.HTML.filter_tags(html))
- )
+ %{oembed_data | "html" => Pleroma.HTML.filter_tags(html)}
else
- _e -> data
+ _e -> %{}
end
end
@@ -27,7 +22,7 @@ defp get_oembed_url([{"link", attributes, _children} | _]) do
end
defp get_oembed_data(url) do
- with {:ok, %Tesla.Env{body: json}} <- Pleroma.Web.RichMedia.Helpers.oembed_get(url) do
+ with {:ok, %Tesla.Env{body: json}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url) do
Jason.decode(json)
end
end
diff --git a/lib/pleroma/web/rich_media/parsers/ogp.ex b/lib/pleroma/web/rich_media/parsers/ogp.ex
index 6c72995345..b7f2b42168 100644
--- a/lib/pleroma/web/rich_media/parsers/ogp.ex
+++ b/lib/pleroma/web/rich_media/parsers/ogp.ex
@@ -4,5 +4,7 @@
defmodule Pleroma.Web.RichMedia.Parsers.OGP do
@deprecated "OGP parser is deprecated. Use TwitterCard instead."
- def parse(_html, data), do: data
+ def parse(_html, _data) do
+ %{}
+ end
end
diff --git a/lib/pleroma/web/rich_media/parsers/twitter_card.ex b/lib/pleroma/web/rich_media/parsers/twitter_card.ex
index 7b8cc33d5c..cc653729db 100644
--- a/lib/pleroma/web/rich_media/parsers/twitter_card.ex
+++ b/lib/pleroma/web/rich_media/parsers/twitter_card.ex
@@ -3,22 +3,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parsers.TwitterCard do
- alias Pleroma.Web.RichMedia.Parser.MetaTags
+ alias Pleroma.Web.RichMedia.Parsers.MetaTagsParser
- @spec parse(Floki.html_tree(), map()) :: map()
+ @spec parse(list(), map()) :: map()
def parse(html, data) do
data
- |> Map.put(:title, get_page_title(html))
- |> Map.put(:meta, MetaTags.parse(html))
- end
-
- def get_page_title(html) do
- with [node | _] <- Floki.find(html, "html head title"),
- title when is_binary(title) and title != "" <- Floki.text(node),
- true <- String.valid?(title) do
- title
- else
- _ -> nil
- end
+ |> MetaTagsParser.parse(html, "og", "property")
+ |> MetaTagsParser.parse(html, "twitter", "name")
+ |> MetaTagsParser.parse(html, "twitter", "property")
end
end
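The rewritten TwitterCard parser layers OpenGraph and Twitter `<meta>` tags through `MetaTagsParser`, producing the flat string-keyed map the new `Parser` expects. A small worked example, with the output shape inferred from the code above:

```elixir
html = """
<html>
  <head>
    <title>Fallback title</title>
    <meta property="og:title" content="OG title">
    <meta property="og:image" content="https://example.com/img.png">
  </head>
  <body></body>
</html>
"""

{:ok, doc} = Floki.parse_document(html)

Pleroma.Web.RichMedia.Parsers.TwitterCard.parse(doc, %{})
# => %{"title" => "OG title", "image" => "https://example.com/img.png"}
```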
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index c52bca06b2..7190d2e157 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -29,6 +29,7 @@ defmodule Pleroma.Web.Router do
pipeline :browser do
plug(:accepts, ["html"])
plug(:fetch_session)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :oauth do
@@ -67,12 +68,14 @@ defmodule Pleroma.Web.Router do
plug(:fetch_session)
plug(:authenticate)
plug(OpenApiSpex.Plug.PutApiSpec, module: Pleroma.Web.ApiSpec)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :no_auth_or_privacy_expectations_api do
plug(:base_api)
plug(:after_auth)
plug(Pleroma.Web.Plugs.IdempotencyPlug)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
# Pipeline for app-related endpoints (no user auth checks — app-bound tokens must be supported)
@@ -83,12 +86,14 @@ defmodule Pleroma.Web.Router do
pipeline :api do
plug(:expect_public_instance_or_user_authentication)
plug(:no_auth_or_privacy_expectations_api)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :authenticated_api do
plug(:expect_user_authentication)
plug(:no_auth_or_privacy_expectations_api)
plug(Pleroma.Web.Plugs.EnsureAuthenticatedPlug)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :admin_api do
@@ -99,6 +104,7 @@ defmodule Pleroma.Web.Router do
plug(Pleroma.Web.Plugs.EnsureAuthenticatedPlug)
plug(Pleroma.Web.Plugs.UserIsStaffPlug)
plug(Pleroma.Web.Plugs.IdempotencyPlug)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :require_admin do
@@ -179,6 +185,7 @@ defmodule Pleroma.Web.Router do
plug(:browser)
plug(:authenticate)
plug(Pleroma.Web.Plugs.EnsureUserTokenAssignsPlug)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :well_known do
@@ -193,6 +200,7 @@ defmodule Pleroma.Web.Router do
pipeline :pleroma_api do
plug(:accepts, ["html", "json"])
plug(OpenApiSpex.Plug.PutApiSpec, module: Pleroma.Web.ApiSpec)
+ plug(Pleroma.Web.Plugs.LoggerMetadataUser)
end
pipeline :mailbox_preview do
@@ -929,7 +937,6 @@ defmodule Pleroma.Web.Router do
get("/statuses", StatusController, :index)
get("/statuses/:id", StatusController, :show)
get("/statuses/:id/context", StatusController, :context)
- get("/statuses/:id/card", StatusController, :card)
get("/statuses/:id/favourited_by", StatusController, :favourited_by)
get("/statuses/:id/reblogged_by", StatusController, :reblogged_by)
get("/statuses/:id/history", StatusController, :show_history)
diff --git a/lib/pleroma/webhook.ex b/lib/pleroma/webhook.ex
index b9d9c5d296..67dd5d333c 100644
--- a/lib/pleroma/webhook.ex
+++ b/lib/pleroma/webhook.ex
@@ -31,35 +31,44 @@ def get_by_type(type) do
|> Repo.all()
end
- def changeset(%__MODULE__{} = webhook, params) do
+ def changeset(%__MODULE__{} = webhook, params, opts \\ []) do
webhook
|> cast(params, [:url, :events, :enabled])
+ |> maybe_update_internal(params, opts)
|> validate_required([:url, :events])
|> unique_constraint(:url)
|> strip_events()
|> put_secret()
end
- def update_changeset(%__MODULE__{} = webhook, params \\ %{}) do
+ def update_changeset(%__MODULE__{} = webhook, params \\ %{}, opts \\ []) do
webhook
|> cast(params, [:url, :events, :enabled])
+ |> maybe_update_internal(params, opts)
|> unique_constraint(:url)
|> strip_events()
end
- def create(params) do
+ defp maybe_update_internal(webhook, params, update_internal: true) do
+ webhook
+ |> cast(params, [:internal])
+ end
+
+ defp maybe_update_internal(webhook, _params, _opts), do: webhook
+
+ def create(params, opts \\ []) do
{:ok, webhook} =
%__MODULE__{}
- |> changeset(params)
+ |> changeset(params, opts)
|> Repo.insert()
webhook
end
- def update(%__MODULE__{} = webhook, params) do
+ def update(%__MODULE__{} = webhook, params, opts \\ []) do
{:ok, webhook} =
webhook
- |> update_changeset(params)
+ |> update_changeset(params, opts)
|> Repo.update()
webhook
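The `:internal` flag is now only cast when the caller opts in, so ordinary admin API updates cannot flip it. An illustrative call; the event name is a placeholder, not the authoritative enum:

```elixir
# Sketch: only callers passing update_internal: true may set :internal.
Pleroma.Webhook.create(
  %{url: "https://example.com/hook", events: [:"report.created"], internal: true},
  update_internal: true
)
```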
diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex
index 4c17640537..0b570b70b0 100644
--- a/lib/pleroma/workers/attachments_cleanup_worker.ex
+++ b/lib/pleroma/workers/attachments_cleanup_worker.ex
@@ -8,7 +8,7 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
alias Pleroma.Object
alias Pleroma.Repo
- use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup"
+ use Pleroma.Workers.WorkerHelper, queue: "slow"
@impl Oban.Worker
def perform(%Job{
diff --git a/lib/pleroma/workers/backup_worker.ex b/lib/pleroma/workers/backup_worker.ex
index a485ddb4b4..54ac31a3c6 100644
--- a/lib/pleroma/workers/backup_worker.ex
+++ b/lib/pleroma/workers/backup_worker.ex
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.BackupWorker do
- use Oban.Worker, queue: :backup, max_attempts: 1
+ use Oban.Worker, queue: :slow, max_attempts: 1
alias Oban.Job
alias Pleroma.User.Backup
diff --git a/lib/pleroma/workers/cron/new_users_digest_worker.ex b/lib/pleroma/workers/cron/new_users_digest_worker.ex
index 1c3e445aa9..d2abb2d3b1 100644
--- a/lib/pleroma/workers/cron/new_users_digest_worker.ex
+++ b/lib/pleroma/workers/cron/new_users_digest_worker.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Workers.Cron.NewUsersDigestWorker do
import Ecto.Query
- use Pleroma.Workers.WorkerHelper, queue: "mailer"
+ use Pleroma.Workers.WorkerHelper, queue: "background"
@impl Oban.Worker
def perform(_job) do
diff --git a/lib/pleroma/workers/mailer_worker.ex b/lib/pleroma/workers/mailer_worker.ex
index 940716558f..652bf77e01 100644
--- a/lib/pleroma/workers/mailer_worker.ex
+++ b/lib/pleroma/workers/mailer_worker.ex
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.MailerWorker do
- use Pleroma.Workers.WorkerHelper, queue: "mailer"
+ use Pleroma.Workers.WorkerHelper, queue: "background"
@impl Oban.Worker
def perform(%Job{args: %{"op" => "email", "encoded_email" => encoded_email, "config" => config}}) do
diff --git a/lib/pleroma/workers/mute_expire_worker.ex b/lib/pleroma/workers/mute_expire_worker.ex
index 8ce458d488..8ad287a7f9 100644
--- a/lib/pleroma/workers/mute_expire_worker.ex
+++ b/lib/pleroma/workers/mute_expire_worker.ex
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.MuteExpireWorker do
- use Pleroma.Workers.WorkerHelper, queue: "mute_expire"
+ use Pleroma.Workers.WorkerHelper, queue: "background"
@impl Oban.Worker
def perform(%Job{args: %{"op" => "unmute_user", "muter_id" => muter_id, "mutee_id" => mutee_id}}) do
diff --git a/lib/pleroma/workers/notification_worker.ex b/lib/pleroma/workers/notification_worker.ex
index a94c5a70e5..92ac7a033e 100644
--- a/lib/pleroma/workers/notification_worker.ex
+++ b/lib/pleroma/workers/notification_worker.ex
@@ -14,8 +14,9 @@ defmodule Pleroma.Workers.NotificationWorker do
@impl Oban.Worker
@spec perform(Oban.Job.t()) :: {:error, :activity_not_found} | {:ok, [Pleroma.Notification.t()]}
def perform(%Job{args: %{"op" => "create", "activity_id" => activity_id}}) do
- with %Activity{} = activity <- find_activity(activity_id) do
- Notification.create_notifications(activity)
+ with %Activity{} = activity <- find_activity(activity_id),
+ {:ok, notifications} <- Notification.create_notifications(activity) do
+ Notification.send(notifications)
end
end
diff --git a/lib/pleroma/workers/poll_worker.ex b/lib/pleroma/workers/poll_worker.ex
index 022d026f87..70df541931 100644
--- a/lib/pleroma/workers/poll_worker.ex
+++ b/lib/pleroma/workers/poll_worker.ex
@@ -6,7 +6,7 @@ defmodule Pleroma.Workers.PollWorker do
@moduledoc """
Generates notifications when a poll ends.
"""
- use Pleroma.Workers.WorkerHelper, queue: "poll_notifications"
+ use Pleroma.Workers.WorkerHelper, queue: "background"
alias Pleroma.Activity
alias Pleroma.Notification
diff --git a/lib/pleroma/workers/purge_expired_activity.ex b/lib/pleroma/workers/purge_expired_activity.ex
index e554684feb..a65593b6e5 100644
--- a/lib/pleroma/workers/purge_expired_activity.ex
+++ b/lib/pleroma/workers/purge_expired_activity.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredActivity do
Worker which purges expired activity.
"""
- use Oban.Worker, queue: :activity_expiration, max_attempts: 1, unique: [period: :infinity]
+ use Oban.Worker, queue: :slow, max_attempts: 1, unique: [period: :infinity]
import Ecto.Query
@@ -59,7 +59,7 @@ defp find_user(ap_id) do
def get_expiration(id) do
from(j in Oban.Job,
where: j.state == "scheduled",
- where: j.queue == "activity_expiration",
+ where: j.queue == "slow",
where: fragment("?->>'activity_id' = ?", j.args, ^id)
)
|> Pleroma.Repo.one()
diff --git a/lib/pleroma/workers/purge_expired_filter.ex b/lib/pleroma/workers/purge_expired_filter.ex
index 9114aeb7f4..1f6931e4c2 100644
--- a/lib/pleroma/workers/purge_expired_filter.ex
+++ b/lib/pleroma/workers/purge_expired_filter.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredFilter do
Worker which purges expired filters
"""
- use Oban.Worker, queue: :filter_expiration, max_attempts: 1, unique: [period: :infinity]
+ use Oban.Worker, queue: :background, max_attempts: 1, unique: [period: :infinity]
import Ecto.Query
@@ -38,7 +38,7 @@ def timeout(_job), do: :timer.seconds(5)
def get_expiration(id) do
from(j in Job,
where: j.state == "scheduled",
- where: j.queue == "filter_expiration",
+ where: j.queue == "background",
where: fragment("?->'filter_id' = ?", j.args, ^id)
)
|> Repo.one()
diff --git a/lib/pleroma/workers/purge_expired_token.ex b/lib/pleroma/workers/purge_expired_token.ex
index 2ccd9e80b9..1854bf5619 100644
--- a/lib/pleroma/workers/purge_expired_token.ex
+++ b/lib/pleroma/workers/purge_expired_token.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.PurgeExpiredToken do
Worker which purges expired OAuth tokens
"""
- use Oban.Worker, queue: :token_expiration, max_attempts: 1
+ use Oban.Worker, queue: :background, max_attempts: 1
@spec enqueue(%{token_id: integer(), valid_until: DateTime.t(), mod: module()}) ::
{:ok, Oban.Job.t()} | {:error, Ecto.Changeset.t()}
diff --git a/lib/pleroma/workers/receiver_worker.ex b/lib/pleroma/workers/receiver_worker.ex
index 1dddd8d2e3..8b2052c232 100644
--- a/lib/pleroma/workers/receiver_worker.ex
+++ b/lib/pleroma/workers/receiver_worker.ex
@@ -52,7 +52,8 @@ defp process_errors(errors) do
{:error, {:reject, reason}} -> {:cancel, reason}
{:signature, false} -> {:cancel, :invalid_signature}
{:error, {:error, reason = "Object has been deleted"}} -> {:cancel, reason}
- e -> e
+ {:error, _} = e -> e
+ e -> {:error, e}
end
end
end
diff --git a/lib/pleroma/workers/remote_fetcher_worker.ex b/lib/pleroma/workers/remote_fetcher_worker.ex
index c264184833..ed04c54b2b 100644
--- a/lib/pleroma/workers/remote_fetcher_worker.ex
+++ b/lib/pleroma/workers/remote_fetcher_worker.ex
@@ -5,7 +5,7 @@
defmodule Pleroma.Workers.RemoteFetcherWorker do
alias Pleroma.Object.Fetcher
- use Pleroma.Workers.WorkerHelper, queue: "remote_fetcher"
+ use Pleroma.Workers.WorkerHelper, queue: "background"
@impl Oban.Worker
def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do
diff --git a/lib/pleroma/workers/rich_media_expiration_worker.ex b/lib/pleroma/workers/rich_media_expiration_worker.ex
new file mode 100644
index 0000000000..0b74687cfd
--- /dev/null
+++ b/lib/pleroma/workers/rich_media_expiration_worker.ex
@@ -0,0 +1,15 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.RichMediaExpirationWorker do
+ alias Pleroma.Web.RichMedia.Card
+
+ use Oban.Worker,
+ queue: :background
+
+ @impl Oban.Worker
+ def perform(%Job{args: %{"url" => url} = _args}) do
+ Card.delete(url)
+ end
+end
diff --git a/lib/pleroma/workers/scheduled_activity_worker.ex b/lib/pleroma/workers/scheduled_activity_worker.ex
index 4df84d00f9..ab62686f42 100644
--- a/lib/pleroma/workers/scheduled_activity_worker.ex
+++ b/lib/pleroma/workers/scheduled_activity_worker.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Workers.ScheduledActivityWorker do
The worker to post scheduled activity.
"""
- use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
+ use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"
alias Pleroma.Repo
alias Pleroma.ScheduledActivity
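The worker changes above fold most dedicated Oban queues (`mailer`, `mute_expire`, `poll_notifications`, `activity_expiration`, `filter_expiration`, `token_expiration`, `remote_fetcher`, `attachments_cleanup`, `backup`, `scheduled_activities`) into `background`, `slow`, and `federator_outgoing`. Any `Oban` queue override in site config should list only the surviving queues; the names and limits below are illustrative rather than the authoritative list for this revision:

```elixir
config :pleroma, Oban,
  queues: [
    federator_incoming: 50,
    federator_outgoing: 50,
    background: 20,
    slow: 5
  ]
```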
diff --git a/lib/soapbox.ex b/lib/soapbox.ex
deleted file mode 100644
index 88b597147d..0000000000
--- a/lib/soapbox.ex
+++ /dev/null
@@ -1,5 +0,0 @@
-defmodule Soapbox do
- @version "3.0.0"
-
- def version, do: @version
-end
diff --git a/mix.exs b/mix.exs
index 5852f672d8..295bdbb402 100644
--- a/mix.exs
+++ b/mix.exs
@@ -6,10 +6,10 @@ defmodule Pleroma.Mixfile do
def project do
[
app: :pleroma,
- name: "Rebased",
+ name: "pl",
compat_name: "Pleroma",
version: version("2.6.52"),
- elixir: "~> 1.11",
+ elixir: "~> 1.13",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: Mix.compilers(),
elixirc_options: [warnings_as_errors: warnings_as_errors()],
@@ -183,7 +183,6 @@ defp deps do
ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"},
{:restarter, path: "./restarter"},
{:majic, "~> 1.0"},
- {:oembed_providers, "~> 0.1.0"},
{:open_api_spex, "~> 3.16"},
{:ecto_psql_extras, "~> 0.6"},
{:vix, "~> 0.26.0"},
diff --git a/mix.lock b/mix.lock
index e4e9ccb62d..53a5b26d65 100644
--- a/mix.lock
+++ b/mix.lock
@@ -99,7 +99,6 @@
"oauther": {:hex, :oauther, "1.3.0", "82b399607f0ca9d01c640438b34d74ebd9e4acd716508f868e864537ecdb1f76", [:mix], [], "hexpm", "78eb888ea875c72ca27b0864a6f550bc6ee84f2eeca37b093d3d833fbcaec04e"},
"oban": {:hex, :oban, "2.13.6", "a0cb1bce3bd393770512231fb5a3695fa19fd3af10d7575bf73f837aee7abf43", [:mix], [{:ecto_sql, "~> 3.6", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c1c5eb16f377b3cbbf2ea14be24d20e3d91285af9d1ac86260b7c2af5464887"},
"octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"},
- "oembed_providers": {:hex, :oembed_providers, "0.1.0", "9b336ee5f3ca20ee4ed005383c74b154d30d0abeb98e95828855c0e2841ae46b", [:mix], [{:glob, "~> 1.0", [hex: :glob, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "ac1dda0f743aa6fdead3eef59decfefc9de91d550bf0805b8fce16ed10d421ba"},
"open_api_spex": {:hex, :open_api_spex, "3.18.2", "8c855e83bfe8bf81603d919d6e892541eafece3720f34d1700b58024dadde247", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "aa3e6dcfc0ad6a02596b2172662da21c9dd848dac145ea9e603f54e3d81b8d2b"},
"parallel_stream": {:hex, :parallel_stream, "1.0.6", "b967be2b23f0f6787fab7ed681b4c45a215a81481fb62b01a5b750fa8f30f76c", [:mix], [], "hexpm", "639b2e8749e11b87b9eb42f2ad325d161c170b39b288ac8d04c4f31f8f0823eb"},
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
diff --git a/priv/gettext/config_descriptions.pot b/priv/gettext/config_descriptions.pot
index 4f60e1c854..b4792868b6 100644
--- a/priv/gettext/config_descriptions.pot
+++ b/priv/gettext/config_descriptions.pot
@@ -5973,3 +5973,87 @@ msgstr ""
msgctxt "config label at :pleroma-:instance > :languages"
msgid "Languages"
msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-:mrf_emoji"
+msgid "Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html)."
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-:mrf_emoji > :federated_timeline_removal_shortcode"
+msgid " A list of patterns which result in message with emojis whose shortcodes match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-:mrf_emoji > :federated_timeline_removal_url"
+msgid " A list of patterns which result in message with emojis whose URLs match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-:mrf_emoji > :remove_shortcode"
+msgid " A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-:mrf_emoji > :remove_url"
+msgid " A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.\n\n Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.\n"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-Pleroma.User.Backup > :process_chunk_size"
+msgid "The number of activities to fetch in the backup job for each chunk."
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-Pleroma.User.Backup > :process_wait_time"
+msgid "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed."
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-:mrf_emoji"
+msgid "MRF Emoji"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-:mrf_emoji > :federated_timeline_removal_shortcode"
+msgid "Federated timeline removal shortcode"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-:mrf_emoji > :federated_timeline_removal_url"
+msgid "Federated timeline removal url"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-:mrf_emoji > :remove_shortcode"
+msgid "Remove shortcode"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-:mrf_emoji > :remove_url"
+msgid "Remove url"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-Pleroma.User.Backup > :process_chunk_size"
+msgid "Process Chunk Size"
+msgstr ""
+
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-Pleroma.User.Backup > :process_wait_time"
+msgid "Process Wait Time"
+msgstr ""
diff --git a/priv/gettext/errors.pot b/priv/gettext/errors.pot
index d320ee1bdd..aca77f8fa6 100644
--- a/priv/gettext/errors.pot
+++ b/priv/gettext/errors.pot
@@ -110,7 +110,7 @@ msgstr ""
msgid "Can't display this activity"
msgstr ""
-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:334
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:346
#, elixir-autogen, elixir-format
msgid "Can't find user"
msgstr ""
@@ -198,7 +198,7 @@ msgstr ""
msgid "Invalid password."
msgstr ""
-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:267
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:279
#, elixir-autogen, elixir-format
msgid "Invalid request"
msgstr ""
@@ -225,7 +225,7 @@ msgstr ""
#: lib/pleroma/web/feed/tag_controller.ex:16
#: lib/pleroma/web/feed/user_controller.ex:69
#: lib/pleroma/web/o_status/o_status_controller.ex:132
-#: lib/pleroma/web/plugs/uploaded_media.ex:104
+#: lib/pleroma/web/plugs/uploaded_media.ex:84
#, elixir-autogen, elixir-format
msgid "Not found"
msgstr ""
@@ -235,7 +235,7 @@ msgstr ""
msgid "Poll's author can't vote"
msgstr ""
-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:499
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:511
#: lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex:20
#: lib/pleroma/web/mastodon_api/controllers/poll_controller.ex:39
#: lib/pleroma/web/mastodon_api/controllers/poll_controller.ex:51
@@ -341,7 +341,7 @@ msgstr ""
msgid "CAPTCHA expired"
msgstr ""
-#: lib/pleroma/web/plugs/uploaded_media.ex:77
+#: lib/pleroma/web/plugs/uploaded_media.ex:57
#, elixir-autogen, elixir-format
msgid "Failed"
msgstr ""
@@ -361,7 +361,7 @@ msgstr ""
msgid "Insufficient permissions: %{permissions}."
msgstr ""
-#: lib/pleroma/web/plugs/uploaded_media.ex:131
+#: lib/pleroma/web/plugs/uploaded_media.ex:111
#, elixir-autogen, elixir-format
msgid "Internal Error"
msgstr ""
@@ -557,7 +557,7 @@ msgstr ""
msgid "Access denied"
msgstr ""
-#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:331
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:343
#, elixir-autogen, elixir-format
msgid "This API requires an authenticated user"
msgstr ""
@@ -567,7 +567,7 @@ msgstr ""
msgid "User is not an admin."
msgstr ""
-#: lib/pleroma/user/backup.ex:73
+#: lib/pleroma/user/backup.ex:78
#, elixir-format
msgid "Last export was less than a day ago"
msgid_plural "Last export was less than %{days} days ago"
@@ -607,3 +607,23 @@ msgstr ""
#, elixir-autogen, elixir-format
msgid "User isn't privileged."
msgstr ""
+
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:267
+#, elixir-autogen, elixir-format
+msgid "Bio is too long"
+msgstr ""
+
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:270
+#, elixir-autogen, elixir-format
+msgid "Name is too long"
+msgstr ""
+
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:273
+#, elixir-autogen, elixir-format
+msgid "One or more field entries are too long"
+msgstr ""
+
+#: lib/pleroma/web/mastodon_api/controllers/account_controller.ex:276
+#, elixir-autogen, elixir-format
+msgid "Too many field entries"
+msgstr ""
diff --git a/priv/gettext/oauth_scopes.pot b/priv/gettext/oauth_scopes.pot
index 50ad0dd9ed..83328770e5 100644
--- a/priv/gettext/oauth_scopes.pot
+++ b/priv/gettext/oauth_scopes.pot
@@ -219,3 +219,43 @@ msgstr ""
#, elixir-autogen, elixir-format
msgid "read:mutes"
msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "push"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "read:backups"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "read:chats"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "read:media"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "read:reports"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "write:chats"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "write:follow"
+msgstr ""
+
+#: lib/pleroma/web/api_spec/scopes/translator.ex:5
+#, elixir-autogen, elixir-format
+msgid "write:reports"
+msgstr ""
diff --git a/priv/repo/migrations/20220319000000_add_status_to_notifications_enum.exs b/priv/repo/migrations/20220319000000_add_status_to_notifications_enum.exs
index 62c0afb63a..cea32f6c33 100644
--- a/priv/repo/migrations/20220319000000_add_status_to_notifications_enum.exs
+++ b/priv/repo/migrations/20220319000000_add_status_to_notifications_enum.exs
@@ -36,7 +36,7 @@ def down do
'reblog',
'favourite',
'pleroma:report',
- 'poll
+ 'poll'
)
"""
|> execute()
diff --git a/priv/repo/migrations/20220819171321_add_pleroma_participation_accepted_to_notifications_enum.exs b/priv/repo/migrations/20220819171321_add_pleroma_participation_accepted_to_notifications_enum.exs
index 0ad342040d..60b55d2ef4 100644
--- a/priv/repo/migrations/20220819171321_add_pleroma_participation_accepted_to_notifications_enum.exs
+++ b/priv/repo/migrations/20220819171321_add_pleroma_participation_accepted_to_notifications_enum.exs
@@ -52,7 +52,8 @@ def down do
'favourite',
'pleroma:report',
'poll',
- 'status'
+ 'status',
+ 'update'
)
"""
|> execute()
diff --git a/priv/repo/migrations/20240207035927_create_rich_media_card.exs b/priv/repo/migrations/20240207035927_create_rich_media_card.exs
new file mode 100644
index 0000000000..b5e48bccb6
--- /dev/null
+++ b/priv/repo/migrations/20240207035927_create_rich_media_card.exs
@@ -0,0 +1,14 @@
+defmodule Pleroma.Repo.Migrations.CreateRichMediaCard do
+ use Ecto.Migration
+
+ def change do
+ create table(:rich_media_card) do
+ add(:url_hash, :bytea)
+ add(:fields, :map)
+
+ timestamps()
+ end
+
+ create(unique_index(:rich_media_card, [:url_hash]))
+ end
+end
diff --git a/priv/repo/migrations/20240406000000_add_hint_to_rules.exs b/priv/repo/migrations/20240406000000_add_hint_to_rules.exs
new file mode 100644
index 0000000000..2732905602
--- /dev/null
+++ b/priv/repo/migrations/20240406000000_add_hint_to_rules.exs
@@ -0,0 +1,13 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Repo.Migrations.AddHintToRules do
+ use Ecto.Migration
+
+ def change do
+ alter table(:rules) do
+ add_if_not_exists(:hint, :text)
+ end
+ end
+end
diff --git a/priv/repo/migrations/20240527144418_oban_queues_refactor.exs b/priv/repo/migrations/20240527144418_oban_queues_refactor.exs
new file mode 100644
index 0000000000..355a463f5b
--- /dev/null
+++ b/priv/repo/migrations/20240527144418_oban_queues_refactor.exs
@@ -0,0 +1,33 @@
+defmodule Pleroma.Repo.Migrations.ObanQueuesRefactor do
+ use Ecto.Migration
+
+ @changed_queues [
+ {"attachments_cleanup", "slow"},
+ {"check_domain_resolve", "slow"},
+ {"mailer", "background"},
+ {"mute_expire", "background"},
+ {"poll_notifications", "background"},
+ {"activity_expiration", "slow"},
+ {"filter_expiration", "background"},
+ {"token_expiration", "background"},
+ {"remote_fetcher", "background"},
+ {"rich_media_expiration", "background"}
+ ]
+
+ def up do
+ Enum.each(@changed_queues, fn {old, new} ->
+ execute("UPDATE oban_jobs SET queue = '#{new}' WHERE queue = '#{old}';")
+ end)
+
+ # Handled special as reverting this would not be ideal and leaving it is harmless
+ execute(
+ "UPDATE oban_jobs SET queue = 'federator_outgoing' WHERE queue = 'scheduled_activities';"
+ )
+ end
+
+ def down do
+ # Just move all slow queue jobs to background queue if we are reverting
+ # as the slow queue will not be processing jobs
+ execute("UPDATE oban_jobs SET queue = 'background' WHERE queue = 'slow';")
+ end
+end
diff --git a/priv/static/schemas/litepub-0.1.jsonld b/priv/static/schemas/litepub-0.1.jsonld
index 572b690302..6d3ae5686f 100644
--- a/priv/static/schemas/litepub-0.1.jsonld
+++ b/priv/static/schemas/litepub-0.1.jsonld
@@ -2,6 +2,7 @@
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
+ "https://purl.archive.org/socialweb/webfinger",
{
"Emoji": "toot:Emoji",
"Hashtag": "as:Hashtag",
diff --git a/rel/files/bin/pleroma_ctl b/rel/files/bin/pleroma_ctl
index 87c4865140..6f0dba3a8f 100755
--- a/rel/files/bin/pleroma_ctl
+++ b/rel/files/bin/pleroma_ctl
@@ -134,7 +134,7 @@ if [ -z "$1" ] || [ "$1" = "help" ]; then
"
else
- SCRIPT=$(readlink -f "$0")
+ SCRIPT=$(realpath "$0")
SCRIPTPATH=$(dirname "$SCRIPT")
FULL_ARGS="$*"
diff --git a/supplemental/search/fastembed-api/Dockerfile b/supplemental/search/fastembed-api/Dockerfile
new file mode 100644
index 0000000000..c1e0ef51f3
--- /dev/null
+++ b/supplemental/search/fastembed-api/Dockerfile
@@ -0,0 +1,9 @@
+FROM python:3.9
+
+WORKDIR /code
+COPY fastembed-server.py /workdir/fastembed-server.py
+COPY requirements.txt /workdir/requirements.txt
+
+RUN pip install -r /workdir/requirements.txt
+
+CMD ["python", "/workdir/fastembed-server.py"]
diff --git a/supplemental/search/fastembed-api/README.md b/supplemental/search/fastembed-api/README.md
new file mode 100644
index 0000000000..63a037207d
--- /dev/null
+++ b/supplemental/search/fastembed-api/README.md
@@ -0,0 +1,6 @@
+# About
+This is a minimal implementation of the [OpenAI Embeddings API](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) meant to be used with the QdrantSearch backend.
+
+# Usage
+
+The easiest way to run it is to just use docker compose with `docker compose up`. This starts the server on the default configured port. Different models can be used; for a full list of supported models, check the [fastembed documentation](https://qdrant.github.io/fastembed/examples/Supported_Models/). The first time a model is requested, it will be downloaded, which can take a few seconds.
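+
+Once the server is up, you can sanity-check the endpoint with a small request in the OpenAI embeddings format. The snippet below is only a sketch: it assumes the default port `11345` from `compose.yml` and uses `BAAI/bge-small-en-v1.5` as an example of a model name from the supported list.
+
+```python
+import json
+import urllib.request
+
+payload = json.dumps({
+    "model": "BAAI/bge-small-en-v1.5",
+    "input": "the quick brown fox",
+}).encode("utf-8")
+
+request = urllib.request.Request(
+    "http://localhost:11345/v1/embeddings",
+    data=payload,
+    headers={"Content-Type": "application/json"},
+)
+
+# The response mirrors the OpenAI shape: {"data": [{"embedding": [...]}]}
+with urllib.request.urlopen(request) as response:
+    embedding = json.load(response)["data"][0]["embedding"]
+
+print(f"embedding dimension: {len(embedding)}")
+```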
diff --git a/supplemental/search/fastembed-api/compose.yml b/supplemental/search/fastembed-api/compose.yml
new file mode 100644
index 0000000000..d4cb31722f
--- /dev/null
+++ b/supplemental/search/fastembed-api/compose.yml
@@ -0,0 +1,5 @@
+services:
+ web:
+ build: .
+ ports:
+ - "11345:11345"
diff --git a/supplemental/search/fastembed-api/fastembed-server.py b/supplemental/search/fastembed-api/fastembed-server.py
new file mode 100644
index 0000000000..02da69db26
--- /dev/null
+++ b/supplemental/search/fastembed-api/fastembed-server.py
@@ -0,0 +1,27 @@
+from fastembed import TextEmbedding
+from fastapi import FastAPI
+from pydantic import BaseModel
+
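+# Cache of loaded TextEmbedding models, keyed by model name, so each model is only loaded once.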
+models = {}
+
+app = FastAPI()
+
+class EmbeddingRequest(BaseModel):
+ model: str
+ input: str
+
+@app.post("/v1/embeddings")
+def embeddings(request: EmbeddingRequest):
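+ # Load the requested model on first use and keep it cached for later requests.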
+ model = models.get(request.model) or TextEmbedding(request.model)
+ models[request.model] = model
+ embeddings = next(model.embed(request.input)).tolist()
+ return {"data": [{"embedding": embeddings}]}
+
+@app.get("/health")
+def health():
+ return {"status": "ok"}
+
+if __name__ == "__main__":
+ import uvicorn
+
+ uvicorn.run(app, host="0.0.0.0", port=11345)
diff --git a/supplemental/search/fastembed-api/requirements.txt b/supplemental/search/fastembed-api/requirements.txt
new file mode 100644
index 0000000000..db67a84022
--- /dev/null
+++ b/supplemental/search/fastembed-api/requirements.txt
@@ -0,0 +1,4 @@
+fastapi==0.111.0
+fastembed==0.2.7
+pydantic==1.10.15
+uvicorn==0.29.0
diff --git a/test/fixtures/rich_media/reddit.html b/test/fixtures/rich_media/reddit.html
new file mode 100644
index 0000000000..a99bb6884f
--- /dev/null
+++ b/test/fixtures/rich_media/reddit.html
@@ -0,0 +1,392 @@
+Twitter/X is getting weirder; where now for security news and analysis? : cybersecuritythis post was submitted on
+241 points (92% upvoted)
+shortlink:
+joinleave688,076 readers592 users here now
+a community for
+π Rendered by PID 29 on reddit-service-r2-slowlane-65c5c76ff5-v258h at 2024-02-19 03:13:22.575220+00:00 running 5b0a0b2 country code: US.
\ No newline at end of file
diff --git a/test/fixtures/webfinger/graf-imposter-webfinger.json b/test/fixtures/webfinger/graf-imposter-webfinger.json
new file mode 100644
index 0000000000..e7010f606d
--- /dev/null
+++ b/test/fixtures/webfinger/graf-imposter-webfinger.json
@@ -0,0 +1,41 @@
+{
+ "subject": "acct:graf@poa.st",
+ "aliases": [
+ "https://fba.ryona.agenc/webfingertest"
+ ],
+ "links": [
+ {
+ "rel": "http://webfinger.net/rel/profile-page",
+ "type": "text/html",
+ "href": "https://fba.ryona.agenc/webfingertest"
+ },
+ {
+ "rel": "self",
+ "type": "application/activity+json",
+ "href": "https://fba.ryona.agenc/webfingertest"
+ },
+ {
+ "rel": "http://ostatus.org/schema/1.0/subscribe",
+ "template": "https://fba.ryona.agenc/contact/follow?url={uri}"
+ },
+ {
+ "rel": "http://schemas.google.com/g/2010#updates-from",
+ "type": "application/atom+xml",
+ "href": ""
+ },
+ {
+ "rel": "salmon",
+ "href": "https://fba.ryona.agenc/salmon/friendica"
+ },
+ {
+ "rel": "http://microformats.org/profile/hcard",
+ "type": "text/html",
+ "href": "https://fba.ryona.agenc/hcard/friendica"
+ },
+ {
+ "rel": "http://joindiaspora.com/seed_location",
+ "type": "text/html",
+ "href": "https://fba.ryona.agenc"
+ }
+ ]
+}
diff --git a/test/pleroma/html_test.exs b/test/pleroma/html_test.exs
index b99689903e..1be1619711 100644
--- a/test/pleroma/html_test.exs
+++ b/test/pleroma/html_test.exs
@@ -202,7 +202,7 @@ test "extracts the url" do
})
object = Object.normalize(activity, fetch: false)
- {:ok, url} = HTML.extract_first_external_url_from_object(object)
+ url = HTML.extract_first_external_url_from_object(object)
assert url == "https://github.com/komeiji-satori/Dress"
end
@@ -217,7 +217,7 @@ test "skips mentions" do
})
object = Object.normalize(activity, fetch: false)
- {:ok, url} = HTML.extract_first_external_url_from_object(object)
+ url = HTML.extract_first_external_url_from_object(object)
assert url == "https://github.com/syuilo/misskey/blob/develop/docs/setup.en.md"
@@ -233,7 +233,7 @@ test "skips hashtags" do
})
object = Object.normalize(activity, fetch: false)
- {:ok, url} = HTML.extract_first_external_url_from_object(object)
+ url = HTML.extract_first_external_url_from_object(object)
assert url == "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=72255140"
end
@@ -249,7 +249,7 @@ test "skips microformats hashtags" do
})
object = Object.normalize(activity, fetch: false)
- {:ok, url} = HTML.extract_first_external_url_from_object(object)
+ url = HTML.extract_first_external_url_from_object(object)
assert url == "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=72255140"
end
@@ -261,7 +261,7 @@ test "does not crash when there is an HTML entity in a link" do
object = Object.normalize(activity, fetch: false)
- assert {:ok, nil} = HTML.extract_first_external_url_from_object(object)
+ assert nil == HTML.extract_first_external_url_from_object(object)
end
test "skips attachment links" do
@@ -275,7 +275,7 @@ test "skips attachment links" do
object = Object.normalize(activity, fetch: false)
- assert {:ok, nil} = HTML.extract_first_external_url_from_object(object)
+ assert nil == HTML.extract_first_external_url_from_object(object)
end
end
end
diff --git a/test/pleroma/multi_language_test.exs b/test/pleroma/multi_language_test.exs
new file mode 100644
index 0000000000..3db87be65f
--- /dev/null
+++ b/test/pleroma/multi_language_test.exs
@@ -0,0 +1,15 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.MultiLanguageTest do
+ use Pleroma.DataCase, async: true
+
+ alias Pleroma.MultiLanguage
+
+ describe "str_to_map" do
+ test "" do
+ assert MultiLanguage.str_to_map("foo") == %{"und" => "foo"}
+ end
+ end
+end
diff --git a/test/pleroma/notification_test.exs b/test/pleroma/notification_test.exs
index 62ff2bfde6..287ef686c3 100644
--- a/test/pleroma/notification_test.exs
+++ b/test/pleroma/notification_test.exs
@@ -6,7 +6,6 @@ defmodule Pleroma.NotificationTest do
use Pleroma.DataCase, async: false
import Pleroma.Factory
- import Mock
alias Pleroma.FollowingRelationship
alias Pleroma.Notification
@@ -18,7 +17,6 @@ defmodule Pleroma.NotificationTest do
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MastodonAPI.NotificationView
- alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
setup do
@@ -207,6 +205,21 @@ test "doesn't create notification for events without participation approval" do
assert length(user_notifications) == 0
end
+ test "does not create subscriber notification if mentioned" do
+ user = insert(:user)
+ subscriber = insert(:user)
+
+ User.subscribe(subscriber, user)
+
+ {:ok, status} = CommonAPI.post(user, %{status: "mentioning @#{subscriber.nickname}"})
+ {:ok, [notification] = notifications} = Notification.create_notifications(status)
+
+ assert length(notifications) == 1
+
+ assert notification.user_id == subscriber.id
+ assert notification.type == "mention"
+ end
+
test "it sends edited notifications to those who repeated a status" do
user = insert(:user)
repeated_user = insert(:user)
@@ -304,171 +317,7 @@ test "create_poll_notifications/1" do
assert [user2.id, user3.id, user1.id] == Enum.map(notifications, & &1.user_id)
end
- describe "CommonApi.post/2 notification-related functionality" do
- test_with_mock "creates but does NOT send notification to blocker user",
- Push,
- [:passthrough],
- [] do
- user = insert(:user)
- blocker = insert(:user)
- {:ok, _user_relationship} = User.block(blocker, user)
-
- {:ok, _activity} = CommonAPI.post(user, %{status: "hey @#{blocker.nickname}!"})
-
- Pleroma.Tests.ObanHelpers.perform_all()
-
- blocker_id = blocker.id
- assert [%Notification{user_id: ^blocker_id}] = Repo.all(Notification)
- refute called(Push.send(:_))
- end
-
- test_with_mock "creates but does NOT send notification to notification-muter user",
- Push,
- [:passthrough],
- [] do
- user = insert(:user)
- muter = insert(:user)
- {:ok, _user_relationships} = User.mute(muter, user)
-
- {:ok, _activity} = CommonAPI.post(user, %{status: "hey @#{muter.nickname}!"})
- Pleroma.Tests.ObanHelpers.perform_all()
-
- muter_id = muter.id
- assert [%Notification{user_id: ^muter_id}] = Repo.all(Notification)
- refute called(Push.send(:_))
- end
-
- test_with_mock "creates but does NOT send notification to thread-muter user",
- Push,
- [:passthrough],
- [] do
- user = insert(:user)
- thread_muter = insert(:user)
-
- {:ok, activity} = CommonAPI.post(user, %{status: "hey @#{thread_muter.nickname}!"})
-
- Pleroma.Tests.ObanHelpers.perform_all()
- [pre_mute_notification] = Repo.all(Notification)
-
- {:ok, _} = CommonAPI.add_mute(thread_muter, activity)
-
- {:ok, _same_context_activity} =
- CommonAPI.post(user, %{
- status: "hey-hey-hey @#{thread_muter.nickname}!",
- in_reply_to_status_id: activity.id
- })
-
- Pleroma.Tests.ObanHelpers.perform_all()
-
- [post_mute_notification] =
- Repo.all(
- from(n in Notification,
- where: n.id != ^pre_mute_notification.id and n.user_id == ^thread_muter.id,
- order_by: n.id
- )
- )
-
- pre_mute_notification_id = pre_mute_notification.id
- post_mute_notification_id = post_mute_notification.id
-
- assert called(
- Push.send(
- :meck.is(fn
- %Notification{id: ^pre_mute_notification_id} -> true
- _ -> false
- end)
- )
- )
-
- refute called(
- Push.send(
- :meck.is(fn
- %Notification{id: ^post_mute_notification_id} -> true
- _ -> false
- end)
- )
- )
- end
- end
-
describe "create_notification" do
- @tag needs_streamer: true
- test "it creates a notification for user and send to the 'user' and the 'user:notification' stream" do
- %{user: user, token: oauth_token} = oauth_access(["read"])
-
- task =
- Task.async(fn ->
- {:ok, _topic} = Streamer.get_topic_and_add_socket("user", user, oauth_token)
- assert_receive {:render_with_user, _, _, _, _}, 4_000
- end)
-
- task_user_notification =
- Task.async(fn ->
- {:ok, _topic} =
- Streamer.get_topic_and_add_socket("user:notification", user, oauth_token)
-
- assert_receive {:render_with_user, _, _, _, _}, 4_000
- end)
-
- activity = insert(:note_activity)
-
- notify = Notification.create_notification(activity, user)
- assert notify.user_id == user.id
- Task.await(task)
- Task.await(task_user_notification)
- end
-
- test "it creates a notification for user if the user blocks the activity author" do
- activity = insert(:note_activity)
- author = User.get_cached_by_ap_id(activity.data["actor"])
- user = insert(:user)
- {:ok, _user_relationship} = User.block(user, author)
-
- assert Notification.create_notification(activity, user)
- end
-
- test "it creates a notification for the user if the user mutes the activity author" do
- muter = insert(:user)
- muted = insert(:user)
- {:ok, _} = User.mute(muter, muted)
- muter = Repo.get(User, muter.id)
- {:ok, activity} = CommonAPI.post(muted, %{status: "Hi @#{muter.nickname}"})
-
- notification = Notification.create_notification(activity, muter)
-
- assert notification.id
- assert notification.seen
- end
-
- test "notification created if user is muted without notifications" do
- muter = insert(:user)
- muted = insert(:user)
-
- {:ok, _user_relationships} = User.mute(muter, muted, %{notifications: false})
-
- {:ok, activity} = CommonAPI.post(muted, %{status: "Hi @#{muter.nickname}"})
-
- assert Notification.create_notification(activity, muter)
- end
-
- test "it creates a notification for an activity from a muted thread" do
- muter = insert(:user)
- other_user = insert(:user)
- {:ok, activity} = CommonAPI.post(muter, %{status: "hey"})
- CommonAPI.add_mute(muter, activity)
-
- {:ok, activity} =
- CommonAPI.post(other_user, %{
- status: "Hi @#{muter.nickname}",
- in_reply_to_status_id: activity.id
- })
-
- notification = Notification.create_notification(activity, muter)
-
- assert notification.id
- assert notification.seen
- end
-
test "it disables notifications from strangers" do
follower = insert(:user)
@@ -748,9 +597,7 @@ test "it sets all notifications as read up to a specified notification ID" do
Pleroma.Tests.ObanHelpers.perform_all()
- [_, read_notification] = Notification.set_read_up_to(other_user, n2.id)
-
- assert read_notification.activity.object
+ Notification.set_read_up_to(other_user, n2.id)
[n3, n2, n1] = Notification.for_user(other_user)
@@ -856,7 +703,7 @@ test "it sends notifications to addressed users in new messages" do
status: "hey @#{other_user.nickname}!"
})
- {enabled_receivers, _disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert other_user in enabled_receivers
end
@@ -888,7 +735,7 @@ test "it sends notifications to mentioned users in new messages" do
{:ok, activity} = Transmogrifier.handle_incoming(create_activity)
- {enabled_receivers, _disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert other_user in enabled_receivers
end
@@ -915,7 +762,7 @@ test "it does not send notifications to users who are only cc in new messages" d
{:ok, activity} = Transmogrifier.handle_incoming(create_activity)
- {enabled_receivers, _disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert other_user not in enabled_receivers
end
@@ -932,8 +779,7 @@ test "it does not send notification to mentioned users in likes" do
{:ok, activity_two} = CommonAPI.favorite(third_user, activity_one.id)
- {enabled_receivers, _disabled_receivers} =
- Notification.get_notified_from_activity(activity_two)
+ enabled_receivers = Notification.get_notified_from_activity(activity_two)
assert other_user not in enabled_receivers
end
@@ -955,7 +801,7 @@ test "it only notifies the post's author in likes" do
|> Map.put("to", [other_user.ap_id | like_data["to"]])
|> ActivityPub.persist(local: true)
- {enabled_receivers, _disabled_receivers} = Notification.get_notified_from_activity(like)
+ enabled_receivers = Notification.get_notified_from_activity(like)
assert other_user not in enabled_receivers
end
@@ -972,39 +818,36 @@ test "it does not send notification to mentioned users in announces" do
{:ok, activity_two} = CommonAPI.repeat(activity_one.id, third_user)
- {enabled_receivers, _disabled_receivers} =
- Notification.get_notified_from_activity(activity_two)
+ enabled_receivers = Notification.get_notified_from_activity(activity_two)
assert other_user not in enabled_receivers
end
- test "it returns blocking recipient in disabled recipients list" do
+ test "it does not return blocking recipient in recipients list" do
user = insert(:user)
other_user = insert(:user)
{:ok, _user_relationship} = User.block(other_user, user)
{:ok, activity} = CommonAPI.post(user, %{status: "hey @#{other_user.nickname}!"})
- {enabled_receivers, disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert [] == enabled_receivers
- assert [other_user] == disabled_receivers
end
- test "it returns notification-muting recipient in disabled recipients list" do
+ test "it does not return notification-muting recipient in recipients list" do
user = insert(:user)
other_user = insert(:user)
{:ok, _user_relationships} = User.mute(other_user, user)
{:ok, activity} = CommonAPI.post(user, %{status: "hey @#{other_user.nickname}!"})
- {enabled_receivers, disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert [] == enabled_receivers
- assert [other_user] == disabled_receivers
end
- test "it returns thread-muting recipient in disabled recipients list" do
+ test "it does not return thread-muting recipient in recipients list" do
user = insert(:user)
other_user = insert(:user)
@@ -1018,14 +861,12 @@ test "it returns thread-muting recipient in disabled recipients list" do
in_reply_to_status_id: activity.id
})
- {enabled_receivers, disabled_receivers} =
- Notification.get_notified_from_activity(same_context_activity)
+ enabled_receivers = Notification.get_notified_from_activity(same_context_activity)
- assert [other_user] == disabled_receivers
refute other_user in enabled_receivers
end
- test "it returns non-following domain-blocking recipient in disabled recipients list" do
+ test "it does not return non-following domain-blocking recipient in recipients list" do
blocked_domain = "blocked.domain"
user = insert(:user, %{ap_id: "https://#{blocked_domain}/@actor"})
other_user = insert(:user)
@@ -1034,10 +875,9 @@ test "it returns non-following domain-blocking recipient in disabled recipients
{:ok, activity} = CommonAPI.post(user, %{status: "hey @#{other_user.nickname}!"})
- {enabled_receivers, disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert [] == enabled_receivers
- assert [other_user] == disabled_receivers
end
test "it returns following domain-blocking recipient in enabled recipients list" do
@@ -1050,10 +890,9 @@ test "it returns following domain-blocking recipient in enabled recipients list"
{:ok, activity} = CommonAPI.post(user, %{status: "hey @#{other_user.nickname}!"})
- {enabled_receivers, disabled_receivers} = Notification.get_notified_from_activity(activity)
+ enabled_receivers = Notification.get_notified_from_activity(activity)
assert [other_user] == enabled_receivers
- assert [] == disabled_receivers
end
test "it sends edited notifications to those who repeated a status" do
@@ -1073,11 +912,10 @@ test "it sends edited notifications to those who repeated a status" do
status: "hey @#{other_user.nickname}! mew mew"
})
- {enabled_receivers, _disabled_receivers} =
- Notification.get_notified_from_activity(edit_activity)
+ enabled_receivers = Notification.get_notified_from_activity(edit_activity)
assert repeated_user in enabled_receivers
- assert other_user not in enabled_receivers
+ refute other_user in enabled_receivers
end
end
@@ -1354,7 +1192,7 @@ test "it doesn't return notifications for muted thread", %{user: user} do
assert Notification.for_user(user) == []
end
- test "it returns notifications from a muted user when with_muted is set", %{user: user} do
+ test "it doesn't return notifications from a muted user when with_muted is set", %{user: user} do
muted = insert(:user)
{:ok, _user_relationships} = User.mute(user, muted)
@@ -1362,7 +1200,7 @@ test "it returns notifications from a muted user when with_muted is set", %{user
Pleroma.Tests.ObanHelpers.perform_all()
- assert length(Notification.for_user(user, %{with_muted: true})) == 1
+ assert Enum.empty?(Notification.for_user(user, %{with_muted: true}))
end
test "it doesn't return notifications from a blocked user when with_muted is set", %{
diff --git a/test/pleroma/scheduled_activity_test.exs b/test/pleroma/scheduled_activity_test.exs
index 4818e8bcf1..aaf643cfc5 100644
--- a/test/pleroma/scheduled_activity_test.exs
+++ b/test/pleroma/scheduled_activity_test.exs
@@ -31,8 +31,7 @@ test "scheduled activities with jobs when ScheduledActivity enabled" do
{:ok, sa1} = ScheduledActivity.create(user, attrs)
{:ok, sa2} = ScheduledActivity.create(user, attrs)
- jobs =
- Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args))
+ jobs = Repo.all(from(j in Oban.Job, where: j.queue == "federator_outgoing", select: j.args))
assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}]
end
diff --git a/test/pleroma/search/healthcheck_test.exs b/test/pleroma/search/healthcheck_test.exs
new file mode 100644
index 0000000000..e7649d9495
--- /dev/null
+++ b/test/pleroma/search/healthcheck_test.exs
@@ -0,0 +1,49 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Search.HealthcheckTest do
+ use Pleroma.DataCase
+
+ import Tesla.Mock
+
+ alias Pleroma.Search.Healthcheck
+
+ @good1 "http://good1.example.com/healthz"
+ @good2 "http://good2.example.com/health"
+ @bad "http://bad.example.com/healthy"
+
+ setup do
+ mock(fn
+ %{method: :get, url: @good1} ->
+ %Tesla.Env{
+ status: 200,
+ body: ""
+ }
+
+ %{method: :get, url: @good2} ->
+ %Tesla.Env{
+ status: 200,
+ body: ""
+ }
+
+ %{method: :get, url: @bad} ->
+ %Tesla.Env{
+ status: 503,
+ body: ""
+ }
+ end)
+
+ :ok
+ end
+
+ test "true for 200 responses" do
+ assert Healthcheck.check([@good1])
+ assert Healthcheck.check([@good1, @good2])
+ end
+
+ test "false if any response is not a 200" do
+ refute Healthcheck.check([@bad])
+ refute Healthcheck.check([@good1, @bad])
+ end
+end
diff --git a/test/pleroma/search/qdrant_search_test.exs b/test/pleroma/search/qdrant_search_test.exs
new file mode 100644
index 0000000000..47a77a3912
--- /dev/null
+++ b/test/pleroma/search/qdrant_search_test.exs
@@ -0,0 +1,199 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Search.QdrantSearchTest do
+ use Pleroma.DataCase, async: true
+ use Oban.Testing, repo: Pleroma.Repo
+
+ import Pleroma.Factory
+ import Mox
+
+ alias Pleroma.Search.QdrantSearch
+ alias Pleroma.UnstubbedConfigMock, as: Config
+ alias Pleroma.Web.CommonAPI
+ alias Pleroma.Workers.SearchIndexingWorker
+
+ describe "Qdrant search" do
+ test "returns the correct healthcheck endpoints" do
+ # No openai healthcheck URL
+ Config
+ |> expect(:get, 2, fn
+ [Pleroma.Search.QdrantSearch, key], nil ->
+ %{qdrant_url: "https://qdrant.url"}[key]
+ end)
+
+ [health_endpoint] = QdrantSearch.healthcheck_endpoints()
+
+ assert "https://qdrant.url/healthz" == health_endpoint
+
+ # Set openai healthcheck URL
+ Config
+ |> expect(:get, 2, fn
+ [Pleroma.Search.QdrantSearch, key], nil ->
+ %{qdrant_url: "https://qdrant.url", openai_healthcheck_url: "https://openai.url/health"}[
+ key
+ ]
+ end)
+
+ [_, health_endpoint] = QdrantSearch.healthcheck_endpoints()
+
+ assert "https://openai.url/health" == health_endpoint
+ end
+
+ test "searches for a term by encoding it and sending it to qdrant" do
+ user = insert(:user)
+
+ {:ok, activity} =
+ CommonAPI.post(user, %{
+ status: "guys i just don't wanna leave the swamp",
+ visibility: "public"
+ })
+
+ Config
+ |> expect(:get, 3, fn
+ [Pleroma.Search, :module], nil ->
+ QdrantSearch
+
+ [Pleroma.Search.QdrantSearch, key], nil ->
+ %{
+ openai_model: "a_model",
+ openai_url: "https://openai.url",
+ qdrant_url: "https://qdrant.url"
+ }[key]
+ end)
+
+ Tesla.Mock.mock(fn
+ %{url: "https://openai.url/v1/embeddings", method: :post} ->
+ Tesla.Mock.json(%{
+ data: [%{embedding: [1, 2, 3]}]
+ })
+
+ %{url: "https://qdrant.url/collections/posts/points/search", method: :post, body: body} ->
+ data = Jason.decode!(body)
+ refute data["filter"]
+
+ Tesla.Mock.json(%{
+ result: [%{"id" => activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()}]
+ })
+ end)
+
+ results = QdrantSearch.search(nil, "guys i just don't wanna leave the swamp", %{})
+
+ assert results == [activity]
+ end
+
+ test "for a given actor, ask for only relevant matches" do
+ user = insert(:user)
+
+ {:ok, activity} =
+ CommonAPI.post(user, %{
+ status: "guys i just don't wanna leave the swamp",
+ visibility: "public"
+ })
+
+ Config
+ |> expect(:get, 3, fn
+ [Pleroma.Search, :module], nil ->
+ QdrantSearch
+
+ [Pleroma.Search.QdrantSearch, key], nil ->
+ %{
+ openai_model: "a_model",
+ openai_url: "https://openai.url",
+ qdrant_url: "https://qdrant.url"
+ }[key]
+ end)
+
+ Tesla.Mock.mock(fn
+ %{url: "https://openai.url/v1/embeddings", method: :post} ->
+ Tesla.Mock.json(%{
+ data: [%{embedding: [1, 2, 3]}]
+ })
+
+ %{url: "https://qdrant.url/collections/posts/points/search", method: :post, body: body} ->
+ data = Jason.decode!(body)
+
+ assert data["filter"] == %{
+ "must" => [%{"key" => "actor", "match" => %{"value" => user.ap_id}}]
+ }
+
+ Tesla.Mock.json(%{
+ result: [%{"id" => activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()}]
+ })
+ end)
+
+ results =
+ QdrantSearch.search(nil, "guys i just don't wanna leave the swamp", %{author: user})
+
+ assert results == [activity]
+ end
+
+ test "indexes a public post on creation, deletes from the index on deletion" do
+ user = insert(:user)
+
+ Tesla.Mock.mock(fn
+ %{method: :post, url: "https://openai.url/v1/embeddings"} ->
+ send(self(), "posted_to_openai")
+
+ Tesla.Mock.json(%{
+ data: [%{embedding: [1, 2, 3]}]
+ })
+
+ %{method: :put, url: "https://qdrant.url/collections/posts/points", body: body} ->
+ send(self(), "posted_to_qdrant")
+
+ data = Jason.decode!(body)
+ %{"points" => [%{"vector" => vector, "payload" => payload}]} = data
+
+ assert vector == [1, 2, 3]
+ assert payload["actor"]
+ assert payload["published_at"]
+
+ Tesla.Mock.json("ok")
+
+ %{method: :post, url: "https://qdrant.url/collections/posts/points/delete"} ->
+ send(self(), "deleted_from_qdrant")
+ Tesla.Mock.json("ok")
+ end)
+
+ Config
+ |> expect(:get, 6, fn
+ [Pleroma.Search, :module], nil ->
+ QdrantSearch
+
+ [Pleroma.Search.QdrantSearch, key], nil ->
+ %{
+ openai_model: "a_model",
+ openai_url: "https://openai.url",
+ qdrant_url: "https://qdrant.url"
+ }[key]
+ end)
+
+ {:ok, activity} =
+ CommonAPI.post(user, %{
+ status: "guys i just don't wanna leave the swamp",
+ visibility: "public"
+ })
+
+ args = %{"op" => "add_to_index", "activity" => activity.id}
+
+ assert_enqueued(
+ worker: SearchIndexingWorker,
+ args: args
+ )
+
+ assert :ok = perform_job(SearchIndexingWorker, args)
+ assert_received("posted_to_openai")
+ assert_received("posted_to_qdrant")
+
+ {:ok, _} = CommonAPI.delete(activity.id, user)
+
+ delete_args = %{"op" => "remove_from_index", "object" => activity.object.id}
+ assert_enqueued(worker: SearchIndexingWorker, args: delete_args)
+ assert :ok = perform_job(SearchIndexingWorker, delete_args)
+
+ assert_received("deleted_from_qdrant")
+ end
+ end
+end
diff --git a/test/pleroma/uploaders/ipfs_test.exs b/test/pleroma/uploaders/ipfs_test.exs
new file mode 100644
index 0000000000..cf325b54f2
--- /dev/null
+++ b/test/pleroma/uploaders/ipfs_test.exs
@@ -0,0 +1,158 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Uploaders.IPFSTest do
+ use Pleroma.DataCase
+
+ alias Pleroma.Uploaders.IPFS
+ alias Tesla.Multipart
+
+ import ExUnit.CaptureLog
+ import Mock
+ import Mox
+
+ alias Pleroma.UnstubbedConfigMock, as: Config
+
+ describe "get_final_url" do
+ setup do
+ Config
+ |> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
+ [post_gateway_url: "http://localhost:5001"]
+ end)
+
+ :ok
+ end
+
+ test "it returns the final url for put_file" do
+ assert IPFS.put_file_endpoint() == "http://localhost:5001/api/v0/add"
+ end
+
+ test "it returns the final url for delete_file" do
+ assert IPFS.delete_file_endpoint() == "http://localhost:5001/api/v0/files/rm"
+ end
+ end
+
+ describe "get_file/1" do
+ setup do
+ Config
+ |> expect(:get, fn [Pleroma.Upload, :uploader] -> Pleroma.Uploaders.IPFS end)
+ |> expect(:get, fn [Pleroma.Upload, :base_url] -> nil end)
+ |> expect(:get, fn [Pleroma.Uploaders.IPFS, :public_endpoint] -> nil end)
+
+ :ok
+ end
+
+ test "it returns path to ipfs file with cid as subdomain" do
+ Config
+ |> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] ->
+ "https://{CID}.ipfs.mydomain.com"
+ end)
+
+ assert IPFS.get_file("testcid") == {
+ :ok,
+ {:url, "https://testcid.ipfs.mydomain.com"}
+ }
+ end
+
+ test "it returns path to ipfs file with cid as path" do
+ Config
+ |> expect(:get, fn [Pleroma.Uploaders.IPFS, :get_gateway_url] ->
+ "https://ipfs.mydomain.com/ipfs/{CID}"
+ end)
+
+ assert IPFS.get_file("testcid") == {
+ :ok,
+ {:url, "https://ipfs.mydomain.com/ipfs/testcid"}
+ }
+ end
+ end
+
+ describe "put_file/1" do
+ setup do
+ Config
+ |> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
+ [post_gateway_url: "http://localhost:5001"]
+ end)
+
+ file_upload = %Pleroma.Upload{
+ name: "image-tet.jpg",
+ content_type: "image/jpeg",
+ path: "test_folder/image-tet.jpg",
+ tempfile: Path.absname("test/instance_static/add/shortcode.png")
+ }
+
+ mp =
+ Multipart.new()
+ |> Multipart.add_content_type_param("charset=utf-8")
+ |> Multipart.add_file(file_upload.tempfile)
+
+ [file_upload: file_upload, mp: mp]
+ end
+
+ test "save file", %{file_upload: file_upload} do
+ with_mock Pleroma.HTTP,
+ post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "{\"Name\":\"image-tet.jpg\",\"Size\":\"5000\", \"Hash\":\"bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi\"}"
+ }}
+ end do
+ assert IPFS.put_file(file_upload) ==
+ {:ok, {:file, "bafybeicrh7ltzx52yxcwrvxxckfmwhqdgsb6qym6dxqm2a4ymsakeshwoi"}}
+ end
+ end
+
+ test "returns error", %{file_upload: file_upload} do
+ with_mock Pleroma.HTTP,
+ post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
+ {:error, "IPFS Gateway upload failed"}
+ end do
+ assert capture_log(fn ->
+ assert IPFS.put_file(file_upload) == {:error, "IPFS Gateway upload failed"}
+ end) =~ "Elixir.Pleroma.Uploaders.IPFS: {:error, \"IPFS Gateway upload failed\"}"
+ end
+ end
+
+ test "returns error if JSON decode fails", %{file_upload: file_upload} do
+ with_mock Pleroma.HTTP, [],
+ post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
+ {:ok, %Tesla.Env{status: 200, body: "invalid"}}
+ end do
+ assert capture_log(fn ->
+ assert IPFS.put_file(file_upload) == {:error, "JSON decode failed"}
+ end) =~
+ "Elixir.Pleroma.Uploaders.IPFS: {:error, %Jason.DecodeError"
+ end
+ end
+
+ test "returns error if JSON body doesn't contain Hash key", %{file_upload: file_upload} do
+ with_mock Pleroma.HTTP, [],
+ post: fn "http://localhost:5001/api/v0/add", _mp, [], params: ["cid-version": "1"] ->
+ {:ok, %Tesla.Env{status: 200, body: "{\"key\": \"value\"}"}}
+ end do
+ assert IPFS.put_file(file_upload) == {:error, "JSON doesn't contain Hash key"}
+ end
+ end
+ end
+
+ describe "delete_file/1" do
+ setup do
+ Config
+ |> expect(:get, fn [Pleroma.Uploaders.IPFS] ->
+ [post_gateway_url: "http://localhost:5001"]
+ end)
+
+ :ok
+ end
+
+ test_with_mock "deletes file", Pleroma.HTTP,
+ post: fn "http://localhost:5001/api/v0/files/rm", "", [], params: [arg: "image.jpg"] ->
+ {:ok, %{status: 204}}
+ end do
+ assert :ok = IPFS.delete_file("image.jpg")
+ end
+ end
+end
diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs
index 8e91d00e00..d4ca14c2c2 100644
--- a/test/pleroma/web/activity_pub/activity_pub_test.exs
+++ b/test/pleroma/web/activity_pub/activity_pub_test.exs
@@ -1410,6 +1410,13 @@ test "sets a description if given", %{test_file: file} do
assert object.data["name"] == "a cool file"
end
+ test "sets a multilang description if given", %{test_file: file} do
+ {:ok, %Object{} = object} =
+ ActivityPub.upload(file, description_map: %{"a" => "mew", "b" => "lol"}, language: "a")
+
+ assert object.data["nameMap"] == %{"a" => "mew", "b" => "lol"}
+ end
+
test "it sets the default description depending on the configuration", %{test_file: file} do
clear_config([Pleroma.Upload, :default_description])
diff --git a/test/pleroma/web/activity_pub/builder_test.exs b/test/pleroma/web/activity_pub/builder_test.exs
index 52058a0a34..a7ba1ba2b3 100644
--- a/test/pleroma/web/activity_pub/builder_test.exs
+++ b/test/pleroma/web/activity_pub/builder_test.exs
@@ -45,6 +45,33 @@ test "returns note data" do
assert {:ok, ^expected, []} = Builder.note(draft)
end
+ test "accepts multilang" do
+ user = insert(:user)
+
+ draft = %ActivityDraft{
+ user: user,
+ to: [user.ap_id],
+ context: "2hu",
+ content_html_map: %{"a" => "mew", "b" => "lol"},
+ tags: [],
+ summary_map: %{"a" => "mew", "b" => "lol"},
+ cc: [],
+ extra: %{},
+ language: "a"
+ }
+
+ assert {:ok,
+ %{
+ "contentMap" => %{"a" => "mew", "b" => "lol"},
+ "content" => content,
+ "summaryMap" => %{"a" => "mew", "b" => "lol"},
+ "summary" => summary
+ }, []} = Builder.note(draft)
+
+ assert is_binary(content)
+ assert is_binary(summary)
+ end
+
test "quote post" do
user = insert(:user)
note = insert(:note)
diff --git a/test/pleroma/web/activity_pub/mrf/ensure_re_prepended_test.exs b/test/pleroma/web/activity_pub/mrf/ensure_re_prepended_test.exs
index 5afab0cf98..f56e0faf37 100644
--- a/test/pleroma/web/activity_pub/mrf/ensure_re_prepended_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/ensure_re_prepended_test.exs
@@ -24,6 +24,41 @@ test "it adds `re:` to summary object when child summary and parent summary equa
assert res["object"]["summary"] == "re: object-summary"
end
+ test "it adds `re:` to summaryMap object when child summary and parent summary for some language equal" do
+ message = %{
+ "type" => "Create",
+ "object" => %{
+ "summary" => "object-summary",
+ "summaryMap" => %{
+ "a" => "object-summary",
+ "b" => "some-object-summary",
+ "c" => "another-object-summary"
+ },
+ "inReplyTo" => %Activity{
+ object: %Object{
+ data: %{
+ "summary" => "object-summary",
+ "summaryMap" => %{
+ "a" => "unrelated-summary",
+ "b" => "some-object-summary"
+ }
+ }
+ }
+ }
+ }
+ }
+
+ assert {:ok, res} = EnsureRePrepended.filter(message)
+
+ assert res["object"]["summaryMap"] == %{
+ "a" => "object-summary",
+ "b" => "re: some-object-summary",
+ "c" => "another-object-summary"
+ }
+
+ assert res["object"]["summary"] == "re: object-summary"
+ end
+
test "it adds `re:` to summary object when child summary contains re-subject of parent summary " do
message = %{
"type" => "Create",
diff --git a/test/pleroma/web/activity_pub/mrf/force_mentions_in_content_test.exs b/test/pleroma/web/activity_pub/mrf/force_mentions_in_content_test.exs
index 811ef105cd..5e4963701f 100644
--- a/test/pleroma/web/activity_pub/mrf/force_mentions_in_content_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/force_mentions_in_content_test.exs
@@ -87,6 +87,57 @@ test "the replied-to user is sorted to the left" do
"@luigi @mario WHA-HA!"
end
+ test "supports mulitlang" do
+ [mario, luigi, wario] = [
+ insert(:user, nickname: "mario"),
+ insert(:user, nickname: "luigi"),
+ insert(:user, nickname: "wario")
+ ]
+
+ {:ok, post1} = CommonAPI.post(mario, %{status: "Letsa go!"})
+
+ {:ok, post2} =
+ CommonAPI.post(luigi, %{status: "Oh yaah", in_reply_to_id: post1.id, to: [mario.ap_id]})
+
+ activity = %{
+ "type" => "Create",
+ "actor" => wario.ap_id,
+ "object" => %{
+ "type" => "Note",
+ "actor" => wario.ap_id,
+ "content" => "WHA-HA!",
+ "contentMap" => %{
+ "a" => "mew mew",
+ "b" => "lol lol"
+ },
+ "to" => [
+ mario.ap_id,
+ luigi.ap_id,
+ Constants.as_public()
+ ],
+ "inReplyTo" => Object.normalize(post2).data["id"]
+ }
+ }
+
+ {:ok,
+ %{
+ "object" => %{
+ "content" => content,
+ "contentMap" => %{
+ "a" => content_a,
+ "b" => content_b
+ }
+ }
+ }} = ForceMentionsInContent.filter(activity)
+
+ mentions_part =
+ "@luigi @mario "
+
+ assert content_a == mentions_part <> "mew mew"
+ assert content_b == mentions_part <> "lol lol"
+ assert content == mentions_part <> "WHA-HA!"
+ end
+
test "don't mention self" do
mario = insert(:user, nickname: "mario")
diff --git a/test/pleroma/web/activity_pub/mrf/keyword_policy_test.exs b/test/pleroma/web/activity_pub/mrf/keyword_policy_test.exs
index a0e77d7b9e..65a1840328 100644
--- a/test/pleroma/web/activity_pub/mrf/keyword_policy_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/keyword_policy_test.exs
@@ -295,6 +295,41 @@ test "replaces keyword if regex matches in summary" do
end)
end
+ test "replaces keyword in *Map" do
+ clear_config([:mrf_keyword, :replace], [{"opensource", "free software"}])
+
+ message = %{
+ "type" => "Create",
+ "to" => ["https://www.w3.org/ns/activitystreams#Public"],
+ "object" => %{
+ "content" => "unrelevant",
+ "contentMap" => %{
+ "a" => "ZFS is opensource",
+ "b" => "mew mew is also opensource"
+ },
+ "summary" => "unrelevant",
+ "summaryMap" => %{
+ "a" => "ZFS is very opensource",
+ "b" => "mew mew is also very opensource"
+ }
+ }
+ }
+
+ {:ok,
+ %{
+ "object" => %{
+ "contentMap" => %{
+ "a" => "ZFS is free software",
+ "b" => "mew mew is also free software"
+ },
+ "summaryMap" => %{
+ "a" => "ZFS is very free software",
+ "b" => "mew mew is also very free software"
+ }
+ }
+ }} = KeywordPolicy.filter(message)
+ end
+
test "replaces keyword if string matches in history" do
clear_config([:mrf_keyword, :replace], [{"opensource", "free software"}])
diff --git a/test/pleroma/web/activity_pub/mrf/no_empty_policy_test.exs b/test/pleroma/web/activity_pub/mrf/no_empty_policy_test.exs
index 386ed395fd..3d5fa3a47b 100644
--- a/test/pleroma/web/activity_pub/mrf/no_empty_policy_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/no_empty_policy_test.exs
@@ -129,6 +129,66 @@ test "Notes with only mentions are denied" do
assert NoEmptyPolicy.filter(message) == {:reject, "[NoEmptyPolicy]"}
end
+ test "Notes with only mentions in source.contentMap are denied" do
+ message = %{
+ "actor" => "http://localhost:4001/users/testuser",
+ "cc" => ["http://localhost:4001/users/testuser/followers"],
+ "object" => %{
+ "actor" => "http://localhost:4001/users/testuser",
+ "attachment" => [],
+ "cc" => ["http://localhost:4001/users/testuser/followers"],
+ "source" => %{
+ "contentMap" => %{
+ "a" => "@user2",
+ "b" => "@user2"
+ }
+ },
+ "to" => [
+ "https://www.w3.org/ns/activitystreams#Public",
+ "http://localhost:4001/users/user2"
+ ],
+ "type" => "Note"
+ },
+ "to" => [
+ "https://www.w3.org/ns/activitystreams#Public",
+ "http://localhost:4001/users/user2"
+ ],
+ "type" => "Create"
+ }
+
+ assert NoEmptyPolicy.filter(message) == {:reject, "[NoEmptyPolicy]"}
+ end
+
+ test "Notes with mentions and other content in source.contentMap are allowed" do
+ message = %{
+ "actor" => "http://localhost:4001/users/testuser",
+ "cc" => ["http://localhost:4001/users/testuser/followers"],
+ "object" => %{
+ "actor" => "http://localhost:4001/users/testuser",
+ "attachment" => [],
+ "cc" => ["http://localhost:4001/users/testuser/followers"],
+ "source" => %{
+ "contentMap" => %{
+ "a" => "@user2",
+ "b" => "@user2 lol"
+ }
+ },
+ "to" => [
+ "https://www.w3.org/ns/activitystreams#Public",
+ "http://localhost:4001/users/user2"
+ ],
+ "type" => "Note"
+ },
+ "to" => [
+ "https://www.w3.org/ns/activitystreams#Public",
+ "http://localhost:4001/users/user2"
+ ],
+ "type" => "Create"
+ }
+
+ assert {:ok, _} = NoEmptyPolicy.filter(message)
+ end
+
test "Notes with no content are denied" do
message = %{
"actor" => "http://localhost:4001/users/testuser",
diff --git a/test/pleroma/web/activity_pub/mrf/no_placeholder_text_policy_test.exs b/test/pleroma/web/activity_pub/mrf/no_placeholder_text_policy_test.exs
index 3533c2bc8c..cf7a3bce19 100644
--- a/test/pleroma/web/activity_pub/mrf/no_placeholder_text_policy_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/no_placeholder_text_policy_test.exs
@@ -21,6 +21,30 @@ test "it clears content object" do
assert res["object"]["content"] == ""
end
+ test "multilang aware" do
+ message = %{
+ "type" => "Create",
+ "object" => %{
+ "content" => ".",
+ "contentMap" => %{"a" => ".", "b" => "lol"},
+ "attachment" => "image"
+ }
+ }
+
+ assert {:ok, res} = NoPlaceholderTextPolicy.filter(message)
+ assert res["object"]["content"] == ""
+ assert res["object"]["contentMap"] == %{"b" => "lol"}
+
+ message = %{
+ "type" => "Create",
+ "object" => %{"content" => ".", "contentMap" => %{"a" => "."}, "attachment" => "image"}
+ }
+
+ assert {:ok, res} = NoPlaceholderTextPolicy.filter(message)
+ assert res["object"]["content"] == ""
+ assert res["object"]["contentMap"] == nil
+ end
+
test "history-aware" do
message = %{
"type" => "Create",
diff --git a/test/pleroma/web/activity_pub/mrf/normalize_markup_test.exs b/test/pleroma/web/activity_pub/mrf/normalize_markup_test.exs
index 530c5f4a08..7478eac941 100644
--- a/test/pleroma/web/activity_pub/mrf/normalize_markup_test.exs
+++ b/test/pleroma/web/activity_pub/mrf/normalize_markup_test.exs
@@ -38,6 +38,24 @@ test "it filter html tags" do
assert res["object"]["content"] == @expected
end
+ test "multilang-aware" do
+ message = %{
+ "type" => "Create",
+ "object" => %{
+ "content" => "some",
+ "contentMap" => %{
+ "a" => @html_sample,
+ "b" => @html_sample
+ }
+ }
+ }
+
+ assert {:ok, res} = NormalizeMarkup.filter(message)
+ assert res["object"]["contentMap"] == %{"a" => @expected, "b" => @expected}
+
+ assert res["object"]["content"] == "some"
+ end
+
test "history-aware" do
message = %{
"type" => "Create",
diff --git a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs
index 86be92bef7..824a0a78e2 100644
--- a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs
+++ b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs
@@ -37,6 +37,47 @@ test "a note from factory validates" do
note = insert(:note)
%{valid?: true} = ArticleNotePageValidator.cast_and_validate(note.data, [])
end
+
+ test "Note with contentMap and summaryMap", %{note: note} do
+ summary_map = %{
+ "en-US" => "mew",
+ "en-GB" => "meow"
+ }
+
+ content_map = %{
+ "en-US" => "mew mew",
+ "en-GB" => "meow meow"
+ }
+
+ note =
+ note
+ |> Map.put("summaryMap", summary_map)
+ |> Map.put("contentMap", content_map)
+
+ assert %{
+ valid?: true,
+ changes: %{
+ summaryMap: ^summary_map,
+ contentMap: ^content_map
+ }
+ } = ArticleNotePageValidator.cast_and_validate(note)
+ end
+
+ test "Note with empty *Map", %{note: note} do
+ note =
+ note
+ |> Map.put("summaryMap", %{"und" => "mew"})
+ |> Map.put("contentMap", %{})
+
+ assert %{
+ valid?: true,
+ changes: changes
+ } = ArticleNotePageValidator.cast_and_validate(note)
+
+ assert changes.content == note["content"]
+ assert Map.has_key?(changes, :summaryMap)
+ refute Map.has_key?(changes, :contentMap)
+ end
end
describe "Note with history" do
@@ -197,7 +238,7 @@ test "it detects language from JSON-LD context" do
"actor" => user.ap_id
}
- {:ok, _create_activity, meta} = ObjectValidator.validate(note_activity, []) |> IO.inspect()
+ {:ok, _create_activity, meta} = ObjectValidator.validate(note_activity, [])
assert meta[:object_data]["language"] == "pl"
end
diff --git a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs
index a615c1d9ac..e78dee2991 100644
--- a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs
+++ b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs
@@ -27,19 +27,42 @@ test "fails without url" do
end
test "works with honkerific attachments" do
- attachment = %{
+ honk = %{
+ "mediaType" => "",
+ "summary" => "Select your spirit chonk",
+ "name" => "298p3RG7j27tfsZ9RQ.jpg",
+ "type" => "Document",
+ "url" => "https://honk.tedunangst.com/d/298p3RG7j27tfsZ9RQ.jpg"
+ }
+
+ assert {:ok, attachment} =
+ honk
+ |> AttachmentValidator.cast_and_validate()
+ |> Ecto.Changeset.apply_action(:insert)
+
+ assert attachment.mediaType == "application/octet-stream"
+ assert attachment.summary == "Select your spirit chonk"
+ assert attachment.name == "298p3RG7j27tfsZ9RQ.jpg"
+ end
+
+ test "works with nameMap" do
+ attachment_data = %{
"mediaType" => "",
"name" => "",
+ "nameMap" => %{
+ "en-US" => "mew mew",
+ "en-GB" => "meow meow"
+ },
"summary" => "298p3RG7j27tfsZ9RQ.jpg",
"type" => "Document",
"url" => "https://honk.tedunangst.com/d/298p3RG7j27tfsZ9RQ.jpg"
}
assert {:ok, attachment} =
- AttachmentValidator.cast_and_validate(attachment)
+ AttachmentValidator.cast_and_validate(attachment_data)
|> Ecto.Changeset.apply_action(:insert)
- assert attachment.mediaType == "application/octet-stream"
+ assert attachment.nameMap == attachment_data["nameMap"]
end
test "works with an unknown but valid mime type" do
diff --git a/test/pleroma/web/activity_pub/object_validators/question_options_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/question_options_validator_test.exs
new file mode 100644
index 0000000000..8c2c51f9b1
--- /dev/null
+++ b/test/pleroma/web/activity_pub/object_validators/question_options_validator_test.exs
@@ -0,0 +1,27 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidatorTest do
+ use Pleroma.DataCase, async: true
+
+ alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator
+
+ describe "Validates Question options" do
+ test "" do
+ name_map = %{
+ "en-US" => "mew",
+ "en-GB" => "meow"
+ }
+
+ data = %{
+ "type" => "Note",
+ "name" => "mew",
+ "nameMap" => name_map
+ }
+
+ assert %{valid?: true, changes: %{nameMap: ^name_map, name: _}} =
+ QuestionOptionsValidator.changeset(%QuestionOptionsValidator{}, data)
+ end
+ end
+end
diff --git a/test/pleroma/web/activity_pub/side_effects_test.exs b/test/pleroma/web/activity_pub/side_effects_test.exs
index 2dde0a6e0b..32a7a08bb4 100644
--- a/test/pleroma/web/activity_pub/side_effects_test.exs
+++ b/test/pleroma/web/activity_pub/side_effects_test.exs
@@ -854,31 +854,6 @@ test "creates a notification", %{announce: announce, poster: poster} do
{:ok, announce, _} = SideEffects.handle(announce)
assert Repo.get_by(Notification, user_id: poster.id, activity_id: announce.id)
end
-
- test "it streams out the announce", %{announce: announce} do
- with_mocks([
- {
- Pleroma.Web.Streamer,
- [],
- [
- stream: fn _, _ -> nil end
- ]
- },
- {
- Pleroma.Web.Push,
- [],
- [
- send: fn _ -> nil end
- ]
- }
- ]) do
- {:ok, announce, _} = SideEffects.handle(announce)
-
- assert called(Pleroma.Web.Streamer.stream(["user", "list"], announce))
-
- assert called(Pleroma.Web.Push.send(:_))
- end
- end
end
describe "removing a follower" do
diff --git a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs
index 85dce57db0..9bc3d4a114 100644
--- a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs
+++ b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs
@@ -251,7 +251,7 @@ test "it only uses contentMap if content is not present" do
assert object.data["content"] == "Hi"
end
- test "it works for incoming notices with a nil contentMap (firefish)" do
+ test "it works for incoming notices with a nil content (firefish)" do
data =
File.read!("test/fixtures/mastodon-post-activity-contentmap.json")
|> Jason.decode!()
diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs
index 241ed6033a..f7857c2284 100644
--- a/test/pleroma/web/activity_pub/transmogrifier_test.exs
+++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs
@@ -349,18 +349,6 @@ test "custom emoji urls are URI encoded" do
assert url == "http://localhost:4001/emoji/dino%20walking.gif"
end
- test "it adds contentMap if language is specified" do
- user = insert(:user)
-
- {:ok, activity} = CommonAPI.post(user, %{status: "тест", language: "uk"})
-
- {:ok, prepared} = Transmogrifier.prepare_outgoing(activity.data)
-
- assert prepared["object"]["contentMap"] == %{
- "uk" => "тест"
- }
- end
-
test "it prepares a quote post" do
user = insert(:user)
diff --git a/test/pleroma/web/activity_pub/views/user_view_test.exs b/test/pleroma/web/activity_pub/views/user_view_test.exs
index f9068db074..c75149dab7 100644
--- a/test/pleroma/web/activity_pub/views/user_view_test.exs
+++ b/test/pleroma/web/activity_pub/views/user_view_test.exs
@@ -91,6 +91,13 @@ test "renders AKAs" do
assert %{"alsoKnownAs" => ^akas} = UserView.render("user.json", %{user: user})
end
+ test "renders full nickname" do
+ clear_config([Pleroma.Web.WebFinger, :domain], "plemora.dev")
+
+ user = insert(:user, nickname: "user")
+ assert %{"webfinger" => "acct:user@plemora.dev"} = UserView.render("user.json", %{user: user})
+ end
+
describe "endpoints" do
test "local users have a usable endpoints structure" do
user = insert(:user)
diff --git a/test/pleroma/web/admin_api/controllers/report_controller_test.exs b/test/pleroma/web/admin_api/controllers/report_controller_test.exs
index 81632c9e9a..9fbb608c42 100644
--- a/test/pleroma/web/admin_api/controllers/report_controller_test.exs
+++ b/test/pleroma/web/admin_api/controllers/report_controller_test.exs
@@ -11,6 +11,7 @@ defmodule Pleroma.Web.AdminAPI.ReportControllerTest do
alias Pleroma.ModerationLog
alias Pleroma.Repo
alias Pleroma.ReportNote
+ alias Pleroma.Rule
alias Pleroma.Web.CommonAPI
setup do
@@ -468,6 +469,34 @@ test "returns 403 when requested by anonymous" do
"error" => "Invalid credentials."
}
end
+
+ test "returns reports with specified role_id", %{conn: conn} do
+ [reporter, target_user] = insert_pair(:user)
+
+ %{id: rule_id} = Rule.create(%{text: "Example rule"})
+
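+ # rule ids are handled as strings in report data and query params, so normalize before comparing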
+ rule_id = to_string(rule_id)
+
+ {:ok, %{id: report_id}} =
+ CommonAPI.report(reporter, %{
+ account_id: target_user.id,
+ comment: "",
+ rule_ids: [rule_id]
+ })
+
+ {:ok, _report} =
+ CommonAPI.report(reporter, %{
+ account_id: target_user.id,
+ comment: ""
+ })
+
+ response =
+ conn
+ |> get("/api/pleroma/admin/reports?rule_id=#{rule_id}")
+ |> json_response_and_validate_schema(:ok)
+
+ assert %{"reports" => [%{"id" => ^report_id}]} = response
+ end
end
describe "POST /api/pleroma/admin/reports/assign_account" do
diff --git a/test/pleroma/web/admin_api/controllers/rule_controller_test.exs b/test/pleroma/web/admin_api/controllers/rule_controller_test.exs
index c5c72d293a..96b52b2722 100644
--- a/test/pleroma/web/admin_api/controllers/rule_controller_test.exs
+++ b/test/pleroma/web/admin_api/controllers/rule_controller_test.exs
@@ -27,6 +27,10 @@ test "sorts rules by priority", %{conn: conn} do
%{id: id2} = Rule.create(%{text: "Second rule", priority: 2})
%{id: id3} = Rule.create(%{text: "Third rule", priority: 1})
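+ # the controller renders rule ids as strings, so compare against the string form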
+ id1 = to_string(id1)
+ id2 = to_string(id2)
+ id3 = to_string(id3)
+
response =
conn
|> get("/api/pleroma/admin/rules")
diff --git a/test/pleroma/web/admin_api/controllers/webhook_controller_test.exs b/test/pleroma/web/admin_api/controllers/webhook_controller_test.exs
index 0cd00e7534..24d56346d8 100644
--- a/test/pleroma/web/admin_api/controllers/webhook_controller_test.exs
+++ b/test/pleroma/web/admin_api/controllers/webhook_controller_test.exs
@@ -67,7 +67,9 @@ test "edits a webhook", %{conn: conn} do
test "can't edit an internal webhook", %{conn: conn} do
%{id: id} =
- Webhook.create(%{url: "https://example.com/webhook1", events: [], internal: true})
+ Webhook.create(%{url: "https://example.com/webhook1", events: [], internal: true},
+ update_internal: true
+ )
conn
|> put_req_header("content-type", "application/json")
diff --git a/test/pleroma/web/admin_api/views/report_view_test.exs b/test/pleroma/web/admin_api/views/report_view_test.exs
index f70ef4f865..6e155ef586 100644
--- a/test/pleroma/web/admin_api/views/report_view_test.exs
+++ b/test/pleroma/web/admin_api/views/report_view_test.exs
@@ -178,15 +178,17 @@ test "renders included rules" do
user = insert(:user)
other_user = insert(:user)
- %{id: id, text: text} = Rule.create(%{text: "Example rule"})
+ %{id: rule_id, text: text} = Rule.create(%{text: "Example rule"})
+
+ rule_id = to_string(rule_id)
{:ok, activity} =
CommonAPI.report(user, %{
account_id: other_user.id,
- rule_ids: [id]
+ rule_ids: [rule_id]
})
- assert %{rules: [%{id: ^id, text: ^text}]} =
+ assert %{rules: [%{id: ^rule_id, text: ^text}]} =
ReportView.render("show.json", Report.extract_report_info(activity))
end
end
diff --git a/test/pleroma/web/common_api/activity_draft_test.exs b/test/pleroma/web/common_api/activity_draft_test.exs
index 02bc6cf3b1..a56320de17 100644
--- a/test/pleroma/web/common_api/activity_draft_test.exs
+++ b/test/pleroma/web/common_api/activity_draft_test.exs
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors
+# Copyright © 2017-2022 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.CommonAPI.ActivityDraftTest do
@@ -10,6 +10,31 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraftTest do
import Pleroma.Factory
+ describe "multilang processing" do
+ setup do
+ [user: insert(:user)]
+ end
+
+ test "content", %{user: user} do
+ {:ok, draft} =
+ ActivityDraft.create(user, %{
+ status_map: %{"a" => "mew mew", "b" => "lol lol"},
+ spoiler_text_map: %{"a" => "mew", "b" => "lol"},
+ language: "a"
+ })
+
+ assert %{
+ "contentMap" => %{"a" => "mew mew", "b" => "lol lol"},
+ "content" => content,
+ "summaryMap" => %{"a" => "mew", "b" => "lol"},
+ "summary" => summary
+ } = draft.object
+
+ assert is_binary(content)
+ assert is_binary(summary)
+ end
+ end
+
test "create/2 with a quote post" do
user = insert(:user)
another_user = insert(:user)
diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs
index 27b1da1e32..b02fa2687c 100644
--- a/test/pleroma/web/common_api/utils_test.exs
+++ b/test/pleroma/web/common_api/utils_test.exs
@@ -81,6 +81,47 @@ test "works for bare text/plain" do
assert output == expected
end
+ test "works for multilang" do
+ draft = %ActivityDraft{
+ status_map: %{
+ "a" => "mew",
+ "b" => "lol"
+ }
+ }
+
+ expected = %{"a" => "mew", "b" => "lol"}
+
+ {output, [], []} = Utils.format_input(draft, "text/plain")
+
+ assert output == expected
+ end
+
+ test "works for multilang, mentions and tags" do
+ user1 = insert(:user)
+ user2 = insert(:user)
+ user3 = insert(:user)
+
+ draft = %ActivityDraft{
+ status_map: %{
+ "a" => "mew, @#{user1.nickname} @#{user2.nickname} #foo #bar",
+ "b" => "lol, @#{user2.nickname} @#{user3.nickname} #bar #lol"
+ }
+ }
+
+ {_, mentions, tags} = Utils.format_input(draft, "text/plain")
+ mentions = Enum.map(mentions, fn {_, user} -> user.ap_id end)
+ tags = Enum.map(tags, fn {_, tag} -> tag end)
+
+ assert [_, _, _] = mentions
+ assert user1.ap_id in mentions
+ assert user2.ap_id in mentions
+ assert user3.ap_id in mentions
+ assert [_, _, _] = tags
+ assert "foo" in tags
+ assert "bar" in tags
+ assert "lol" in tags
+ end
+
test "works for bare text/html" do
text = "hello world!
"
expected = "hello world!
"
@@ -688,4 +729,27 @@ test "adds attachments to parsed results" do
}
end
end
+
+ describe "make_poll_data/1" do
+ test "multilang support" do
+ {:ok, {poll, _}} =
+ Utils.make_poll_data(%{
+ poll: %{
+ options_map: [
+ %{"a" => "foo", "b" => "1"},
+ %{"a" => "bar", "c" => "2"}
+ ],
+ expires_in: 600
+ },
+ language: "a"
+ })
+
+ assert %{"oneOf" => choices} = poll
+
+ assert [
+ %{"name" => _, "nameMap" => %{"a" => "foo", "b" => "1"}},
+ %{"name" => _, "nameMap" => %{"a" => "bar", "c" => "2"}}
+ ] = choices
+ end
+ end
end
diff --git a/test/pleroma/web/mastodon_api/controllers/instance_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/instance_controller_test.exs
index 2617c93d5a..c6299f0345 100644
--- a/test/pleroma/web/mastodon_api/controllers/instance_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/instance_controller_test.exs
@@ -58,7 +58,6 @@ test "get instance information", %{conn: conn} do
assert result["pleroma"]["vapid_public_key"]
assert result["pleroma"]["stats"]["mau"] == 0
assert result["pleroma"]["oauth_consumer_strategies"] == []
- assert result["soapbox"]["version"] =~ "."
assert email == from_config_email
assert thumbnail == from_config_thumbnail
@@ -104,20 +103,6 @@ test "get peers", %{conn: conn} do
assert ["peer1.com", "peer2.com"] == Enum.sort(result)
end
- test "get instance rules", %{conn: conn} do
- Rule.create(%{text: "Example rule"})
- Rule.create(%{text: "Second rule"})
- Rule.create(%{text: "Third rule"})
-
- conn = get(conn, "/api/v1/instance")
-
- assert result = json_response_and_validate_schema(conn, 200)
-
- rules = result["rules"]
-
- assert length(rules) == 3
- end
-
test "get instance configuration", %{conn: conn} do
clear_config([:instance, :limit], 476)
@@ -265,4 +250,29 @@ test "instance domains", %{conn: conn} do
|> get("/api/v1/instance")
|> json_response_and_validate_schema(200)
end
+
+ test "get instance rules", %{conn: conn} do
+ Rule.create(%{text: "Example rule", hint: "Rule description", priority: 1})
+ Rule.create(%{text: "Third rule", priority: 2})
+ Rule.create(%{text: "Second rule", priority: 1})
+
+ conn = get(conn, "/api/v1/instance")
+
+ assert result = json_response_and_validate_schema(conn, 200)
+
+ assert [
+ %{
+ "text" => "Example rule",
+ "hint" => "Rule description"
+ },
+ %{
+ "text" => "Second rule",
+ "hint" => ""
+ },
+ %{
+ "text" => "Third rule",
+ "hint" => ""
+ }
+ ] = result["rules"]
+ end
end
diff --git a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs
index b92fd8afad..d4912691ae 100644
--- a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs
@@ -49,6 +49,45 @@ test "/api/v1/media", %{conn: conn, image: image} do
assert object.data["actor"] == User.ap_id(conn.assigns[:user])
end
+ test "/api/v1/media, multilang", %{conn: conn, image: image} do
+ media =
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> post("/api/v1/media", %{
+ "file" => image,
+ "description_map" => %{"a" => "mew", "b" => "lol"},
+ "language" => "a"
+ })
+ |> json_response_and_validate_schema(:ok)
+
+ assert media["type"] == "image"
+ assert media["description_map"] == %{"a" => "mew", "b" => "lol"}
+ assert media["id"]
+
+ object = Object.get_by_id(media["id"])
+ assert object.data["actor"] == User.ap_id(conn.assigns[:user])
+ end
+
+ test "/api/v1/media, multilang, invalid description_map", %{conn: conn, image: image} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> post("/api/v1/media", %{
+ "file" => image,
+ "description_map" => %{"a" => "mew", "b_" => "lol"}
+ })
+ |> json_response_and_validate_schema(422)
+ end
+
+ test "/api/v1/media, multilang, empty description_map", %{conn: conn, image: image} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> post("/api/v1/media", %{
+ "file" => image,
+ "description_map" => %{}
+ })
+ |> json_response_and_validate_schema(422)
+ end
+
test "/api/v2/media", %{conn: conn, user: user, image: image} do
desc = "Description of the image"
@@ -75,6 +114,45 @@ test "/api/v2/media", %{conn: conn, user: user, image: image} do
assert object.data["actor"] == user.ap_id
end
+ test "/api/v2/media, multilang", %{conn: conn, image: image} do
+ media =
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> post("/api/v2/media", %{
+ "file" => image,
+ "description_map" => %{"a" => "mew", "b" => "lol"},
+ "language" => "a"
+ })
+ |> json_response_and_validate_schema(202)
+
+ assert media["type"] == "image"
+ assert media["description_map"] == %{"a" => "mew", "b" => "lol"}
+ assert media["id"]
+
+ object = Object.get_by_id(media["id"])
+ assert object.data["actor"] == User.ap_id(conn.assigns[:user])
+ end
+
+ test "/api/v2/media, multilang, invalid description_map", %{conn: conn, image: image} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> post("/api/v2/media", %{
+ "file" => image,
+ "description_map" => %{"a" => "mew", "b_" => "lol"}
+ })
+ |> json_response_and_validate_schema(422)
+ end
+
+ test "/api/v2/media, multilang, empty description_map", %{conn: conn, image: image} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> post("/api/v2/media", %{
+ "file" => image,
+ "description_map" => %{}
+ })
+ |> json_response_and_validate_schema(422)
+ end
+
test "/api/v2/media, upload_limit", %{conn: conn, user: user} do
desc = "Description of the binary"
@@ -97,7 +175,7 @@ test "/api/v2/media, upload_limit", %{conn: conn, user: user} do
"file" => large_binary,
"description" => desc
})
- |> json_response_and_validate_schema(400)
+ |> json_response_and_validate_schema(422)
end) =~
"[error] Elixir.Pleroma.Upload store (using Pleroma.Uploaders.Local) failed: :file_too_large"
@@ -179,6 +257,36 @@ test "/api/v1/media/:id good request", %{conn: conn, object: object} do
assert media["description"] == "test-media"
assert refresh_record(object).data["name"] == "test-media"
end
+
+ test "/api/v1/media/:id description_map", %{conn: conn, object: object} do
+ media =
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> put("/api/v1/media/#{object.id}", %{
+ "description_map" => %{"a" => "test-media", "b" => "xxx"},
+ "language" => "a"
+ })
+ |> json_response_and_validate_schema(:ok)
+
+ assert media["description_map"] == %{"a" => "test-media", "b" => "xxx"}
+ assert refresh_record(object).data["nameMap"] == %{"a" => "test-media", "b" => "xxx"}
+ end
+
+ test "/api/v1/media/:id description_map, invalid", %{conn: conn, object: object} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> put("/api/v1/media/#{object.id}", %{
+ "description_map" => %{"a" => "test-media", "b_" => "xxx"}
+ })
+ |> json_response_and_validate_schema(422)
+ end
+
+ test "/api/v1/media/:id description_map, empty", %{conn: conn, object: object} do
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> put("/api/v1/media/#{object.id}", %{"description_map" => %{}})
+ |> json_response_and_validate_schema(422)
+ end
end
describe "Get media by id (/api/v1/media/:id)" do
diff --git a/test/pleroma/web/mastodon_api/controllers/report_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/report_controller_test.exs
index da9c46ec69..4ab5d07715 100644
--- a/test/pleroma/web/mastodon_api/controllers/report_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/report_controller_test.exs
@@ -88,6 +88,8 @@ test "submit a report with rule_ids", %{
} do
%{id: rule_id} = Rule.create(%{text: "There are no rules"})
+ rule_id = to_string(rule_id)
+
assert %{"action_taken" => false, "id" => id} =
conn
|> put_req_header("content-type", "application/json")
@@ -101,6 +103,23 @@ test "submit a report with rule_ids", %{
assert %Activity{data: %{"rules" => [^rule_id]}} = Activity.get_report(id)
end
+ test "rules field is empty if provided wrong rule id", %{
+ conn: conn,
+ target_user: target_user
+ } do
+ assert %{"id" => id} =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/reports", %{
+ "account_id" => target_user.id,
+ "forward" => "false",
+ "rule_ids" => ["-1"]
+ })
+ |> json_response_and_validate_schema(200)
+
+ assert %Activity{data: %{"rules" => []}} = Activity.get_report(id)
+ end
+
test "account_id is required", %{
conn: conn,
activity: activity
diff --git a/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs
index 632242221e..2d6b2aee23 100644
--- a/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/scheduled_activity_controller_test.exs
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do
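+ # Oban.Testing provides the assert_enqueued/refute_enqueued helpers used below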
+ use Oban.Testing, repo: Pleroma.Repo
use Pleroma.Web.ConnCase, async: true
alias Pleroma.Repo
@@ -78,7 +79,7 @@ test "updates a scheduled activity" do
}
)
- job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities"))
+ job = Repo.one(from(j in Oban.Job, where: j.queue == "federator_outgoing"))
assert job.args == %{"activity_id" => scheduled_activity.id}
assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at)
@@ -124,9 +125,11 @@ test "deletes a scheduled activity" do
}
)
- job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities"))
-
- assert job.args == %{"activity_id" => scheduled_activity.id}
+ assert_enqueued(
+ worker: Pleroma.Workers.ScheduledActivityWorker,
+ args: %{"activity_id" => scheduled_activity.id},
+ queue: :federator_outgoing
+ )
res_conn =
conn
@@ -135,7 +138,11 @@ test "deletes a scheduled activity" do
assert %{} = json_response_and_validate_schema(res_conn, 200)
refute Repo.get(ScheduledActivity, scheduled_activity.id)
- refute Repo.get(Oban.Job, job.id)
+
+ refute_enqueued(
+ worker: Pleroma.Workers.ScheduledActivityWorker,
+ args: %{"activity_id" => scheduled_activity.id}
+ )
res_conn =
conn
diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs
index 3d5e39866a..a456c5c1da 100644
--- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs
@@ -83,8 +83,13 @@ test "posting a status", %{conn: conn} do
"sensitive" => "0"
})
- assert %{"content" => "cofe", "id" => id, "spoiler_text" => "2hu", "sensitive" => false} =
- json_response_and_validate_schema(conn_one, 200)
+ assert %{
+ "content" => "cofe",
+ "id" => id,
+ "spoiler_text" => "2hu",
+ "sensitive" => false,
+ "language" => nil
+ } = json_response_and_validate_schema(conn_one, 200)
assert Activity.get_by_id(id)
@@ -139,6 +144,200 @@ test "posting a status", %{conn: conn} do
)
end
+ test "posting a single lang status ", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status" => "mew mew",
+ "spoiler_text" => "mew",
+ "sensitive" => "0",
+ "language" => "a"
+ })
+
+ assert %{
+ "content" => "mew mew",
+ "content_map" => %{"a" => "mew mew"},
+ "id" => id,
+ "spoiler_text" => "mew",
+ "spoiler_text_map" => %{"a" => "mew"},
+ "sensitive" => false,
+ "language" => "a"
+ } = json_response_and_validate_schema(conn_one, 200)
+
+ assert Activity.get_by_id(id)
+ end
+
+ test "posting a single lang status, bad language code", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status" => "mew mew",
+ "spoiler_text" => "mew",
+ "sensitive" => "0",
+ "language" => "a_"
+ })
+
+ assert %{
+ "error" => _
+ } = json_response_and_validate_schema(conn_one, 422)
+ end
+
+ test "posting a multilang status", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "spoiler_text_map" => %{"a" => "mew", "b" => "lol"},
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "content" => _content,
+ "content_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "id" => id,
+ "spoiler_text" => _spoiler_text,
+ "spoiler_text_map" => %{"a" => "mew", "b" => "lol"},
+ "sensitive" => false
+ } = json_response_and_validate_schema(conn_one, 200)
+
+ assert Activity.get_by_id(id)
+ end
+
+ test "posting a multilang status, invalid language code in status_map", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status_map" => %{"a" => "mew mew", "b_" => "lol lol"},
+ "spoiler_text_map" => %{"a" => "mew", "b" => "lol"},
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "error" => _
+ } = json_response_and_validate_schema(conn_one, 422)
+ end
+
+ test "posting a multilang status, empty status_map", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status_map" => %{},
+ "spoiler_text_map" => %{"a" => "mew", "b" => "lol"},
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "error" => _
+ } = json_response_and_validate_schema(conn_one, 422)
+ end
+
+ test "posting a multilang status, invalid language code in spoiler_text_map", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "spoiler_text_map" => %{"a" => "mew", "b_" => "lol"},
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "error" => _
+ } = json_response_and_validate_schema(conn_one, 422)
+ end
+
+ test "posting a multilang status, empty spoiler_text_map", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "spoiler_text_map" => %{},
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "error" => _
+ } = json_response_and_validate_schema(conn_one, 422)
+ end
+
+ test "posting a multilang status with singlelang summary", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "status_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "spoiler_text" => "mewlol",
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "content" => _content,
+ "content_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "id" => id,
+ "spoiler_text" => "mewlol",
+ "spoiler_text_map" => %{},
+ "sensitive" => false
+ } = json_response_and_validate_schema(conn_one, 200)
+
+ assert Activity.get_by_id(id)
+ end
+
+ test "posting a multilang summary with singlelang status", %{conn: conn} do
+ idempotency_key = "Pikachu rocks!"
+
+ conn_one =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put_req_header("idempotency-key", idempotency_key)
+ |> post("/api/v1/statuses", %{
+ "spoiler_text_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "language" => "a",
+ "status" => "mewlol",
+ "sensitive" => "0"
+ })
+
+ assert %{
+ "content" => "mewlol",
+ "content_map" => %{},
+ "id" => id,
+ "spoiler_text" => _,
+ "spoiler_text_map" => %{"a" => "mew mew", "b" => "lol lol"},
+ "sensitive" => false
+ } = json_response_and_validate_schema(conn_one, 200)
+
+ assert Activity.get_by_id(id)
+ end
+
test "posting a quote post", %{conn: conn} do
user = insert(:user)
@@ -235,6 +434,16 @@ test "replying to a status", %{user: user, conn: conn} do
assert Activity.get_in_reply_to_activity(activity).id == replied_to.id
end
+ test "replying to a deleted status", %{user: user, conn: conn} do
+ {:ok, status} = CommonAPI.post(user, %{status: "cofe"})
+ {:ok, _deleted_status} = CommonAPI.delete(status.id, user)
+
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{"status" => "xD", "in_reply_to_id" => status.id})
+ |> json_response_and_validate_schema(422)
+ end
+
test "replying to a direct message with visibility other than direct", %{
user: user,
conn: conn
@@ -329,62 +538,6 @@ test "posting a fake status", %{conn: conn} do
assert real_status == fake_status
end
- test "fake statuses' preview card is not cached", %{conn: conn} do
- Pleroma.StaticStubbedConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- Tesla.Mock.mock_global(fn
- env ->
- apply(HttpRequestMock, :request, [env])
- end)
-
- conn1 =
- conn
- |> put_req_header("content-type", "application/json")
- |> post("/api/v1/statuses", %{
- "status" => "https://example.com/ogp",
- "preview" => true
- })
-
- conn2 =
- conn
- |> put_req_header("content-type", "application/json")
- |> post("/api/v1/statuses", %{
- "status" => "https://example.com/twitter-card",
- "preview" => true
- })
-
- assert %{"card" => %{"title" => "The Rock"}} = json_response_and_validate_schema(conn1, 200)
-
- assert %{"card" => %{"title" => "Small Island Developing States Photo Submission"}} =
- json_response_and_validate_schema(conn2, 200)
- end
-
- test "posting a status with OGP link preview", %{conn: conn} do
- Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
-
- Pleroma.StaticStubbedConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- conn =
- conn
- |> put_req_header("content-type", "application/json")
- |> post("/api/v1/statuses", %{
- "status" => "https://example.com/ogp"
- })
-
- assert %{"id" => id, "card" => %{"title" => "The Rock"}} =
- json_response_and_validate_schema(conn, 200)
-
- assert Activity.get_by_id(id)
- end
-
test "posting a direct status", %{conn: conn} do
user2 = insert(:user)
content = "direct cofe @#{user2.nickname}"
@@ -657,6 +810,122 @@ test "posting a poll", %{conn: conn} do
assert question.data["closed"] =~ "Z"
end
+ test "posting a single-language poll", %{conn: conn} do
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "Who is the #bestgrill?",
+ "poll" => %{
+ "options" => ["Rei", "Asuka", "Misato"],
+ "expires_in" => 420
+ },
+ "language" => "a"
+ })
+
+ response = json_response_and_validate_schema(conn, 200)
+
+ assert Enum.all?(response["poll"]["options"], fn %{"title_map" => title} ->
+ title in [
+ %{"a" => "Rei"},
+ %{"a" => "Asuka"},
+ %{"a" => "Misato"}
+ ]
+ end)
+ end
+
+ test "posting a single-language poll, invalid language code", %{conn: conn} do
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "Who is the #bestgrill?",
+ "poll" => %{
+ "options" => ["Rei", "Asuka", "Misato"],
+ "expires_in" => 420
+ },
+ "language" => "a_"
+ })
+
+ json_response_and_validate_schema(conn, 422)
+ end
+
+ test "posting a multilang poll", %{conn: conn} do
+ time = NaiveDateTime.utc_now()
+
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "Who is the #bestgrill?",
+ "poll" => %{
+ "options_map" => [
+ %{"a" => "Rei", "b" => "1"},
+ %{"a" => "Asuka", "b" => "2"},
+ %{"a" => "Misato", "b" => "3"}
+ ],
+ "expires_in" => 420
+ },
+ "language" => "a"
+ })
+
+ response = json_response_and_validate_schema(conn, 200)
+
+ assert Enum.all?(response["poll"]["options"], fn %{"title_map" => title} ->
+ title in [
+ %{"a" => "Rei", "b" => "1"},
+ %{"a" => "Asuka", "b" => "2"},
+ %{"a" => "Misato", "b" => "3"}
+ ]
+ end)
+
+ assert NaiveDateTime.diff(NaiveDateTime.from_iso8601!(response["poll"]["expires_at"]), time) in 420..430
+ assert response["poll"]["expired"] == false
+
+ question = Object.get_by_id(response["poll"]["id"])
+
+ # closed contains utc timezone
+ assert question.data["closed"] =~ "Z"
+ end
+
+ test "posting a multilang poll, invalid lang code", %{conn: conn} do
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "Who is the #bestgrill?",
+ "poll" => %{
+ "options_map" => [
+ %{"a" => "Rei", "b" => "1"},
+ %{"a" => "Asuka", "b_" => "2"},
+ %{"a" => "Misato", "b" => "3"}
+ ],
+ "expires_in" => 420
+ }
+ })
+
+ assert %{"error" => _} = json_response_and_validate_schema(conn, 422)
+ end
+
+ test "posting a multilang poll, empty map", %{conn: conn} do
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "Who is the #bestgrill?",
+ "poll" => %{
+ "options_map" => [
+ %{"a" => "Rei", "b" => "1"},
+ %{},
+ %{"a" => "Misato", "b" => "3"}
+ ],
+ "expires_in" => 420
+ }
+ })
+
+ assert %{"error" => _} = json_response_and_validate_schema(conn, 422)
+ end
+
test "option limit is enforced", %{conn: conn} do
limit = Config.get([:instance, :poll_limits, :max_options])
@@ -1699,88 +1968,6 @@ test "on pin removes deletion job, on unpin reschedule deletion" do
end
end
- describe "cards" do
- setup do
- Pleroma.StaticStubbedConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- oauth_access(["read:statuses"])
- end
-
- test "returns rich-media card", %{conn: conn, user: user} do
- Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
-
- {:ok, activity} = CommonAPI.post(user, %{status: "https://example.com/ogp"})
-
- card_data = %{
- "image" => "http://ia.media-imdb.com/images/rock.jpg",
- "provider_name" => "example.com",
- "provider_url" => "https://example.com",
- "title" => "The Rock",
- "type" => "link",
- "url" => "https://example.com/ogp",
- "description" =>
- "Directed by Michael Bay. With Sean Connery, Nicolas Cage, Ed Harris, John Spencer.",
- "author_name" => "",
- "author_url" => "",
- "blurhash" => nil,
- "embed_url" => "",
- "height" => 0,
- "html" => "",
- "width" => 0
- }
-
- response =
- conn
- |> get("/api/v1/statuses/#{activity.id}/card")
- |> json_response_and_validate_schema(200)
-
- assert response == card_data
-
- # works with private posts
- {:ok, activity} =
- CommonAPI.post(user, %{status: "https://example.com/ogp", visibility: "direct"})
-
- response_two =
- conn
- |> get("/api/v1/statuses/#{activity.id}/card")
- |> json_response_and_validate_schema(200)
-
- assert response_two == card_data
- end
-
- test "replaces missing description with an empty string", %{conn: conn, user: user} do
- Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
-
- {:ok, activity} = CommonAPI.post(user, %{status: "https://example.com/ogp-missing-data"})
-
- response =
- conn
- |> get("/api/v1/statuses/#{activity.id}/card")
- |> json_response_and_validate_schema(:ok)
-
- assert response == %{
- "type" => "link",
- "title" => "Pleroma",
- "description" => "",
- "image" => nil,
- "provider_name" => "example.com",
- "provider_url" => "https://example.com",
- "url" => "https://example.com/ogp-missing-data",
- "author_name" => "",
- "author_url" => "",
- "blurhash" => nil,
- "embed_url" => "",
- "height" => 0,
- "html" => "",
- "width" => 0
- }
- end
- end
-
test "bookmarks" do
bookmarks_uri = "/api/v1/bookmarks"
diff --git a/test/pleroma/web/mastodon_api/views/notification_view_test.exs b/test/pleroma/web/mastodon_api/views/notification_view_test.exs
index 73b2ab2364..0c14a3edff 100644
--- a/test/pleroma/web/mastodon_api/views/notification_view_test.exs
+++ b/test/pleroma/web/mastodon_api/views/notification_view_test.exs
@@ -332,4 +332,31 @@ test "muted notification" do
test_notifications_rendering([notification], user, [expected])
end
+
+ test "Subscribed status notification" do
+ user = insert(:user)
+ subscriber = insert(:user)
+
+ User.subscribe(subscriber, user)
+
+ {:ok, activity} = CommonAPI.post(user, %{status: "hi"})
+ {:ok, [notification]} = Notification.create_notifications(activity)
+
+ user = User.get_cached_by_id(user.id)
+
+ expected = %{
+ id: to_string(notification.id),
+ pleroma: %{is_seen: false, is_muted: false},
+ type: "status",
+ account:
+ AccountView.render("show.json", %{
+ user: user,
+ for: subscriber
+ }),
+ status: StatusView.render("show.json", %{activity: activity, for: subscriber}),
+ created_at: Utils.to_masto_date(notification.inserted_at)
+ }
+
+ test_notifications_rendering([notification], subscriber, [expected])
+ end
end
diff --git a/test/pleroma/web/mastodon_api/views/poll_view_test.exs b/test/pleroma/web/mastodon_api/views/poll_view_test.exs
index 4c0e2ed41a..2bde8f3e4d 100644
--- a/test/pleroma/web/mastodon_api/views/poll_view_test.exs
+++ b/test/pleroma/web/mastodon_api/views/poll_view_test.exs
@@ -37,10 +37,10 @@ test "renders a poll" do
id: to_string(object.id),
multiple: false,
options: [
- %{title: "absolutely!", votes_count: 0},
- %{title: "sure", votes_count: 0},
- %{title: "yes", votes_count: 0},
- %{title: "why are you even asking?", votes_count: 0}
+ %{title: "absolutely!", title_map: %{}, votes_count: 0},
+ %{title: "sure", title_map: %{}, votes_count: 0},
+ %{title: "yes", title_map: %{}, votes_count: 0},
+ %{title: "why are you even asking?", title_map: %{}, votes_count: 0}
],
votes_count: 0,
voters_count: 0,
@@ -167,6 +167,30 @@ test "doesn't strips HTML tags" do
} = PollView.render("show.json", %{object: object})
end
+ describe "multilang" do
+ test "renders multilang" do
+ object = %Object{
+ id: 123,
+ data: %{
+ "oneOf" => [
+ %{
+ "name" => "mew",
+ "nameMap" => %{"en" => "mew", "cmn" => "喵"}
+ },
+ %{"name" => "mew mew", "nameMap" => %{"en" => "mew mew", "cmn" => "喵喵"}}
+ ]
+ }
+ }
+
+ assert %{
+ options: [
+ %{title: "mew", title_map: %{"en" => "mew", "cmn" => "喵"}},
+ %{title: "mew mew", title_map: %{"en" => "mew mew", "cmn" => "喵喵"}}
+ ]
+ } = PollView.render("show.json", %{object: object})
+ end
+ end
+
test "displays correct voters count" do
object = Object.normalize("https://friends.grishka.me/posts/54642", fetch: true)
result = PollView.render("show.json", %{object: object})
@@ -181,7 +205,7 @@ test "displays correct voters count basing on voters array" do
assert result[:voters_count] == 4
end
- test "detects that poll is non anonymous" do
+ test "that poll is non anonymous" do
object = Object.normalize("https://friends.grishka.me/posts/54642", fetch: true)
result = PollView.render("show.json", %{object: object})
diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs
index 5245240373..2ef1a3c00b 100644
--- a/test/pleroma/web/mastodon_api/views/status_view_test.exs
+++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs
@@ -17,7 +17,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MastodonAPI.AccountView
alias Pleroma.Web.MastodonAPI.StatusView
- alias Pleroma.Web.RichMedia.Parser.Embed
+ alias Pleroma.Web.RichMedia.Card
require Bitwise
@@ -292,7 +292,9 @@ test "a note activity" do
card: nil,
reblog: nil,
content: HTML.filter_tags(object_data["content"]),
+ content_map: %{},
text: nil,
+ text_map: nil,
created_at: created_at,
edited_at: nil,
reblogs_count: 0,
@@ -306,6 +308,7 @@ test "a note activity" do
sensitive: false,
poll: nil,
spoiler_text: HTML.filter_tags(object_data["summary"]),
+ spoiler_text_map: %{},
visibility: "public",
media_attachments: [],
mentions: [],
@@ -335,7 +338,9 @@ test "a note activity" do
quote_url: nil,
quote_visible: false,
content: %{"text/plain" => HTML.strip_tags(object_data["content"])},
+ content_map: %{"text/plain" => %{}},
spoiler_text: %{"text/plain" => HTML.strip_tags(object_data["summary"])},
+ spoiler_text_map: %{"text/plain" => %{}},
expires_at: nil,
direct_conversation_id: nil,
thread_muted: false,
@@ -353,6 +358,70 @@ test "a note activity" do
assert_schema(status, "Status", Pleroma.Web.ApiSpec.spec())
end
+ test "a note activity with multiple languages" do
+ user = insert(:user)
+
+ note_obj =
+ insert(:note,
+ data: %{
+ "content" => "mew mew",
+ "contentMap" => %{"en" => "mew mew", "cmn" => "喵喵"},
+ "summary" => "mew",
+ "summaryMap" => %{"en" => "mew", "cmn" => "喵"}
+ }
+ )
+
+ note = insert(:note_activity, note: note_obj, user: user)
+
+ status = StatusView.render("show.json", %{activity: note})
+
+ assert %{
+ content: "mew mew",
+ content_map: %{"en" => "mew mew", "cmn" => "喵喵"},
+ spoiler_text: "mew",
+ spoiler_text_map: %{"en" => "mew", "cmn" => "喵"},
+ pleroma: %{
+ content: %{"text/plain" => "mew mew"},
+ content_map: %{"text/plain" => %{"en" => "mew mew", "cmn" => "喵喵"}},
+ spoiler_text: %{"text/plain" => "mew"},
+ spoiler_text_map: %{"text/plain" => %{"en" => "mew", "cmn" => "喵"}}
+ }
+ } = status
+ end
+
+ test "a note activity with single language" do
+ user = insert(:user)
+
+ note_obj =
+ insert(:note,
+ data: %{
+ "content" => "mew mew",
+ "contentMap" => %{"en" => "mew mew"},
+ "summary" => "mew",
+ "summaryMap" => %{"en" => "mew"},
+ "language" => "en"
+ }
+ )
+
+ note = insert(:note_activity, note: note_obj, user: user)
+
+ status = StatusView.render("show.json", %{activity: note})
+
+ assert %{
+ content: "mew mew",
+ content_map: %{"en" => "mew mew"},
+ spoiler_text: "mew",
+ spoiler_text_map: %{"en" => "mew"},
+ language: "en",
+ pleroma: %{
+ content: %{"text/plain" => "mew mew"},
+ content_map: %{"text/plain" => %{"en" => "mew mew"}},
+ spoiler_text: %{"text/plain" => "mew"},
+ spoiler_text_map: %{"text/plain" => %{"en" => "mew"}}
+ }
+ } = status
+ end
+
test "tells if the message is muted for some reason" do
user = insert(:user)
other_user = insert(:user)
@@ -448,7 +517,7 @@ test "a quote post" do
assert status.pleroma.quote_url == Object.normalize(quote_post).data["id"]
assert status.pleroma.quote_visible
- # Quotes don't go more than one level deep\
+ # Quotes don't go more than one level deep
refute status.pleroma.quote.pleroma.quote
assert status.pleroma.quote.pleroma.quote_id == to_string(post.id)
assert status.pleroma.quote.pleroma.quote_url == Object.normalize(post).data["id"]
@@ -593,7 +662,83 @@ test "create mentions from the 'tag' field" do
assert mention.url == recipient.ap_id
end
- test "attachments" do
+ describe "attachments" do
+ test "Complete Mastodon style" do
+ object = %{
+ "type" => "Image",
+ "url" => [
+ %{
+ "mediaType" => "image/png",
+ "href" => "someurl",
+ "width" => 200,
+ "height" => 100
+ }
+ ],
+ "blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn",
+ "uuid" => 6
+ }
+
+ expected = %{
+ id: "1638338801",
+ type: "image",
+ url: "someurl",
+ remote_url: "someurl",
+ preview_url: "someurl",
+ text_url: "someurl",
+ description: nil,
+ description_map: %{},
+ pleroma: %{mime_type: "image/png"},
+ meta: %{original: %{width: 200, height: 100, aspect: 2}},
+ blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn"
+ }
+
+ api_spec = Pleroma.Web.ApiSpec.spec()
+
+ assert expected == StatusView.render("attachment.json", %{attachment: object})
+ assert_schema(expected, "Attachment", api_spec)
+
+ # If theres a "id", use that instead of the generated one
+ object = Map.put(object, "id", 2)
+ result = StatusView.render("attachment.json", %{attachment: object})
+
+ assert %{id: "2"} = result
+ assert_schema(result, "Attachment", api_spec)
+ end
+
+ test "Honkerific" do
+ object = %{
+ "type" => "Image",
+ "url" => [
+ %{
+ "mediaType" => "image/png",
+ "href" => "someurl"
+ }
+ ],
+ "name" => "fool.jpeg",
+ "summary" => "they have played us for absolute fools."
+ }
+
+ expected = %{
+ blurhash: nil,
+ description: "they have played us for absolute fools.",
+ description_map: %{},
+ id: "1638338801",
+ pleroma: %{mime_type: "image/png", name: "fool.jpeg"},
+ preview_url: "someurl",
+ remote_url: "someurl",
+ text_url: "someurl",
+ type: "image",
+ url: "someurl"
+ }
+
+ api_spec = Pleroma.Web.ApiSpec.spec()
+
+ assert expected == StatusView.render("attachment.json", %{attachment: object})
+ assert_schema(expected, "Attachment", api_spec)
+ end
+ end
+
+ test "attachments with multilang" do
object = %{
"type" => "Image",
"url" => [
@@ -604,34 +749,14 @@ test "attachments" do
"height" => 100
}
],
+ "name" => "mew mew",
+ "nameMap" => %{"en" => "mew mew", "cmn" => "喵喵"},
"blurhash" => "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn",
"uuid" => 6
}
- expected = %{
- id: "1638338801",
- type: "image",
- url: "someurl",
- remote_url: "someurl",
- preview_url: "someurl",
- text_url: "someurl",
- description: nil,
- pleroma: %{mime_type: "image/png"},
- meta: %{original: %{width: 200, height: 100, aspect: 2}},
- blurhash: "UJJ8X[xYW,%Jtq%NNFbXB5j]IVM|9GV=WHRn"
- }
-
- api_spec = Pleroma.Web.ApiSpec.spec()
-
- assert expected == StatusView.render("attachment.json", %{attachment: object})
- assert_schema(expected, "Attachment", api_spec)
-
- # If theres a "id", use that instead of the generated one
- object = Map.put(object, "id", 2)
- result = StatusView.render("attachment.json", %{attachment: object})
-
- assert %{id: "2"} = result
- assert_schema(result, "Attachment", api_spec)
+ assert %{description: "mew mew", description_map: %{"en" => "mew mew", "cmn" => "喵喵"}} =
+ StatusView.render("attachment.json", %{attachment: object})
end
test "put the url advertised in the Activity in to the url attribute" do
@@ -756,45 +881,72 @@ test "it returns a a dictionary tags" do
describe "rich media cards" do
test "a rich media card without a site name renders correctly" do
- embed = %Embed{
- url: "http://example.com",
- title: "Example website",
- meta: %{"twitter:image" => "http://example.com/example.jpg"}
- }
+ page_url = "https://example.com"
- %{"provider_name" => "example.com"} = StatusView.render("card.json", %{embed: embed})
+ {:ok, card} =
+ Card.create(page_url, %{image: page_url <> "/example.jpg", title: "Example website"})
+
+ assert match?(%{provider_name: "example.com"}, StatusView.render("card.json", card))
end
test "a rich media card without a site name or image renders correctly" do
- embed = %Embed{
- url: "http://example.com",
- title: "Example website"
+ page_url = "https://example.com"
+
+ fields = %{
+ "url" => page_url,
+ "title" => "Example website"
}
- %{"provider_name" => "example.com"} = StatusView.render("card.json", %{embed: embed})
+ {:ok, card} = Card.create(page_url, fields)
+
+ assert match?(%{provider_name: "example.com"}, StatusView.render("card.json", card))
end
test "a rich media card without an image renders correctly" do
- embed = %Embed{
- url: "http://example.com",
- title: "Example website",
- meta: %{"twitter:title" => "Example site name"}
+ page_url = "https://example.com"
+
+ fields = %{
+ "url" => page_url,
+ "site_name" => "Example site name",
+ "title" => "Example website"
}
- %{"provider_name" => "example.com"} = StatusView.render("card.json", %{embed: embed})
+ {:ok, card} = Card.create(page_url, fields)
+
+ assert match?(%{provider_name: "example.com"}, StatusView.render("card.json", card))
+ end
+
+ test "a rich media card without descriptions returns the fields with empty strings" do
+ page_url = "https://example.com"
+
+ fields = %{
+ "url" => page_url,
+ "site_name" => "Example site name",
+ "title" => "Example website"
+ }
+
+ {:ok, card} = Card.create(page_url, fields)
+
+ assert match?(
+ %{description: "", image_description: ""},
+ StatusView.render("card.json", card)
+ )
end
test "a rich media card with all relevant data renders correctly" do
- embed = %Embed{
- url: "http://example.com",
- title: "Example website",
- meta: %{
- "twitter:title" => "Example site name",
- "twitter:image" => "http://example.com/example.jpg"
- }
+ page_url = "https://example.com"
+
+ fields = %{
+ "url" => page_url,
+ "site_name" => "Example site name",
+ "title" => "Example website",
+ "image" => page_url <> "/example.jpg",
+ "description" => "Example description"
}
- %{"provider_name" => "example.com"} = StatusView.render("card.json", %{embed: embed})
+ {:ok, card} = Card.create(page_url, fields)
+
+ assert match?(%{provider_name: "example.com"}, StatusView.render("card.json", card))
end
test "a rich media card has all media proxied" do
@@ -804,25 +956,25 @@ test "a rich media card has all media proxied" do
ConfigMock
|> stub_with(Pleroma.Test.StaticConfig)
- page_url = "http://example.com"
+ page_url = "https://example.com"
- card = %{
- url: page_url,
- site_name: "Example site name",
- title: "Example website",
- image: page_url <> "/example.jpg",
- audio: page_url <> "/example.ogg",
- video: page_url <> "/example.mp4",
- description: "Example description"
+ fields = %{
+ "url" => page_url,
+ "site_name" => "Example site name",
+ "title" => "Example website",
+ "image" => page_url <> "/example.jpg",
+ "audio" => page_url <> "/example.ogg",
+ "video" => page_url <> "/example.mp4",
+ "description" => "Example description"
}
- strcard = for {k, v} <- card, into: %{}, do: {to_string(k), v}
+ {:ok, card} = Card.create(page_url, fields)
%{
provider_name: "example.com",
image: image,
pleroma: %{opengraph: og}
- } = StatusView.render("card.json", %{page_url: page_url, rich_media: strcard})
+ } = StatusView.render("card.json", card)
assert String.match?(image, ~r/\/proxy\//)
assert String.match?(og["image"], ~r/\/proxy\//)
@@ -962,4 +1114,70 @@ test "with a source string, renders source and put text/plain as the content typ
assert status.content_type == "text/plain"
end
end
+
+ describe "history items" do
+ test "renders multilang" do
+ user = insert(:user)
+
+ note_obj =
+ insert(:note,
+ data: %{
+ "content" => "mew mew",
+ "contentMap" => %{"en" => "mew mew", "cmn" => "喵喵"},
+ "summary" => "mew",
+ "summaryMap" => %{"en" => "mew", "cmn" => "喵"}
+ }
+ )
+
+ note = insert(:note_activity, note: note_obj, user: user)
+
+ status =
+ StatusView.render("history_item.json", %{
+ activity: note,
+ user: user,
+ hashtags: [],
+ item: %{object: note_obj, chrono_order: 0}
+ })
+
+ assert %{
+ content: "mew mew",
+ content_map: %{"en" => "mew mew", "cmn" => "喵喵"},
+ spoiler_text: "mew",
+ spoiler_text_map: %{"en" => "mew", "cmn" => "喵"}
+ } = status
+ end
+ end
+
+ describe "source" do
+ test "renders multilang" do
+ user = insert(:user)
+
+ note_obj =
+ insert(:note,
+ data: %{
+ "source" => %{
+ "content" => "mew mew",
+ "contentMap" => %{"en" => "mew mew", "cmn" => "喵喵"},
+ "mediaType" => "text/plain"
+ },
+ "summary" => "mew",
+ "summaryMap" => %{"en" => "mew", "cmn" => "喵"}
+ }
+ )
+
+ note = insert(:note_activity, note: note_obj, user: user)
+
+ status =
+ StatusView.render("source.json", %{
+ activity: note
+ })
+
+ assert %{
+ text: "mew mew",
+ text_map: %{"en" => "mew mew", "cmn" => "喵喵"},
+ spoiler_text: "mew",
+ spoiler_text_map: %{"en" => "mew", "cmn" => "喵"}
+ } = status
+ end
+ end
end
diff --git a/test/pleroma/web/pleroma_api/controllers/notification_controller_test.exs b/test/pleroma/web/pleroma_api/controllers/notification_controller_test.exs
index 2e2935d8e6..9ccae2f212 100644
--- a/test/pleroma/web/pleroma_api/controllers/notification_controller_test.exs
+++ b/test/pleroma/web/pleroma_api/controllers/notification_controller_test.exs
@@ -26,13 +26,11 @@ test "it marks a single notification as read", %{user: user1, conn: conn} do
{:ok, [notification1]} = Notification.create_notifications(activity1)
{:ok, [notification2]} = Notification.create_notifications(activity2)
- response =
- conn
- |> put_req_header("content-type", "application/json")
- |> post("/api/v1/pleroma/notifications/read", %{id: notification1.id})
- |> json_response_and_validate_schema(:ok)
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/pleroma/notifications/read", %{id: notification1.id})
+ |> json_response_and_validate_schema(:ok)
- assert %{"pleroma" => %{"is_seen" => true}} = response
assert Repo.get(Notification, notification1.id).seen
refute Repo.get(Notification, notification2.id).seen
end
@@ -46,14 +44,17 @@ test "it marks multiple notifications as read", %{user: user1, conn: conn} do
[notification3, notification2, notification1] = Notification.for_user(user1, %{limit: 3})
- [response1, response2] =
- conn
- |> put_req_header("content-type", "application/json")
- |> post("/api/v1/pleroma/notifications/read", %{max_id: notification2.id})
- |> json_response_and_validate_schema(:ok)
+ refute Repo.get(Notification, notification1.id).seen
+ refute Repo.get(Notification, notification2.id).seen
+ refute Repo.get(Notification, notification3.id).seen
+
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/pleroma/notifications/read", %{max_id: notification2.id})
+ |> json_response_and_validate_schema(:ok)
+
+ [notification3, notification2, notification1] = Notification.for_user(user1, %{limit: 3})
- assert %{"pleroma" => %{"is_seen" => true}} = response1
- assert %{"pleroma" => %{"is_seen" => true}} = response2
assert Repo.get(Notification, notification1.id).seen
assert Repo.get(Notification, notification2.id).seen
refute Repo.get(Notification, notification3.id).seen
diff --git a/test/pleroma/web/pleroma_api/views/chat_message_reference_view_test.exs b/test/pleroma/web/pleroma_api/views/chat_message_reference_view_test.exs
index 6452ab1390..f17add774a 100644
--- a/test/pleroma/web/pleroma_api/views/chat_message_reference_view_test.exs
+++ b/test/pleroma/web/pleroma_api/views/chat_message_reference_view_test.exs
@@ -3,38 +3,29 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.PleromaAPI.ChatMessageReferenceViewTest do
- use Pleroma.DataCase, async: false
+ alias Pleroma.NullCache
+ use Pleroma.DataCase, async: true
alias Pleroma.Chat
alias Pleroma.Chat.MessageReference
alias Pleroma.Object
- alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
+ alias Pleroma.UnstubbedConfigMock, as: ConfigMock
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.PleromaAPI.Chat.MessageReferenceView
import Mox
import Pleroma.Factory
- import Tesla.Mock
- test "crawls valid, complete URLs" do
- mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
-
- ConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- Pleroma.UnstubbedConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
+ setup do: clear_config([:rich_media, :enabled], true)
+ test "it displays a chat message" do
user = insert(:user)
recipient = insert(:user)
+ ConfigMock
+ |> stub_with(Pleroma.Test.StaticConfig)
+
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image.jpg"),
@@ -52,14 +43,14 @@ test "crawls valid, complete URLs" do
cm_ref = MessageReference.for_chat_and_object(chat, object)
- {:ok, activity} =
- CommonAPI.post(user, %{
- status: "[test](https://example.com/ogp)",
- content_type: "text/markdown"
- })
+ id = cm_ref.id
- assert %{url: "https://example.com/ogp", meta: %{} = _} =
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
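+ # Cache mock: return a canned idempotency key for this message, fall back to NullCache
+ # for other lookups, and serve empty rich media results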
+ Pleroma.CachexMock
+ |> stub(:get, fn
+ :chat_message_id_idempotency_key_cache, ^id -> {:ok, "123"}
+ cache, key -> NullCache.get(cache, key)
+ end)
+ |> stub(:fetch, fn :rich_media_cache, _, _ -> {:ok, {:ok, %{}}} end)
chat_message = MessageReferenceView.render("show.json", chat_message_reference: cm_ref)
@@ -70,6 +61,9 @@ test "crawls valid, complete URLs" do
assert chat_message[:created_at]
assert chat_message[:unread] == false
assert match?([%{shortcode: "firefox"}], chat_message[:emojis])
+ assert chat_message[:idempotency_key] == "123"
+
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
{:ok, activity} =
CommonAPI.post_chat_message(recipient, user, "gkgkgk https://example.com/ogp",
diff --git a/test/pleroma/web/plugs/http_security_plug_test.exs b/test/pleroma/web/plugs/http_security_plug_test.exs
index c79170382c..11a351a41f 100644
--- a/test/pleroma/web/plugs/http_security_plug_test.exs
+++ b/test/pleroma/web/plugs/http_security_plug_test.exs
@@ -3,14 +3,52 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do
- use Pleroma.Web.ConnCase
+ use Pleroma.Web.ConnCase, async: true
alias Plug.Conn
- describe "http security enabled" do
- setup do: clear_config([:http_security, :enabled], true)
+ import Mox
- test "it sends CSP headers when enabled", %{conn: conn} do
+ setup do
+ base_config = Pleroma.Config.get([:http_security])
+ %{base_config: base_config}
+ end
+
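+ # Stubs config reads: [:http_security, key] is answered from `config`, any other key from `additional`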
+ defp mock_config(config, additional \\ %{}) do
+ Pleroma.StaticStubbedConfigMock
+ |> stub(:get, fn
+ [:http_security, key] -> config[key]
+ key -> additional[key]
+ end)
+ end
+
+ describe "http security enabled" do
+ setup %{base_config: base_config} do
+ %{base_config: Keyword.put(base_config, :enabled, true)}
+ end
+
+ test "it does not contain unsafe-eval", %{conn: conn, base_config: base_config} do
+ mock_config(base_config)
+
+ conn = get(conn, "/api/v1/instance")
+ [header] = Conn.get_resp_header(conn, "content-security-policy")
+ refute header =~ ~r/unsafe-eval/
+ end
+
+ test "with allow_unsafe_eval set, it does contain it", %{conn: conn, base_config: base_config} do
+ base_config =
+ base_config
+ |> Keyword.put(:allow_unsafe_eval, true)
+
+ mock_config(base_config)
+
+ conn = get(conn, "/api/v1/instance")
+ [header] = Conn.get_resp_header(conn, "content-security-policy")
+ assert header =~ ~r/unsafe-eval/
+ end
+
+ test "it sends CSP headers when enabled", %{conn: conn, base_config: base_config} do
+ mock_config(base_config)
conn = get(conn, "/api/v1/instance")
refute Conn.get_resp_header(conn, "x-xss-protection") == []
@@ -22,8 +60,10 @@ test "it sends CSP headers when enabled", %{conn: conn} do
refute Conn.get_resp_header(conn, "content-security-policy") == []
end
- test "it sends STS headers when enabled", %{conn: conn} do
- clear_config([:http_security, :sts], true)
+ test "it sends STS headers when enabled", %{conn: conn, base_config: base_config} do
+ base_config
+ |> Keyword.put(:sts, true)
+ |> mock_config()
conn = get(conn, "/api/v1/instance")
@@ -31,8 +71,10 @@ test "it sends STS headers when enabled", %{conn: conn} do
refute Conn.get_resp_header(conn, "expect-ct") == []
end
- test "it does not send STS headers when disabled", %{conn: conn} do
- clear_config([:http_security, :sts], false)
+ test "it does not send STS headers when disabled", %{conn: conn, base_config: base_config} do
+ base_config
+ |> Keyword.put(:sts, false)
+ |> mock_config()
conn = get(conn, "/api/v1/instance")
@@ -40,19 +82,30 @@ test "it does not send STS headers when disabled", %{conn: conn} do
assert Conn.get_resp_header(conn, "expect-ct") == []
end
- test "referrer-policy header reflects configured value", %{conn: conn} do
- resp = get(conn, "/api/v1/instance")
+ test "referrer-policy header reflects configured value", %{
+ conn: conn,
+ base_config: base_config
+ } do
+ mock_config(base_config)
+ resp = get(conn, "/api/v1/instance")
assert Conn.get_resp_header(resp, "referrer-policy") == ["same-origin"]
- clear_config([:http_security, :referrer_policy], "no-referrer")
+ base_config
+ |> Keyword.put(:referrer_policy, "no-referrer")
+ |> mock_config()
resp = get(conn, "/api/v1/instance")
assert Conn.get_resp_header(resp, "referrer-policy") == ["no-referrer"]
end
- test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} do
+ test "it sends `report-to` & `report-uri` CSP response headers", %{
+ conn: conn,
+ base_config: base_config
+ } do
+ mock_config(base_config)
+
conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy")
@@ -65,7 +118,11 @@ test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} d
"{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}"
end
- test "default values for img-src and media-src with disabled media proxy", %{conn: conn} do
+ test "default values for img-src and media-src with disabled media proxy", %{
+ conn: conn,
+ base_config: base_config
+ } do
+ mock_config(base_config)
conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy")
@@ -73,60 +130,129 @@ test "default values for img-src and media-src with disabled media proxy", %{con
assert csp =~ "img-src 'self' data: blob: https:;"
end
- test "it sets the Service-Worker-Allowed header", %{conn: conn} do
- clear_config([:http_security, :enabled], true)
- clear_config([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"})
+ test "it sets the Service-Worker-Allowed header", %{conn: conn, base_config: base_config} do
+ base_config
+ |> Keyword.put(:enabled, true)
- clear_config([:frontends, :available], %{
- "fedi-fe" => %{
- "name" => "fedi-fe",
- "custom-http-headers" => [{"service-worker-allowed", "/"}]
- }
- })
+ additional_config =
+ %{}
+ |> Map.put([:frontends, :primary], %{"name" => "fedi-fe", "ref" => "develop"})
+ |> Map.put(
+ [:frontends, :available],
+ %{
+ "fedi-fe" => %{
+ "name" => "fedi-fe",
+ "custom-http-headers" => [{"service-worker-allowed", "/"}]
+ }
+ }
+ )
+ mock_config(base_config, additional_config)
conn = get(conn, "/api/v1/instance")
assert Conn.get_resp_header(conn, "service-worker-allowed") == ["/"]
end
end
describe "img-src and media-src" do
- setup do
- clear_config([:http_security, :enabled], true)
- clear_config([:media_proxy, :enabled], true)
- clear_config([:media_proxy, :proxy_opts, :redirect_on_failure], false)
+ setup %{base_config: base_config} do
+ base_config =
+ base_config
+ |> Keyword.put(:enabled, true)
+
+ additional_config =
+ %{}
+ |> Map.put([:media_proxy, :enabled], true)
+ |> Map.put([:media_proxy, :proxy_opts, :redirect_on_failure], false)
+ |> Map.put([:media_proxy, :whitelist], [])
+
+ %{base_config: base_config, additional_config: additional_config}
end
- test "media_proxy with base_url", %{conn: conn} do
+ test "media_proxy with base_url", %{
+ conn: conn,
+ base_config: base_config,
+ additional_config: additional_config
+ } do
url = "https://example.com"
- clear_config([:media_proxy, :base_url], url)
+
+ additional_config =
+ additional_config
+ |> Map.put([:media_proxy, :base_url], url)
+
+ mock_config(base_config, additional_config)
+
assert_media_img_src(conn, url)
end
- test "upload with base url", %{conn: conn} do
+ test "upload with base url", %{
+ conn: conn,
+ base_config: base_config,
+ additional_config: additional_config
+ } do
url = "https://example2.com"
- clear_config([Pleroma.Upload, :base_url], url)
+
+ additional_config =
+ additional_config
+ |> Map.put([Pleroma.Upload, :base_url], url)
+
+ mock_config(base_config, additional_config)
+
assert_media_img_src(conn, url)
end
- test "with S3 public endpoint", %{conn: conn} do
+ test "with S3 public endpoint", %{
+ conn: conn,
+ base_config: base_config,
+ additional_config: additional_config
+ } do
url = "https://example3.com"
- clear_config([Pleroma.Uploaders.S3, :public_endpoint], url)
+
+ additional_config =
+ additional_config
+ |> Map.put([Pleroma.Uploaders.S3, :public_endpoint], url)
+
+ mock_config(base_config, additional_config)
assert_media_img_src(conn, url)
end
- test "with captcha endpoint", %{conn: conn} do
- clear_config([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com")
+ test "with captcha endpoint", %{
+ conn: conn,
+ base_config: base_config,
+ additional_config: additional_config
+ } do
+ additional_config =
+ additional_config
+ |> Map.put([Pleroma.Captcha.Mock, :endpoint], "https://captcha.com")
+ |> Map.put([Pleroma.Captcha, :method], Pleroma.Captcha.Mock)
+
+ mock_config(base_config, additional_config)
assert_media_img_src(conn, "https://captcha.com")
end
- test "with media_proxy whitelist", %{conn: conn} do
- clear_config([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"])
+ test "with media_proxy whitelist", %{
+ conn: conn,
+ base_config: base_config,
+ additional_config: additional_config
+ } do
+ additional_config =
+ additional_config
+ |> Map.put([:media_proxy, :whitelist], ["https://example6.com", "https://example7.com"])
+
+ mock_config(base_config, additional_config)
assert_media_img_src(conn, "https://example7.com https://example6.com")
end
# TODO: delete after removing support bare domains for media proxy whitelist
- test "with media_proxy bare domains whitelist (deprecated)", %{conn: conn} do
- clear_config([:media_proxy, :whitelist], ["example4.com", "example5.com"])
+ test "with media_proxy bare domains whitelist (deprecated)", %{
+ conn: conn,
+ base_config: base_config,
+ additional_config: additional_config
+ } do
+ additional_config =
+ additional_config
+ |> Map.put([:media_proxy, :whitelist], ["example4.com", "example5.com"])
+
+ mock_config(base_config, additional_config)
assert_media_img_src(conn, "example5.com example4.com")
end
end
@@ -138,8 +264,10 @@ defp assert_media_img_src(conn, url) do
assert csp =~ "img-src 'self' data: blob: #{url};"
end
- test "it does not send CSP headers when disabled", %{conn: conn} do
- clear_config([:http_security, :enabled], false)
+ test "it does not send CSP headers when disabled", %{conn: conn, base_config: base_config} do
+ base_config
+ |> Keyword.put(:enabled, false)
+ |> mock_config
conn = get(conn, "/api/v1/instance")
diff --git a/test/pleroma/web/plugs/http_signature_plug_test.exs b/test/pleroma/web/plugs/http_signature_plug_test.exs
index de68e8823a..9d07270bb4 100644
--- a/test/pleroma/web/plugs/http_signature_plug_test.exs
+++ b/test/pleroma/web/plugs/http_signature_plug_test.exs
@@ -3,89 +3,89 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSignaturePlugTest do
- use Pleroma.Web.ConnCase
+ use Pleroma.Web.ConnCase, async: true
+
+ alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
+ alias Pleroma.StubbedHTTPSignaturesMock, as: HTTPSignaturesMock
alias Pleroma.Web.Plugs.HTTPSignaturePlug
- import Plug.Conn
+ import Mox
import Phoenix.Controller, only: [put_format: 2]
- import Mock
+ import Plug.Conn
- test "it call HTTPSignatures to check validity if the actor signed it" do
+ test "it calls HTTPSignatures to check validity if the actor signed it" do
params = %{"actor" => "http://mastodon.example.org/users/admin"}
conn = build_conn(:get, "/doesntmattter", params)
- with_mock HTTPSignatures,
- validate_conn: fn _ -> true end,
- signature_for_conn: fn _ ->
- %{"keyId" => "http://mastodon.example.org/users/admin#main-key"}
- end do
- conn =
- conn
- |> put_req_header(
- "signature",
- "keyId=\"http://mastodon.example.org/users/admin#main-key"
- )
- |> put_format("activity+json")
- |> HTTPSignaturePlug.call(%{})
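+ # Mox: the plug is expected to call validate_conn/1 exactly once; returning true marks the signature as valid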
+ HTTPSignaturesMock
+ |> expect(:validate_conn, fn _ -> true end)
- assert conn.assigns.valid_signature == true
- assert conn.halted == false
- assert called(HTTPSignatures.validate_conn(:_))
- end
+ conn =
+ conn
+ |> put_req_header(
+ "signature",
+ "keyId=\"http://mastodon.example.org/users/admin#main-key"
+ )
+ |> put_format("activity+json")
+ |> HTTPSignaturePlug.call(%{})
+
+ assert conn.assigns.valid_signature == true
+ assert conn.halted == false
end
describe "requires a signature when `authorized_fetch_mode` is enabled" do
setup do
- clear_config([:activitypub, :authorized_fetch_mode], true)
-
params = %{"actor" => "http://mastodon.example.org/users/admin"}
conn = build_conn(:get, "/doesntmattter", params) |> put_format("activity+json")
[conn: conn]
end
- test "when signature header is present", %{conn: conn} do
- with_mock HTTPSignatures,
- validate_conn: fn _ -> false end,
- signature_for_conn: fn _ ->
- %{"keyId" => "http://mastodon.example.org/users/admin#main-key"}
- end do
- conn =
- conn
- |> put_req_header(
- "signature",
- "keyId=\"http://mastodon.example.org/users/admin#main-key"
- )
- |> HTTPSignaturePlug.call(%{})
+ test "when signature header is present", %{conn: orig_conn} do
+ ConfigMock
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> [] end)
- assert conn.assigns.valid_signature == false
- assert conn.halted == true
- assert conn.status == 401
- assert conn.state == :sent
- assert conn.resp_body == "Request not signed"
- assert called(HTTPSignatures.validate_conn(:_))
- end
+ HTTPSignaturesMock
+ |> expect(:validate_conn, 2, fn _ -> false end)
- with_mock HTTPSignatures,
- validate_conn: fn _ -> true end,
- signature_for_conn: fn _ ->
- %{"keyId" => "http://mastodon.example.org/users/admin#main-key"}
- end do
- conn =
- conn
- |> put_req_header(
- "signature",
- "keyId=\"http://mastodon.example.org/users/admin#main-key"
- )
- |> HTTPSignaturePlug.call(%{})
+ conn =
+ orig_conn
+ |> put_req_header(
+ "signature",
+ "keyId=\"http://mastodon.example.org/users/admin#main-key"
+ )
+ |> HTTPSignaturePlug.call(%{})
- assert conn.assigns.valid_signature == true
- assert conn.halted == false
- assert called(HTTPSignatures.validate_conn(:_))
- end
+ assert conn.assigns.valid_signature == false
+ assert conn.halted == true
+ assert conn.status == 401
+ assert conn.state == :sent
+ assert conn.resp_body == "Request not signed"
+
+ ConfigMock
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+
+ HTTPSignaturesMock
+ |> expect(:validate_conn, fn _ -> true end)
+
+ conn =
+ orig_conn
+ |> put_req_header(
+ "signature",
+ "keyId=\"http://mastodon.example.org/users/admin#main-key"
+ )
+ |> HTTPSignaturePlug.call(%{})
+
+ assert conn.assigns.valid_signature == true
+ assert conn.halted == false
end
test "halts the connection when `signature` header is not present", %{conn: conn} do
+ ConfigMock
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] -> [] end)
+
conn = HTTPSignaturePlug.call(conn, %{})
assert conn.assigns[:valid_signature] == nil
assert conn.halted == true
@@ -93,48 +93,73 @@ test "halts the connection when `signature` header is not present", %{conn: conn
assert conn.state == :sent
assert conn.resp_body == "Request not signed"
end
- end
- test "rejects requests from `rejected_instances` when `authorized_fetch_mode` is enabled" do
- clear_config([:activitypub, :authorized_fetch_mode], true)
- clear_config([:instance, :rejected_instances], [{"mastodon.example.org", "no reason"}])
+ test "exempts specific IPs from `authorized_fetch_mode_exceptions`", %{conn: conn} do
+ ConfigMock
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode_exceptions], [] ->
+ ["192.168.0.0/24"]
+ end)
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+
+ HTTPSignaturesMock
+ |> expect(:validate_conn, 2, fn _ -> false end)
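+
+ # remote_ip 192.168.0.1 falls inside the exempted 192.168.0.0/24 range,
+ # so the request passes even though signature validation fails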
- with_mock HTTPSignatures,
- validate_conn: fn _ -> true end,
- signature_for_conn: fn _ ->
- %{"keyId" => "http://mastodon.example.org/users/admin#main-key"}
- end do
conn =
- build_conn(:get, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"})
+ conn
+ |> Map.put(:remote_ip, {192, 168, 0, 1})
|> put_req_header(
"signature",
"keyId=\"http://mastodon.example.org/users/admin#main-key"
)
- |> put_format("activity+json")
|> HTTPSignaturePlug.call(%{})
- assert conn.assigns.valid_signature == true
- assert conn.halted == true
- assert called(HTTPSignatures.validate_conn(:_))
- end
-
- with_mock HTTPSignatures,
- validate_conn: fn _ -> true end,
- signature_for_conn: fn _ ->
- %{"keyId" => "http://allowed.example.org/users/admin#main-key"}
- end do
- conn =
- build_conn(:get, "/doesntmattter", %{"actor" => "http://allowed.example.org/users/admin"})
- |> put_req_header(
- "signature",
- "keyId=\"http://allowed.example.org/users/admin#main-key"
- )
- |> put_format("activity+json")
- |> HTTPSignaturePlug.call(%{})
-
- assert conn.assigns.valid_signature == true
+ assert conn.remote_ip == {192, 168, 0, 1}
assert conn.halted == false
- assert called(HTTPSignatures.validate_conn(:_))
end
end
+
+ test "rejects requests from `rejected_instances` when `authorized_fetch_mode` is enabled" do
+ ConfigMock
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+ |> expect(:get, fn [:instance, :rejected_instances] ->
+ [{"mastodon.example.org", "no reason"}]
+ end)
+
+ HTTPSignaturesMock
+ |> expect(:validate_conn, fn _ -> true end)
+
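+ # a valid signature from a rejected instance must still result in a halted connection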
+ conn =
+ build_conn(:get, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"})
+ |> put_req_header(
+ "signature",
+ "keyId=\"http://mastodon.example.org/users/admin#main-key"
+ )
+ |> put_format("activity+json")
+ |> HTTPSignaturePlug.call(%{})
+
+ assert conn.assigns.valid_signature == true
+ assert conn.halted == true
+
+ ConfigMock
+ |> expect(:get, fn [:activitypub, :authorized_fetch_mode], false -> true end)
+ |> expect(:get, fn [:instance, :rejected_instances] ->
+ [{"mastodon.example.org", "no reason"}]
+ end)
+
+ HTTPSignaturesMock
+ |> expect(:validate_conn, fn _ -> true end)
+
+ conn =
+ build_conn(:get, "/doesntmattter", %{"actor" => "http://allowed.example.org/users/admin"})
+ |> put_req_header(
+ "signature",
+ "keyId=\"http://allowed.example.org/users/admin#main-key"
+ )
+ |> put_format("activity+json")
+ |> HTTPSignaturePlug.call(%{})
+
+ assert conn.assigns.valid_signature == true
+ assert conn.halted == false
+ end
end
diff --git a/test/pleroma/web/rich_media/card_test.exs b/test/pleroma/web/rich_media/card_test.exs
new file mode 100644
index 0000000000..516ac99512
--- /dev/null
+++ b/test/pleroma/web/rich_media/card_test.exs
@@ -0,0 +1,71 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.CardTest do
+ use Pleroma.DataCase, async: true
+
+ alias Pleroma.UnstubbedConfigMock, as: ConfigMock
+ alias Pleroma.Web.CommonAPI
+ alias Pleroma.Web.RichMedia.Card
+
+ import Mox
+ import Pleroma.Factory
+ import Tesla.Mock
+
+ setup do
+ mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+
+ ConfigMock
+ |> stub_with(Pleroma.Test.StaticConfig)
+
+ :ok
+ end
+
+ setup do: clear_config([:rich_media, :enabled], true)
+
+ test "crawls URL in activity" do
+ user = insert(:user)
+
+ url = "https://example.com/ogp"
+ url_hash = Card.url_to_hash(url)
+
+ {:ok, activity} =
+ CommonAPI.post(user, %{
+ status: "[test](#{url})",
+ content_type: "text/markdown"
+ })
+
+ assert %Card{url_hash: ^url_hash, fields: _} = Card.get_by_activity(activity)
+ end
+
+ test "recrawls URLs on status edits/updates" do
+ original_url = "https://google.com/"
+ original_url_hash = Card.url_to_hash(original_url)
+ updated_url = "https://yahoo.com/"
+ updated_url_hash = Card.url_to_hash(updated_url)
+
+ user = insert(:user)
+ {:ok, activity} = CommonAPI.post(user, %{status: "I like this site #{original_url}"})
+
+ # Force a backfill
+ Card.get_by_activity(activity)
+
+ assert match?(
+ %Card{url_hash: ^original_url_hash, fields: _},
+ Card.get_by_activity(activity)
+ )
+
+ {:ok, _} = CommonAPI.update(user, activity, %{status: "I like this site #{updated_url}"})
+
+ activity = Pleroma.Activity.get_by_id(activity.id)
+
+ # Force a backfill
+ Card.get_by_activity(activity)
+
+ assert match?(
+ %Card{url_hash: ^updated_url_hash, fields: _},
+ Card.get_by_activity(activity)
+ )
+ end
+end
diff --git a/test/pleroma/web/rich_media/helpers_test.exs b/test/pleroma/web/rich_media/helpers_test.exs
deleted file mode 100644
index f80103c800..0000000000
--- a/test/pleroma/web/rich_media/helpers_test.exs
+++ /dev/null
@@ -1,137 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.HelpersTest do
- use Pleroma.DataCase, async: false
-
- alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
- alias Pleroma.Web.CommonAPI
- alias Pleroma.Web.RichMedia.Helpers
-
- import Mox
- import Pleroma.Factory
- import Tesla.Mock
-
- setup do
- mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
-
- ConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> false
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
- |> stub(:get, fn
- path, default -> Pleroma.Test.StaticConfig.get(path, default)
- end)
-
- :ok
- end
-
- test "refuses to crawl incomplete URLs" do
- user = insert(:user)
-
- {:ok, activity} =
- CommonAPI.post(user, %{
- status: "[test](example.com/ogp)",
- content_type: "text/markdown"
- })
-
- ConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- assert Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) == nil
- end
-
- test "refuses to crawl malformed URLs" do
- user = insert(:user)
-
- {:ok, activity} =
- CommonAPI.post(user, %{
- status: "[test](example.com[]/ogp)",
- content_type: "text/markdown"
- })
-
- ConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- assert Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) == nil
- end
-
- test "crawls valid, complete URLs" do
- user = insert(:user)
-
- {:ok, activity} =
- CommonAPI.post(user, %{
- status: "[test](https://example.com/ogp)",
- content_type: "text/markdown"
- })
-
- ConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- assert %{url: "https://example.com/ogp", meta: %{} = _} =
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- end
-
- test "recrawls URLs on updates" do
- original_url = "https://google.com/"
- updated_url = "https://yahoo.com/"
-
- Pleroma.StaticStubbedConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- user = insert(:user)
- {:ok, activity} = CommonAPI.post(user, %{status: "I like this site #{original_url}"})
-
- assert match?(
- %{url: ^original_url, meta: _},
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- )
-
- {:ok, _} = CommonAPI.update(user, activity, %{status: "I like this site #{updated_url}"})
-
- activity = Pleroma.Activity.get_by_id(activity.id)
-
- assert match?(
- %{url: ^updated_url, meta: _},
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- )
- end
-
- test "refuses to crawl URLs of private network from posts" do
- user = insert(:user)
-
- {:ok, activity} =
- CommonAPI.post(user, %{status: "http://127.0.0.1:4000/notice/9kCP7VNyPJXFOXDrgO"})
-
- {:ok, activity2} = CommonAPI.post(user, %{status: "https://10.111.10.1/notice/9kCP7V"})
- {:ok, activity3} = CommonAPI.post(user, %{status: "https://172.16.32.40/notice/9kCP7V"})
- {:ok, activity4} = CommonAPI.post(user, %{status: "https://192.168.10.40/notice/9kCP7V"})
- {:ok, activity5} = CommonAPI.post(user, %{status: "https://pleroma.local/notice/9kCP7V"})
-
- ConfigMock
- |> stub(:get, fn
- [:rich_media, :enabled] -> true
- path -> Pleroma.Test.StaticConfig.get(path)
- end)
-
- assert Helpers.fetch_data_for_activity(activity) == nil
- assert Helpers.fetch_data_for_activity(activity2) == nil
- assert Helpers.fetch_data_for_activity(activity3) == nil
- assert Helpers.fetch_data_for_activity(activity4) == nil
- assert Helpers.fetch_data_for_activity(activity5) == nil
- end
-end
diff --git a/test/pleroma/web/rich_media/parser/card_test.exs b/test/pleroma/web/rich_media/parser/card_test.exs
deleted file mode 100644
index 07d367084e..0000000000
--- a/test/pleroma/web/rich_media/parser/card_test.exs
+++ /dev/null
@@ -1,129 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Parser.CardTest do
- use ExUnit.Case, async: true
- alias Pleroma.Web.RichMedia.Parser.Card
- alias Pleroma.Web.RichMedia.Parser.Embed
- alias Pleroma.Web.RichMedia.Parsers.TwitterCard
-
- describe "parse/1" do
- test "converts an %Embed{} into a %Card{}" do
- url =
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html"
-
- embed =
- File.read!("test/fixtures/nypd-facial-recognition-children-teenagers.html")
- |> Floki.parse_document!()
- |> TwitterCard.parse(%Embed{url: url})
-
- expected = %Card{
- description:
- "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- image:
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
- title: "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- type: "link",
- provider_name: "www.nytimes.com",
- provider_url: "https://www.nytimes.com",
- url: url
- }
-
- assert Card.parse(embed) == {:ok, expected}
- end
-
- test "converts URL paths into absolute URLs" do
- embed = %Embed{
- url: "https://spam.com/luigi",
- title: "Watch Luigi not doing anything",
- meta: %{
- "og:image" => "/uploads/weegee.jpeg"
- }
- }
-
- {:ok, card} = Card.parse(embed)
- assert card.image == "https://spam.com/uploads/weegee.jpeg"
- end
-
- test "falls back to Link with invalid Rich/Video" do
- url = "https://ishothim.com/our-work/mexican-drug-cartels/"
- oembed = File.read!("test/fixtures/rich_media/wordpress_embed.json") |> Jason.decode!()
-
- embed =
- File.read!("test/fixtures/rich_media/wordpress.html")
- |> Floki.parse_document!()
- |> TwitterCard.parse(%Embed{url: url, oembed: oembed})
-
- expected = %Card{
- author_name: "Michael Jeter",
- author_url: "https://ishothim.com/author/mike/",
- blurhash: nil,
- description:
- "I Shot Him collaborated with the folks at Visual.ly on this informative animation about the violence from drug cartels happening right across our border. We researched, wrote, illustrated, and animated this piece to inform people about the connections of our drug and gun laws to the death of innocence in Mexico.",
- embed_url: nil,
- height: 338,
- html: "",
- image: "https://ishothim.com/wp-content/uploads/2013/01/Cartel_feature.jpg",
- provider_name: "I Shot Him",
- provider_url: "https://ishothim.com",
- title: "Mexican Drug Cartels",
- type: "link",
- url: "https://ishothim.com/our-work/mexican-drug-cartels/",
- width: 600
- }
-
- assert Card.parse(embed) == {:ok, expected}
- end
- end
-
- describe "validate/1" do
- test "returns {:ok, card} with a valid %Card{}" do
- card = %Card{
- title: "Moms can't believe this one trick",
- url: "http://spam.com",
- type: "link"
- }
-
- assert {:ok, ^card} = Card.validate(card)
- end
- end
-
- describe "fix_uri/2" do
- setup do: %{base_uri: "https://benis.xyz/hello/fam"}
-
- test "two full URLs", %{base_uri: base_uri} do
- uri = "https://benis.xyz/images/pic.jpeg"
- assert Card.fix_uri(uri, base_uri) == uri
- end
-
- test "URI with leading slash", %{base_uri: base_uri} do
- uri = "/images/pic.jpeg"
- expected = "https://benis.xyz/images/pic.jpeg"
- assert Card.fix_uri(uri, base_uri) == expected
- end
-
- test "URI without leading slash", %{base_uri: base_uri} do
- uri = "images/pic.jpeg"
- expected = "https://benis.xyz/images/pic.jpeg"
- assert Card.fix_uri(uri, base_uri) == expected
- end
-
- test "empty URI", %{base_uri: base_uri} do
- assert Card.fix_uri("", base_uri) == nil
- end
-
- test "nil URI", %{base_uri: base_uri} do
- assert Card.fix_uri(nil, base_uri) == nil
- end
-
- # https://github.com/elixir-lang/elixir/issues/10771
- test "Elixir #10771", _ do
- uri =
- "https://images.macrumors.com/t/4riJyi1XC906qyJ41nAfOgpvo1I=/1600x/https://images.macrumors.com/article-new/2020/09/spatialaudiofeature.jpg"
-
- base_uri = "https://www.macrumors.com/guide/apps-support-apples-spatial-audio-feature/"
- assert Card.fix_uri(uri, base_uri) == uri
- end
- end
-end
diff --git a/test/pleroma/web/rich_media/parser/meta_tags_test.exs b/test/pleroma/web/rich_media/parser/meta_tags_test.exs
deleted file mode 100644
index 128c83a95f..0000000000
--- a/test/pleroma/web/rich_media/parser/meta_tags_test.exs
+++ /dev/null
@@ -1,81 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Parser.MetaTagsTest do
- use ExUnit.Case, async: true
- alias Pleroma.Web.RichMedia.Parser.MetaTags
-
- test "returns a map of values" do
- html =
- File.read!("test/fixtures/nypd-facial-recognition-children-teenagers.html")
- |> Floki.parse_document!()
-
- expected = %{
- "CG" => "nyregion",
- "CN" => "experience-tech-and-society",
- "CT" => "spotlight",
- "PST" => "News",
- "PT" => "article",
- "SCG" => "",
- "al:android:app_name" => "NYTimes",
- "al:android:package" => "com.nytimes.android",
- "al:android:url" => "nytimes://reader/id/100000006583622",
- "al:ipad:app_name" => "NYTimes",
- "al:ipad:app_store_id" => "357066198",
- "al:ipad:url" =>
- "nytimes://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "al:iphone:app_name" => "NYTimes",
- "al:iphone:app_store_id" => "284862083",
- "al:iphone:url" =>
- "nytimes://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "article:modified" => "2019-08-02T09:30:23.000Z",
- "article:published" => "2019-08-01T17:15:31.000Z",
- "article:section" => "New York",
- "article:tag" => "New York City",
- "articleid" => "100000006583622",
- "byl" => "By Joseph Goldstein and Ali Watkins",
- "description" =>
- "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "fb:app_id" => "9869919170",
- "image" =>
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
- "msapplication-starturl" => "https://www.nytimes.com",
- "news_keywords" =>
- "NYPD,Juvenile delinquency,Facial Recognition,Privacy,Government Surveillance,Police,Civil Rights,NYC",
- "nyt_uri" => "nyt://article/9da58246-2495-505f-9abd-b5fda8e67b56",
- "og:description" =>
- "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "og:image" =>
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
- "og:title" =>
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "og:type" => "article",
- "og:url" =>
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "pdate" => "20190801",
- "pubp_event_id" => "pubp://event/47a657bafa8a476bb36832f90ee5ac6e",
- "robots" => "noarchive",
- "thumbnail" =>
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-thumbStandard.jpg",
- "twitter:app:id:googleplay" => "com.nytimes.android",
- "twitter:app:name:googleplay" => "NYTimes",
- "twitter:app:url:googleplay" => "nytimes://reader/id/100000006583622",
- "twitter:card" => "summary_large_image",
- "twitter:description" =>
- "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "twitter:image" =>
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
- "twitter:image:alt" => "",
- "twitter:title" =>
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "twitter:url" =>
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "url" =>
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "viewport" => "width=device-width, initial-scale=1, maximum-scale=1"
- }
-
- assert MetaTags.parse(html) == expected
- end
-end
diff --git a/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs b/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs
index b90f7d9e23..cc28aa7f39 100644
--- a/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs
+++ b/test/pleroma/web/rich_media/parser/ttl/aws_signed_url_test.exs
@@ -3,8 +3,23 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrlTest do
- # Relies on Cachex, needs to be synchronous
- use Pleroma.DataCase
+ use Pleroma.DataCase, async: false
+ use Oban.Testing, repo: Pleroma.Repo
+
+ import Mox
+
+ alias Pleroma.UnstubbedConfigMock, as: ConfigMock
+ alias Pleroma.Web.RichMedia.Card
+ alias Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
+
+ setup do
+ ConfigMock
+ |> stub_with(Pleroma.Test.StaticConfig)
+
+ clear_config([:rich_media, :enabled], true)
+
+ :ok
+ end
test "s3 signed url is parsed correct for expiration time" do
url = "https://pleroma.social/amz"
@@ -43,26 +58,35 @@ test "s3 signed url is parsed and correct ttl is set for rich media" do
-
+
"""
Tesla.Mock.mock(fn
%{
method: :get,
- url: "https://pleroma.social/amz"
+ url: ^url
} ->
%Tesla.Env{status: 200, body: body}
+
+ %{method: :head} ->
+ %Tesla.Env{status: 200}
end)
- Cachex.put(:rich_media_cache, url, metadata)
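+ # backfilling the card should enqueue a RichMediaExpirationWorker job at the signed URL's expiry time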
+ Card.get_or_backfill_by_url(url)
- Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image(metadata, url)
+ assert_enqueued(worker: Pleroma.Workers.RichMediaExpirationWorker, args: %{"url" => url})
- {:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)
+ [%Oban.Job{scheduled_at: scheduled_at}] = all_enqueued()
- # as there is delay in setting and pulling the data from cache we ignore 1 second
- # make it 2 seconds for flakyness
- assert_in_delta(valid_till * 1000, cache_ttl, 2000)
+ timestamp_dt = Timex.parse!(timestamp, "{ISO:Basic:Z}")
+
+ assert DateTime.diff(scheduled_at, timestamp_dt) == valid_till
+ end
+
+ test "AWS URL for an image without expiration works" do
+ og_data = %{"image" => "https://amazonaws.com/image.png"}
+
+ assert is_nil(AwsSignedUrl.ttl(og_data, ""))
end
defp construct_s3_url(timestamp, valid_till) do
@@ -71,11 +95,11 @@ defp construct_s3_url(timestamp, valid_till) do
defp construct_metadata(timestamp, valid_till, url) do
%{
- image: construct_s3_url(timestamp, valid_till),
- site: "Pleroma",
- title: "Pleroma",
- description: "Pleroma",
- url: url
+ "image" => construct_s3_url(timestamp, valid_till),
+ "site" => "Pleroma",
+ "title" => "Pleroma",
+ "description" => "Pleroma",
+ "url" => url
}
end
end
diff --git a/test/pleroma/web/rich_media/parser/ttl/opengraph_test.exs b/test/pleroma/web/rich_media/parser/ttl/opengraph_test.exs
new file mode 100644
index 0000000000..770968d477
--- /dev/null
+++ b/test/pleroma/web/rich_media/parser/ttl/opengraph_test.exs
@@ -0,0 +1,41 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.Parser.TTL.OpengraphTest do
+ use Pleroma.DataCase
+ use Oban.Testing, repo: Pleroma.Repo
+
+ import Mox
+
+ alias Pleroma.UnstubbedConfigMock, as: ConfigMock
+ alias Pleroma.Web.RichMedia.Card
+
+ setup do
+ ConfigMock
+ |> stub_with(Pleroma.Test.StaticConfig)
+
+ clear_config([:rich_media, :enabled], true)
+
+ :ok
+ end
+
+ test "OpenGraph TTL value is honored" do
+ url = "https://reddit.com/r/somepost"
+
+ Tesla.Mock.mock(fn
+ %{
+ method: :get,
+ url: ^url
+ } ->
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/reddit.html")}
+
+ %{method: :head} ->
+ %Tesla.Env{status: 200}
+ end)
+
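+ # the fixture page advertises an OpenGraph TTL, so backfilling should enqueue an expiration job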
+ Card.get_or_backfill_by_url(url)
+
+ assert_enqueued(worker: Pleroma.Workers.RichMediaExpirationWorker, args: %{"url" => url})
+ end
+end
diff --git a/test/pleroma/web/rich_media/parser_test.exs b/test/pleroma/web/rich_media/parser_test.exs
index 6a9bae8171..3fcb5c8089 100644
--- a/test/pleroma/web/rich_media/parser_test.exs
+++ b/test/pleroma/web/rich_media/parser_test.exs
@@ -3,10 +3,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.ParserTest do
- use Pleroma.DataCase, async: false
+ use Pleroma.DataCase
alias Pleroma.Web.RichMedia.Parser
- alias Pleroma.Web.RichMedia.Parser.Embed
import Tesla.Mock
@@ -14,123 +13,84 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
end
- test "returns empty embed when no metadata present" do
- expected = %Embed{
- meta: %{},
- oembed: nil,
- title: nil,
- url: "https://example.com/empty"
- }
+ test "returns error when no metadata present" do
+ assert {:error, _} = Parser.parse("https://example.com/empty")
+ end
- assert Parser.parse("https://example.com/empty") == {:ok, expected}
+ test "doesn't just add a title" do
+ assert {:error, {:invalid_metadata, _}} = Parser.parse("https://example.com/non-ogp")
end
test "parses ogp" do
- url = "https://example.com/ogp"
-
- expected = %Embed{
- meta: %{
- "og:image" => "http://ia.media-imdb.com/images/rock.jpg",
- "og:title" => "The Rock",
- "og:description" =>
- "Directed by Michael Bay. With Sean Connery, Nicolas Cage, Ed Harris, John Spencer.",
- "og:type" => "video.movie",
- "og:url" => "http://www.imdb.com/title/tt0117500/"
- },
- oembed: nil,
- title: "The Rock (1996)",
- url: "https://example.com/ogp"
- }
-
- assert Parser.parse(url) == {:ok, expected}
+ assert Parser.parse("https://example.com/ogp") ==
+ {:ok,
+ %{
+ "image" => "http://ia.media-imdb.com/images/rock.jpg",
+ "title" => "The Rock",
+ "description" =>
+ "Directed by Michael Bay. With Sean Connery, Nicolas Cage, Ed Harris, John Spencer.",
+ "type" => "video.movie",
+ "url" => "https://example.com/ogp"
+ }}
end
- test "gets tag" do
- url = "https://example.com/ogp-missing-title"
- expected = "The Rock (1996)"
- assert {:ok, %Embed{title: ^expected}} = Parser.parse(url)
+ test "falls back to when ogp:title is missing" do
+ assert Parser.parse("https://example.com/ogp-missing-title") ==
+ {:ok,
+ %{
+ "image" => "http://ia.media-imdb.com/images/rock.jpg",
+ "title" => "The Rock (1996)",
+ "description" =>
+ "Directed by Michael Bay. With Sean Connery, Nicolas Cage, Ed Harris, John Spencer.",
+ "type" => "video.movie",
+ "url" => "https://example.com/ogp-missing-title"
+ }}
end
test "parses twitter card" do
- url = "https://example.com/twitter-card"
-
- expected = %Embed{
- meta: %{
- "twitter:card" => "summary",
- "twitter:description" => "View the album on Flickr.",
- "twitter:image" => "https://farm6.staticflickr.com/5510/14338202952_93595258ff_z.jpg",
- "twitter:site" => "@flickr",
- "twitter:title" => "Small Island Developing States Photo Submission"
- },
- oembed: nil,
- title: nil,
- url: "https://example.com/twitter-card"
- }
-
- assert Parser.parse(url) == {:ok, expected}
+ assert Parser.parse("https://example.com/twitter-card") ==
+ {:ok,
+ %{
+ "card" => "summary",
+ "site" => "@flickr",
+ "image" => "https://farm6.staticflickr.com/5510/14338202952_93595258ff_z.jpg",
+ "title" => "Small Island Developing States Photo Submission",
+ "description" => "View the album on Flickr.",
+ "url" => "https://example.com/twitter-card"
+ }}
end
- test "parses OEmbed" do
- url = "https://example.com/oembed"
-
- expected = %Embed{
- meta: %{},
- oembed: %{
- "author_name" => "\u202E\u202D\u202Cbees\u202C",
- "author_url" => "https://www.flickr.com/photos/bees/",
- "cache_age" => 3600,
- "flickr_type" => "photo",
- "height" => "768",
- "html" =>
- "",
- "license" => "All Rights Reserved",
- "license_id" => 0,
- "provider_name" => "Flickr",
- "provider_url" => "https://www.flickr.com/",
- "thumbnail_height" => 150,
- "thumbnail_url" => "https://farm4.staticflickr.com/3040/2362225867_4a87ab8baf_q.jpg",
- "thumbnail_width" => 150,
- "title" => "Bacon Lollys",
- "type" => "photo",
- "url" => "https://farm4.staticflickr.com/3040/2362225867_4a87ab8baf_b.jpg",
- "version" => "1.0",
- "web_page" => "https://www.flickr.com/photos/bees/2362225867/",
- "web_page_short_url" => "https://flic.kr/p/4AK2sc",
- "width" => "1024"
- },
- url: "https://example.com/oembed"
- }
-
- assert Parser.parse(url) == {:ok, expected}
+ test "parses OEmbed and filters HTML tags" do
+ assert Parser.parse("https://example.com/oembed") ==
+ {:ok,
+ %{
+ "author_name" => "\u202E\u202D\u202Cbees\u202C",
+ "author_url" => "https://www.flickr.com/photos/bees/",
+ "cache_age" => 3600,
+ "flickr_type" => "photo",
+ "height" => "768",
+ "html" =>
+ "",
+ "license" => "All Rights Reserved",
+ "license_id" => 0,
+ "provider_name" => "Flickr",
+ "provider_url" => "https://www.flickr.com/",
+ "thumbnail_height" => 150,
+ "thumbnail_url" =>
+ "https://farm4.staticflickr.com/3040/2362225867_4a87ab8baf_q.jpg",
+ "thumbnail_width" => 150,
+ "title" => "Bacon Lollys",
+ "type" => "photo",
+ "url" => "https://example.com/oembed",
+ "version" => "1.0",
+ "web_page" => "https://www.flickr.com/photos/bees/2362225867/",
+ "web_page_short_url" => "https://flic.kr/p/4AK2sc",
+ "width" => "1024"
+ }}
end
- test "cleans corrupted meta data" do
- expected = %Embed{
- meta: %{
- "Keywords" => "Konsument i zakupy",
- "ROBOTS" => "NOARCHIVE",
- "fb:app_id" => "515714931781741",
- "fb:pages" => "288018984602680",
- "google-site-verification" => "3P4BE3hLw82QWqtseIE60qQcOtrpMxMnCNkcv62pjTA",
- "news_keywords" => "Konsument i zakupy",
- "og:image" =>
- "https://bi.im-g.pl/im/f7/49/17/z24418295FBW,Prace-nad-projektem-chusty-antysmogowej-rozpoczely.jpg",
- "og:locale" => "pl_PL",
- "og:site_name" => "wyborcza.biz",
- "og:type" => "article",
- "og:url" =>
- "http://wyborcza.biz/biznes/7,147743,24417936,pomysl-na-biznes-chusta-ktora-chroni-przed-smogiem.html",
- "twitter:card" => "summary_large_image",
- "twitter:image" =>
- "https://bi.im-g.pl/im/f7/49/17/z24418295FBW,Prace-nad-projektem-chusty-antysmogowej-rozpoczely.jpg",
- "viewport" => "width=device-width, user-scalable=yes"
- },
- oembed: nil,
- title: nil,
- url: "https://example.com/malformed"
- }
-
- assert Parser.parse("https://example.com/malformed") == {:ok, expected}
+ test "rejects invalid OGP data" do
+ assert {:error, _} = Parser.parse("https://example.com/malformed")
end
test "returns error if getting page was not successful" do
@@ -144,4 +104,27 @@ test "does a HEAD request to check if the body is too large" do
test "does a HEAD request to check if the body is html" do
assert {:error, {:content_type, _}} = Parser.parse("https://example.com/pdf-file")
end
+
+ test "refuses to crawl incomplete URLs" do
+ url = "example.com/ogp"
+ assert :error == Parser.parse(url)
+ end
+
+ test "refuses to crawl malformed URLs" do
+ url = "example.com[]/ogp"
+ assert :error == Parser.parse(url)
+ end
+
+ test "refuses to crawl URLs of private network from posts" do
+ [
+ "http://127.0.0.1:4000/notice/9kCP7VNyPJXFOXDrgO",
+ "https://10.111.10.1/notice/9kCP7V",
+ "https://172.16.32.40/notice/9kCP7V",
+ "https://192.168.10.40/notice/9kCP7V",
+ "https://pleroma.local/notice/9kCP7V"
+ ]
+ |> Enum.each(fn url ->
+ assert :error == Parser.parse(url)
+ end)
+ end
end
diff --git a/test/pleroma/web/rich_media/parsers/twitter_card_test.exs b/test/pleroma/web/rich_media/parsers/twitter_card_test.exs
index db53fe6980..44e8a80a0d 100644
--- a/test/pleroma/web/rich_media/parsers/twitter_card_test.exs
+++ b/test/pleroma/web/rich_media/parsers/twitter_card_test.exs
@@ -6,10 +6,8 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
use ExUnit.Case, async: true
alias Pleroma.Web.RichMedia.Parsers.TwitterCard
- test "fails gracefully with barebones HTML" do
- html = [{"html", [], [{"head", [], []}, {"body", [], []}]}]
- expected = %{meta: %{}, title: nil}
- assert TwitterCard.parse(html, %{}) == expected
+ test "returns error when html not contains twitter card" do
+ assert TwitterCard.parse([{"html", [], [{"head", [], []}, {"body", [], []}]}], %{}) == %{}
end
test "parses twitter card with only name attributes" do
@@ -17,24 +15,22 @@ test "parses twitter card with only name attributes" do
File.read!("test/fixtures/nypd-facial-recognition-children-teenagers3.html")
|> Floki.parse_document!()
- assert %{
- title:
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times",
- meta: %{
- "twitter:app:id:googleplay" => "com.nytimes.android",
- "twitter:app:name:googleplay" => "NYTimes",
- "twitter:app:url:googleplay" => "nytimes://reader/id/100000006583622",
- "og:description" =>
+ assert TwitterCard.parse(html, %{}) ==
+ %{
+ "app:id:googleplay" => "com.nytimes.android",
+ "app:name:googleplay" => "NYTimes",
+ "app:url:googleplay" => "nytimes://reader/id/100000006583622",
+ "site" => nil,
+ "description" =>
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "og:image" =>
+ "image" =>
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
- "og:title" =>
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "og:type" => "article",
- "og:url" =>
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html"
+ "type" => "article",
+ "url" =>
+ "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
+ "title" =>
+ "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database."
}
- } = TwitterCard.parse(html, %{})
end
test "parses twitter card with only property attributes" do
@@ -42,31 +38,20 @@ test "parses twitter card with only property attributes" do
File.read!("test/fixtures/nypd-facial-recognition-children-teenagers2.html")
|> Floki.parse_document!()
- assert %{
- title:
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times",
- meta: %{
- "twitter:card" => "summary_large_image",
- "twitter:description" =>
+ assert TwitterCard.parse(html, %{}) ==
+ %{
+ "card" => "summary_large_image",
+ "description" =>
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "twitter:image" =>
+ "image" =>
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
- "twitter:image:alt" => "",
- "twitter:title" =>
+ "image:alt" => "",
+ "title" =>
"She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "twitter:url" =>
+ "url" =>
"https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "og:description" =>
- "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "og:image" =>
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
- "og:title" =>
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "og:url" =>
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "og:type" => "article"
+ "type" => "article"
}
- } = TwitterCard.parse(html, %{})
end
test "parses twitter card with name & property attributes" do
@@ -74,43 +59,47 @@ test "parses twitter card with name & property attributes" do
File.read!("test/fixtures/nypd-facial-recognition-children-teenagers.html")
|> Floki.parse_document!()
- assert %{
- title:
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times",
- meta: %{
- "twitter:app:id:googleplay" => "com.nytimes.android",
- "twitter:app:name:googleplay" => "NYTimes",
- "twitter:app:url:googleplay" => "nytimes://reader/id/100000006583622",
- "twitter:card" => "summary_large_image",
- "twitter:description" =>
+ assert TwitterCard.parse(html, %{}) ==
+ %{
+ "app:id:googleplay" => "com.nytimes.android",
+ "app:name:googleplay" => "NYTimes",
+ "app:url:googleplay" => "nytimes://reader/id/100000006583622",
+ "card" => "summary_large_image",
+ "description" =>
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "twitter:image" =>
+ "image" =>
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
- "twitter:image:alt" => "",
- "twitter:title" =>
+ "image:alt" => "",
+ "site" => nil,
+ "title" =>
"She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "twitter:url" =>
+ "url" =>
"https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "og:description" =>
- "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
- "og:image" =>
- "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
- "og:title" =>
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
- "og:url" =>
- "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html",
- "og:type" => "article"
+ "type" => "article"
}
- } = TwitterCard.parse(html, %{})
end
test "respect only first title tag on the page" do
+ image_path =
+ "https://assets.atlasobscura.com/media/W1siZiIsInVwbG9hZHMvYXNzZXRzLzkwYzgyMzI4LThlMDUtNGRiNS05MDg3LTUzMGUxZTM5N2RmMmVkOTM5ZDM4MGM4OTIx" <>
+ "YTQ5MF9EQVIgZXhodW1hdGlvbiBvZiBNYXJnYXJldCBDb3JiaW4gZ3JhdmUgMTkyNi5qcGciXSxbInAiLCJjb252ZXJ0IiwiIl0sWyJwIiwiY29udmVydCIsIi1xdWFsaXR5IDgxIC1hdXRvLW9" <>
+ "yaWVudCJdLFsicCIsInRodW1iIiwiNjAweD4iXV0/DAR%20exhumation%20of%20Margaret%20Corbin%20grave%201926.jpg"
+
html =
File.read!("test/fixtures/margaret-corbin-grave-west-point.html") |> Floki.parse_document!()
- expected = "The Missing Grave of Margaret Corbin, Revolutionary War Veteran - Atlas Obscura"
-
- assert %{title: ^expected} = TwitterCard.parse(html, %{})
+ assert TwitterCard.parse(html, %{}) ==
+ %{
+ "site" => "@atlasobscura",
+ "title" => "The Missing Grave of Margaret Corbin, Revolutionary War Veteran",
+ "card" => "summary_large_image",
+ "image" => image_path,
+ "description" =>
+ "She's the only woman veteran honored with a monument at West Point. But where was she buried?",
+ "site_name" => "Atlas Obscura",
+ "type" => "article",
+ "url" => "http://www.atlasobscura.com/articles/margaret-corbin-grave-west-point"
+ }
end
test "takes first title found in html head if there is an html markup error" do
@@ -118,9 +107,21 @@ test "takes first title found in html head if there is an html markup error" do
File.read!("test/fixtures/nypd-facial-recognition-children-teenagers4.html")
|> Floki.parse_document!()
- expected =
- "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database. - The New York Times"
-
- assert %{title: ^expected} = TwitterCard.parse(html, %{})
+ assert TwitterCard.parse(html, %{}) ==
+ %{
+ "site" => nil,
+ "title" =>
+ "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
+ "app:id:googleplay" => "com.nytimes.android",
+ "app:name:googleplay" => "NYTimes",
+ "app:url:googleplay" => "nytimes://reader/id/100000006583622",
+ "description" =>
+ "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
+ "image" =>
+ "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
+ "type" => "article",
+ "url" =>
+ "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html"
+ }
end
end
diff --git a/test/pleroma/web/web_finger_test.exs b/test/pleroma/web/web_finger_test.exs
index 501841daaa..23aefb5640 100644
--- a/test/pleroma/web/web_finger_test.exs
+++ b/test/pleroma/web/web_finger_test.exs
@@ -226,4 +226,18 @@ test "prevents spoofing" do
{:error, _data} = WebFinger.finger("alex@gleasonator.com")
end
end
+
+ @tag capture_log: true
+ test "prevents forgeries" do
+ Tesla.Mock.mock(fn
+ %{url: "https://fba.ryona.agency/.well-known/webfinger?resource=acct:graf@fba.ryona.agency"} ->
+ fake_webfinger =
+ File.read!("test/fixtures/webfinger/graf-imposter-webfinger.json") |> Jason.decode!()
+
+ Tesla.Mock.json(fake_webfinger)
+
+ %{url: "https://fba.ryona.agency/.well-known/host-meta"} ->
+ {:ok, %Tesla.Env{status: 404}}
+ end)
+ end
end
diff --git a/test/pleroma/webhook/notify_test.ex b/test/pleroma/webhook/notify_test.ex
deleted file mode 100644
index 8aa9de08c6..0000000000
--- a/test/pleroma/webhook/notify_test.ex
+++ /dev/null
@@ -1,29 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Webhook.NotifyTest do
- use Pleroma.DataCase, async: true
-
- alias Pleroma.Webhook
- alias Pleroma.Webhook.Notify
-
- import Pleroma.Factory
-
- test "notifies have a valid signature" do
- activity = insert(:report_activity)
-
- %{secret: secret} =
- webhook = Webhook.create(%{url: "https://example.com/webhook", events: [:"report.created"]})
-
- Tesla.Mock.mock(fn %{url: "https://example.com/webhook", body: body, headers: headers} = _ ->
- {"X-Hub-Signature", "sha256=" <> signature} =
- Enum.find(headers, fn {key, _} -> key == "X-Hub-Signature" end)
-
- assert signature == :crypto.mac(:hmac, :sha256, secret, body) |> Base.encode16()
- %Tesla.Env{status: 200, body: ""}
- end)
-
- Notify.report_created(webhook, activity)
- end
-end
diff --git a/test/pleroma/webhook/notify_test.exs b/test/pleroma/webhook/notify_test.exs
index 4190b1a170..f2a05400ad 100644
--- a/test/pleroma/webhook/notify_test.exs
+++ b/test/pleroma/webhook/notify_test.exs
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Webhook.NotifyTest do
- use Pleroma.DataCase, async: true
+ use Pleroma.DataCase
alias Pleroma.Webhook
alias Pleroma.Webhook.Notify
@@ -21,7 +21,7 @@ test "notifies have a valid signature" do
{"X-Hub-Signature", "sha256=" <> signature} =
Enum.find(headers, fn {key, _} -> key == "X-Hub-Signature" end)
- assert signature == :crypto.mac(:hmac, :sha256, secret, body) |> Base.encode16()
+ assert signature == :crypto.mac(:hmac, :sha256, secret, body) |> Base.encode16(case: :lower)
%Tesla.Env{status: 200, body: ""}
end)
diff --git a/test/support/data_case.ex b/test/support/data_case.ex
index 14403f0b81..52d4bef1a1 100644
--- a/test/support/data_case.ex
+++ b/test/support/data_case.ex
@@ -116,6 +116,7 @@ def stub_pipeline do
Mox.stub_with(Pleroma.Web.FederatorMock, Pleroma.Web.Federator)
Mox.stub_with(Pleroma.ConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Test.StaticConfig)
+ Mox.stub_with(Pleroma.StubbedHTTPSignaturesMock, Pleroma.Test.HTTPSignaturesProxy)
end
def ensure_local_uploader(context) do
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
index 50216470ea..87ec9c14e9 100644
--- a/test/support/http_request_mock.ex
+++ b/test/support/http_request_mock.ex
@@ -1389,27 +1389,6 @@ def get("https://misskey.io/users/83ssedkv53", _, _, _) do
}}
end
- def get("https://misskey.io/notes/8vs6wxufd0", _, _, _) do
- {:ok,
- %Tesla.Env{
- status: 200,
- body: File.read!("test/fixtures/tesla_mock/misskey.io_8vs6wxufd0.json"),
- headers: activitypub_object_headers()
- }}
- end
-
- def get("https://mitra.social/objects/01830912-1357-d4c5-e4a2-76eab347e749", _, _, _) do
- {:ok,
- %Tesla.Env{
- status: 200,
- body:
- File.read!(
- "test/fixtures/tesla_mock/mitra.social_01830912-1357-d4c5-e4a2-76eab347e749.json"
- ),
- headers: activitypub_object_headers()
- }}
- end
-
def get("https://gleasonator.com/users/macgirvin", _, _, _) do
{:ok,
%Tesla.Env{
@@ -1431,15 +1410,6 @@ def get("https://gleasonator.com/users/macgirvin/collections/featured", _, _, _)
}}
end
- def get("https://friends.grishka.me/posts/54642", _, _, _) do
- {:ok,
- %Tesla.Env{
- status: 200,
- body: File.read!("test/fixtures/tesla_mock/smithereen_non_anonymous_poll.json"),
- headers: activitypub_object_headers()
- }}
- end
-
def get("https://mk.absturztau.be/users/8ozbzjs3o8", _, _, _) do
{:ok,
%Tesla.Env{
@@ -1449,15 +1419,6 @@ def get("https://mk.absturztau.be/users/8ozbzjs3o8", _, _, _) do
}}
end
- def get("https://friends.grishka.me/users/1", _, _, _) do
- {:ok,
- %Tesla.Env{
- status: 200,
- body: File.read!("test/fixtures/tesla_mock/smithereen_user.json"),
- headers: activitypub_object_headers()
- }}
- end
-
def get("https://p.helene.moe/users/helene", _, _, _) do
{:ok,
%Tesla.Env{
@@ -1494,31 +1455,69 @@ def get("https://p.helene.moe/objects/fd5910ac-d9dc-412e-8d1d-914b203296c4", _,
}}
end
- def get(
- "https://nominatim.openstreetmap.org/search?format=geocodejson&q=Benis&limit=10&accept-language=en&addressdetails=1&namedetails=1",
- _,
- _,
- _
- ) do
+ def get("https://misskey.io/notes/8vs6wxufd0", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/tesla_mock/nominatim_search_results.json"),
- headers: [{"content-type", "application/json"}]
+ body: File.read!("test/fixtures/tesla_mock/misskey.io_8vs6wxufd0.json"),
+ headers: activitypub_object_headers()
}}
end
- def get(
- "https://nominatim.openstreetmap.org/lookup?format=geocodejson&osm_ids=N3726208425,R3726208425,W3726208425&accept-language=en&addressdetails=1&namedetails=1",
- _,
- _,
- _
- ) do
+ def get("https://google.com/", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/google.html")}}
+ end
+
+ def get("https://yahoo.com/", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/yahoo.html")}}
+ end
+
+ def get("https://example.com/error", _, _, _), do: {:error, :overload}
+
+ def get("https://example.com/ogp-missing-title", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/tesla_mock/nominatim_single_result.json"),
- headers: [{"content-type", "application/json"}]
+ body: File.read!("test/fixtures/rich_media/ogp-missing-title.html")
+ }}
+ end
+
+ def get("https://example.com/oembed", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.html")}}
+ end
+
+ def get("https://example.com/oembed.json", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.json")}}
+ end
+
+ def get("https://example.com/twitter-card", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/twitter_card.html")}}
+ end
+
+ def get("https://example.com/non-ogp", _, _, _) do
+ {:ok,
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/non_ogp_embed.html")}}
+ end
+
+ def get("https://example.com/empty", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: "hello"}}
+ end
+
+ def get("https://friends.grishka.me/posts/54642", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/smithereen_non_anonymous_poll.json"),
+ headers: activitypub_object_headers()
+ }}
+ end
+
+ def get("https://friends.grishka.me/users/1", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/smithereen_user.json"),
+ headers: activitypub_object_headers()
}}
end
@@ -1636,60 +1635,43 @@ def get("https://sub.pleroma.example/users/a", _, _, _) do
}}
end
- def get("https://google.com/", _, _, _) do
- {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/google.html")}}
- end
-
- def get("https://yahoo.com/", _, _, _) do
- {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/yahoo.html")}}
- end
-
- def get("https://example.com/error", _, _, _), do: {:error, :overload}
-
- def get("https://example.com/ogp-missing-title", _, _, _) do
+ def get("https://mitra.social/objects/01830912-1357-d4c5-e4a2-76eab347e749", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/rich_media/ogp-missing-title.html")
- }}
- end
-
- def get("https://example.com/oembed", _, _, _) do
- {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.html")}}
- end
-
- def get("https://example.com/oembed.json", _, _, _) do
- {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.json")}}
- end
-
- def get("https://example.com/twitter-card", _, _, _) do
- {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/twitter_card.html")}}
- end
-
- def get("https://example.com/non-ogp", _, _, _) do
- {:ok,
- %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/non_ogp_embed.html")}}
- end
-
- def get("https://example.com/empty", _, _, _) do
- {:ok, %Tesla.Env{status: 200, body: "hello"}}
- end
-
- def get("https://friends.grishka.me/posts/54642", _, _, _) do
- {:ok,
- %Tesla.Env{
- status: 200,
- body: File.read!("test/fixtures/tesla_mock/smithereen_non_anonymous_poll.json"),
+ body:
+ File.read!(
+ "test/fixtures/tesla_mock/mitra.social_01830912-1357-d4c5-e4a2-76eab347e749.json"
+ ),
headers: activitypub_object_headers()
}}
end
- def get("https://friends.grishka.me/users/1", _, _, _) do
+ def get(
+ "https://nominatim.openstreetmap.org/search?format=geocodejson&q=Benis&limit=10&accept-language=en&addressdetails=1&namedetails=1",
+ _,
+ _,
+ _
+ ) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/tesla_mock/smithereen_user.json"),
- headers: activitypub_object_headers()
+ body: File.read!("test/fixtures/tesla_mock/nominatim_search_results.json"),
+ headers: [{"content-type", "application/json"}]
+ }}
+ end
+
+ def get(
+ "https://nominatim.openstreetmap.org/lookup?format=geocodejson&osm_ids=N3726208425,R3726208425,W3726208425&accept-language=en&addressdetails=1&namedetails=1",
+ _,
+ _,
+ _
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/nominatim_single_result.json"),
+ headers: [{"content-type", "application/json"}]
}}
end
diff --git a/test/support/http_signatures_proxy.ex b/test/support/http_signatures_proxy.ex
new file mode 100644
index 0000000000..4c6b39d19c
--- /dev/null
+++ b/test/support/http_signatures_proxy.ex
@@ -0,0 +1,9 @@
+defmodule Pleroma.Test.HTTPSignaturesProxy do
+ @behaviour Pleroma.HTTPSignaturesAPI
+
+ @impl true
+ defdelegate validate_conn(conn), to: HTTPSignatures
+
+ @impl true
+ defdelegate signature_for_conn(conn), to: HTTPSignatures
+end
diff --git a/test/support/mocks.ex b/test/support/mocks.ex
index d906f0e1da..63cbc49ab6 100644
--- a/test/support/mocks.ex
+++ b/test/support/mocks.ex
@@ -28,6 +28,7 @@
Mox.defmock(Pleroma.ConfigMock, for: Pleroma.Config.Getting)
Mox.defmock(Pleroma.UnstubbedConfigMock, for: Pleroma.Config.Getting)
Mox.defmock(Pleroma.StaticStubbedConfigMock, for: Pleroma.Config.Getting)
+Mox.defmock(Pleroma.StubbedHTTPSignaturesMock, for: Pleroma.HTTPSignaturesAPI)
Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging)