Merge branch 'multilang' into backend-new

Signed-off-by: marcin mikołajczak <git@mkljczk.pl>

Commit 9040b98b07: 242 changed files with 6011 additions and 2409 deletions
@@ -1,8 +1,8 @@
-image: git.pleroma.social:5050/pleroma/pleroma/ci-base
+image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24

 variables: &global_variables
   # Only used for the release
-  ELIXIR_VER: 1.12.3
+  ELIXIR_VER: 1.13.4
   POSTGRES_DB: pleroma_test
   POSTGRES_USER: postgres
   POSTGRES_PASSWORD: postgres

@@ -72,7 +72,7 @@ check-changelog:
   tags:
     - amd64

-build-1.12.3:
+build-1.13.4:
   extends:
     - .build_changes_policy
     - .using-ci-base

@@ -85,7 +85,7 @@ build-1.15.7-otp-25:
     - .build_changes_policy
     - .using-ci-base
   stage: build
-  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15
+  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
   allow_failure: true
   script:
     - mix compile --force
@@ -1,7 +1,7 @@
 ARG ELIXIR_IMG=hexpm/elixir
-ARG ELIXIR_VER=1.12.3
-ARG ERLANG_VER=24.2.1
-ARG ALPINE_VER=3.17.0
+ARG ELIXIR_VER=1.13.4
+ARG ERLANG_VER=24.3.4.15
+ARG ALPINE_VER=3.17.5

 FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build
@@ -1,4 +1,4 @@
-A fork of Pleroma/Rebased. More information soon.
+`pl`. A fork of Pleroma/Rebased. More information soon.

 ---
app.json (6 changed lines)
@@ -1,10 +1,10 @@
 {
-  "name": "Rebased",
-  "description": "Rebased, the recommended backend for Soapbox written in Elixir.",
+  "name": "pl",
+  "description": "Federated social media software, a fork of Pleroma/Rebased",
   "keywords": [
     "fediverse"
   ],
-  "website": "https://soapbox.pub",
+  "website": "https://github.com/mkljczk/pl",
   "dokku": {
     "plugins": [
       "postgres"
New changelog fragments under changelog.d/ (one entry per file):

changelog.d/3904.security: HTTP Security: By default, don't allow unsafe-eval. The setting needs to be changed to allow Flash emulation.
changelog.d/3907.skip: (empty)
changelog.d/add-ipfs-upload.add: Uploader: Add support for uploading attachments using IPFS
changelog.d/add-nsfw-mrf.add: Add NSFW-detecting MRF
changelog.d/add-rbl-mrf.add: Add DNSRBL MRF
changelog.d/anti-mentionspam-mrf.add: Add Anti-mention Spam MRF backported from Rebased
changelog.d/api-docs-2.skip: (empty)
changelog.d/auth-fetch-exception.add: HTTPSignaturePlug: Add :authorized_fetch_mode_exceptions configuration
changelog.d/authorized-fetch-rejections.add: Add an option to reject certain domains when authorized fetch is enabled.
changelog.d/bump-elixir.change: Elixir 1.13 is the minimum required version.
changelog.d/card-endpoint.remove: Mastodon API: Remove deprecated GET /api/v1/statuses/:id/card endpoint https://github.com/mastodon/mastodon/pull/11213
changelog.d/card-image-description.add: Include image description in status media cards
changelog.d/description-meilisearch-type.skip: (empty)
changelog.d/familiar-followers.add: Implement `/api/v1/accounts/familiar_followers`
changelog.d/fep-2c59.add: Implement FEP-2c59, add "webfinger" to user actor
changelog.d/ffmpeg-limiter.add: Framegrabs with ffmpeg will execute with a 5 second timeout and cache the URLs of failures with a TTL of 15 minutes to prevent excessive retries.
changelog.d/fix-webfinger-spoofing.security: Fix webfinger spoofing.
changelog.d/instance-rules.add: Add instance rules
changelog.d/logger-metadata.add: Logger metadata is now attached to some logs to help with troubleshooting and analysis
changelog.d/mark-read.fix: The query for marking notifications as read has been simplified
changelog.d/mastodon_api_v2.add: Add new parameters to /api/v2/instance: configuration[accounts][max_pinned_statuses] and configuration[statuses][characters_reserved_per_url]
changelog.d/mediaproxy-http.fix: Ensure MediaProxy HTTP requests obey all the defined connection settings
changelog.d/missing-mrfs.add: Startup detection for configured MRF modules that are missing or incorrectly defined
changelog.d/oban-queues.change: Oban queues have been refactored to simplify the queue design
changelog.d/pools.change: HTTP connection pool adjustments
changelog.d/prometheus-docs.change: Update the documentation for configuring Prometheus metrics.
changelog.d/promexdocs.add: PromEx documentation
changelog.d/qdrant_search.add: Add Qdrant/OpenAI embedding search
changelog.d/realpath-over-readlink.fix: pleroma_ctl: Use realpath(1) instead of readlink(1)
changelog.d/receiverworker-error-handling.fix: ReceiverWorker: Make sure non-{:ok, _} is returned as {:error, …}
changelog.d/reply-to-deleted.change: A 422 error is returned when attempting to reply to a deleted status
changelog.d/rich_media_refactor.change: Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.
changelog.d/richmediattl.fix: Fix parsing of RichMedia TTLs for Amazon URLs when query parameters are nil
changelog.d/search-healthcheck.add: Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue
changelog.d/show-reposter-replies.add: Display reposted replies with exclude_replies: true
changelog.d/status-notification-type.add: Add "status" notification type
changelog.d/support-honk-image-summaries.add: Support honk-style attachment summaries as alt-text.
changelog.d/web_push_filtered.fix: Web Push notifications are no longer generated for muted/blocked threads and users.
changelog.d/webfinger-validation.fix: Fix validate_webfinger when running a different domain for Webfinger
ci/elixir-1.13/Dockerfile (new file, 8 lines)
@@ -0,0 +1,8 @@
+FROM elixir:1.13.4-otp-24
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+	apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+	mix local.hex --force &&\
+	mix local.rebar --force

ci/elixir-1.13/build_and_push.sh (new executable file)
@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push .

@@ -1 +1 @@
-docker buildx build --platform linux/amd64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
@@ -82,6 +82,10 @@
   # region: "us-east-1", # may be required for Amazon AWS
   scheme: "https://"

+config :pleroma, Pleroma.Uploaders.IPFS,
+  post_gateway_url: nil,
+  get_gateway_url: nil
+
 config :pleroma, :emoji,
   shortcode_globs: ["/emoji/custom/**/*.png"],
   pack_extensions: [".png", ".gif"],

@@ -131,13 +135,13 @@
 config :logger, :console,
   level: :debug,
   format: "\n$time $metadata[$level] $message\n",
-  metadata: [:request_id]
+  metadata: [:actor, :path, :type, :user]

 config :logger, :ex_syslogger,
   level: :debug,
   ident: "pleroma",
   format: "$metadata[$level] $message",
-  metadata: [:request_id]
+  metadata: [:actor, :path, :type, :user]

 config :mime, :types, %{
   "application/xml" => ["xml"],

@@ -188,6 +192,7 @@
   allow_relay: true,
   public: true,
   quarantined_instances: [],
+  rejected_instances: [],
   static_dir: "instance/static/",
   allowed_post_formats: [
     "text/plain",

@@ -411,6 +416,11 @@
   accept: [],
   reject: []

+config :pleroma, :mrf_dnsrbl,
+  nameserver: "127.0.0.1",
+  port: 53,
+  zone: "bl.pleroma.com"
+
 # threshold of 7 days
 config :pleroma, :mrf_object_age,
   threshold: 604_800,
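For orientation, a minimal sketch of how the new DNSRBL settings might be wired up; the policy module name below is assumed from the "Add DNSRBL MRF" changelog entry and does not appear in this hunk:

```elixir
# Assumption: the DNSRBL policy module is named DNSRBLPolicy; it is enabled via :mrf.
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy]

# Settings added by this commit (defaults shown in the hunk above).
config :pleroma, :mrf_dnsrbl,
  nameserver: "127.0.0.1",
  port: 53,
  zone: "bl.pleroma.com"
```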
@@ -436,21 +446,26 @@
   ttl: 60_000,
   min_length: 50

 config :pleroma, :mrf_force_mention,
   mention_parent: true,
   mention_quoted: true

+config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000
+
 config :pleroma, :rich_media,
   enabled: true,
   ignore_hosts: [],
   ignore_tld: ["local", "localdomain", "lan"],
   parsers: [
-    Pleroma.Web.RichMedia.Parsers.OEmbed,
-    Pleroma.Web.RichMedia.Parsers.TwitterCard
+    Pleroma.Web.RichMedia.Parsers.TwitterCard,
+    Pleroma.Web.RichMedia.Parsers.OEmbed
   ],
   oembed_providers_enabled: true,
   failure_backoff: 60_000,
-  ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
+  ttl_setters: [
+    Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl,
+    Pleroma.Web.RichMedia.Parser.TTL.Opengraph
+  ],
   max_body: 5_000_000

 config :pleroma, :media_proxy,
   enabled: false,

@@ -515,7 +530,8 @@
   sts: false,
   sts_max_age: 31_536_000,
   ct_max_age: 2_592_000,
-  referrer_policy: "same-origin"
+  referrer_policy: "same-origin",
+  allow_unsafe_eval: false

 config :cors_plug,
   max_age: 86_400,

@@ -577,25 +593,15 @@
   log: false,
   queues: [
     activity_expiration: 10,
-    token_expiration: 5,
-    filter_expiration: 1,
-    backup: 1,
-    federator_incoming: 50,
-    federator_outgoing: 50,
+    federator_incoming: 5,
+    federator_outgoing: 5,
     ingestion_queue: 50,
     web_push: 50,
-    mailer: 10,
     transmogrifier: 20,
-    scheduled_activities: 10,
-    poll_notifications: 10,
-    notifications: 20,
     background: 5,
     remote_fetcher: 2,
-    attachments_cleanup: 1,
-    new_users_digest: 1,
-    mute_expire: 5,
-    search_indexing: 10,
-    check_domain_resolve: 1
+    search_indexing: [limit: 10, paused: true],
+    slow: 1
   ],
   plugins: [Oban.Plugins.Pruner],
   crontab: [
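Note the new `search_indexing: [limit: 10, paused: true]` entry: the queue starts paused and is toggled at runtime by the `Pleroma.Search.Healthcheck` process added later in this commit, using Oban's queue API:

```elixir
# The calls the healthcheck process makes, depending on backend health.
Oban.resume_queue(queue: :search_indexing)
Oban.pause_queue(queue: :search_indexing)
```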
@@ -845,22 +851,27 @@

 config :pleroma, :pools,
   federation: [
-    size: 50,
-    max_waiting: 10,
+    size: 75,
+    max_waiting: 20,
     recv_timeout: 10_000
   ],
   media: [
-    size: 50,
+    size: 75,
     max_waiting: 20,
     recv_timeout: 15_000
   ],
+  rich_media: [
+    size: 25,
+    max_waiting: 20,
+    recv_timeout: 15_000
+  ],
   upload: [
     size: 25,
-    max_waiting: 5,
+    max_waiting: 20,
     recv_timeout: 15_000
   ],
   default: [
-    size: 10,
+    size: 50,
     max_waiting: 2,
     recv_timeout: 5_000
   ]

@@ -874,6 +885,10 @@
     max_connections: 50,
     timeout: 150_000
   ],
+  rich_media: [
+    max_connections: 50,
+    timeout: 150_000
+  ],
   upload: [
     max_connections: 25,
     timeout: 300_000
@@ -919,8 +934,6 @@
   process_chunk_size: 100

 config :pleroma, ConcurrentLimiter, [
-  {Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]},
   {Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]},
-  {Pleroma.Search, [max_running: 30, max_waiting: 50]},
   {Pleroma.Webhook.Notify, [max_running: 5, max_waiting: 200]}
 ]
@@ -963,6 +976,19 @@

 config :pleroma, Pleroma.Uploaders.Uploader, timeout: 30_000

+config :pleroma, Pleroma.Search.QdrantSearch,
+  qdrant_url: "http://127.0.0.1:6333/",
+  qdrant_api_key: "",
+  openai_url: "http://127.0.0.1:11345",
+  # The healthcheck url has to be set to nil when used with the real openai
+  # API, as it doesn't have a healthcheck endpoint.
+  openai_healthcheck_url: "http://127.0.0.1:11345/health",
+  openai_model: "snowflake/snowflake-arctic-embed-xs",
+  openai_api_key: "",
+  qdrant_index_configuration: %{
+    vectors: %{size: 384, distance: "Cosine"}
+  }
+
 # Import environment specific config. This must remain at the bottom
 # of this file so it overrides the configuration defined above.
 import_config "#{Mix.env()}.exs"
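These QdrantSearch defaults only take effect once the search module is switched over, as the search documentation later in this commit shows:

```elixir
config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch
```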
@@ -136,6 +136,31 @@
       }
     ]
   },
+  %{
+    group: :pleroma,
+    key: Pleroma.Uploaders.IPFS,
+    type: :group,
+    description: "IPFS uploader-related settings",
+    children: [
+      %{
+        key: :get_gateway_url,
+        type: :string,
+        description: "GET Gateway URL",
+        suggestions: [
+          "https://ipfs.mydomain.com/{CID}",
+          "https://{CID}.ipfs.mydomain.com/"
+        ]
+      },
+      %{
+        key: :post_gateway_url,
+        type: :string,
+        description: "POST Gateway URL",
+        suggestions: [
+          "http://localhost:5001/"
+        ]
+      }
+    ]
+  },
   %{
     group: :pleroma,
     key: Pleroma.Uploaders.S3,

@@ -749,6 +774,18 @@
         {"*.quarantined.com", "Reason"}
       ]
     },
+    %{
+      key: :rejected_instances,
+      type: {:list, :tuple},
+      key_placeholder: "instance",
+      value_placeholder: "reason",
+      description:
+        "List of ActivityPub instances to reject requests from if authorized_fetch_mode is enabled",
+      suggestions: [
+        {"rejected.com", "Reason"},
+        {"*.rejected.com", "Reason"}
+      ]
+    },
     %{
       key: :static_dir,
       type: :string,
@@ -1829,6 +1866,12 @@
       type: :boolean,
       description: "Require HTTP signatures for AP fetches"
     },
+    %{
+      key: :authorized_fetch_mode_exceptions,
+      type: {:list, :string},
+      description:
+        "List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)"
+    },
     %{
       key: :note_replies_output_limit,
       type: :integer,
@@ -2149,12 +2192,6 @@
       type: :boolean,
       description: "Enables RichMedia parsing of URLs"
     },
-    %{
-      key: :oembed_providers_enabled,
-      type: :boolean,
-      description:
-        "Embed rich media from a list of known providers. This takes precedence over other parsers."
-    },
     %{
       key: :ignore_hosts,
       type: {:list, :string},
@@ -3763,7 +3800,7 @@
     },
     %{
       key: :initial_indexing_chunk_size,
-      type: :int,
+      type: :integer,
       description:
         "Amount of posts in a batch when running the initial indexing operation. Should probably not be more than 100000" <>
           " since there's a limit on maximum insert size",
@@ -35,8 +35,8 @@
 # configured to run both http and https servers on
 # different ports.

-# Do not include metadata nor timestamps in development logs
-config :logger, :console, format: "[$level] $message\n"
+# Do not include timestamps in development logs
+config :logger, :console, format: "$metadata[$level] $message\n"

 # Set a higher stacktrace during development. Avoid configuring such
 # in production as building large stacktraces may be expensive.
@@ -67,7 +67,8 @@
 config :pleroma, :rich_media,
   enabled: false,
   ignore_hosts: [],
-  ignore_tld: ["local", "localdomain", "lan"]
+  ignore_tld: ["local", "localdomain", "lan"],
+  max_body: 2_000_000

 config :pleroma, :instance,
   multi_factor_authentication: [
@@ -164,6 +165,12 @@
 config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
 config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
 config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
+config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock
+config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, config_impl: Pleroma.StaticStubbedConfigMock
+
+config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug,
+  http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock

 peer_module =
   if String.to_integer(System.otp_release()) >= 25 do
@@ -186,6 +193,8 @@

 config :pleroma, Pleroma.Emoji.Loader, test_emoji: true

+config :pleroma, Pleroma.Web.RichMedia.Backfill, provider: Pleroma.Web.RichMedia.Backfill
+
 if File.exists?("./config/test.secret.exs") do
   import_config "test.secret.exs"
 else
@@ -36,6 +36,7 @@ To add configuration to your config file, you can copy it from the base config.
 * `allow_relay`: Permits remote instances to subscribe to all public posts of your instance. This may increase the visibility of your instance.
 * `public`: Makes the client API in authenticated mode-only except for user-profiles. Useful for disabling the Local Timeline and The Whole Known Network. Note that there is a dependent setting restricting or allowing unauthenticated access to specific resources, see `restrict_unauthenticated` for more details.
 * `quarantined_instances`: ActivityPub instances where private (DMs, followers-only) activities will not be sent.
+* `rejected_instances`: ActivityPub instances to reject requests from if authorized_fetch_mode is enabled.
 * `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML).
 * `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with older software for these nicknames.
@@ -283,6 +284,7 @@ Notes:
 * `deny_follow_blocked`: Whether to disallow following an account that has blocked the user in question
 * `sign_object_fetches`: Sign object fetches with HTTP signatures
 * `authorized_fetch_mode`: Require HTTP signatures for AP fetches
+* `authorized_fetch_mode_exceptions`: List of IPs (CIDR format accepted) to exempt from HTTP Signatures requirement (for example to allow debugging, you shouldn't otherwise need this)
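A sketch of the two settings combined; the `:activitypub` config key is assumed from Pleroma's base config rather than shown in this hunk, and the CIDR value is illustrative:

```elixir
# Assumption: authorized_fetch_mode lives under the :activitypub key.
config :pleroma, :activitypub,
  authorized_fetch_mode: true,
  authorized_fetch_mode_exceptions: ["127.0.0.1/32"]
```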

 ## Pleroma.User
@@ -433,7 +435,6 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
 * `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
 * `ignore_tld`: list of TLDs (top-level domains) which will be ignored when parsing metadata. Defaults to `["local", "localdomain", "lan"]`.
 * `parsers`: list of Rich Media parsers.
-* `oembed_providers_enabled`: Embed rich media from a list of known providers. This takes precedence over other parsers.
 * `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.

 ## HTTP server
@@ -472,6 +473,7 @@ This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls start
 * ``ct_max_age``: The maximum age for the `Expect-CT` header if sent.
 * ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`.
 * ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header.
+* `allow_unsafe_eval`: Adds `wasm-unsafe-eval` to the CSP header. Needed for some non-essential frontend features like Flash emulation.
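A sketch of enabling the new option; the `:http_security` key is assumed to be the base-config key that this commit extends with `allow_unsafe_eval`:

```elixir
# Assumption: the CSP settings live under the :http_security key.
config :pleroma, :http_security, allow_unsafe_eval: true
```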

 ### Pleroma.Web.Plugs.RemoteIp
@@ -662,6 +664,19 @@ config :ex_aws, :s3,
   host: "s3.eu-central-1.amazonaws.com"
 ```

+#### Pleroma.Uploaders.IPFS
+
+* `post_gateway_url`: URL with port of POST Gateway (unauthenticated)
+* `get_gateway_url`: URL of public GET Gateway
+
+Example:
+
+```elixir
+config :pleroma, Pleroma.Uploaders.IPFS,
+  post_gateway_url: "http://localhost:5001",
+  get_gateway_url: "http://{CID}.ipfs.mydomain.com"
+```
+
 ### Upload filters

 #### Pleroma.Upload.Filter.AnonymizeFilename
@@ -10,6 +10,30 @@ To use built-in search that has no external dependencies, set the search module
 While it has no external dependencies, it has problems with performance and relevancy.

+## QdrantSearch
+
+This uses the vector search engine [Qdrant](https://qdrant.tech) to search the posts in a vector space. It needs a way to generate embeddings and uses the [OpenAI API](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). This is implemented by several projects besides OpenAI itself, including the Python-based fastembed-server found in `supplemental/search/fastembed-api`.
+
+The default settings will support a setup where both the fastembed server and Qdrant run on the same system as pleroma. To use it, set the search provider and run the fastembed server; see the README in `supplemental/search/fastembed-api`:
+
+> config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch
+
+Then start the Qdrant server; see [here](https://qdrant.tech/documentation/quick-start/) for instructions.
+
+You will also need to create the Qdrant index once by running `mix pleroma.search.indexer create_index`. Running `mix pleroma.search.indexer index` will retroactively index the last 100_000 activities.
+
+### Indexing and model options
+
+To see the available configuration options, check out the QdrantSearch section in `config/config.exs`.
+
+The default indexing options work for the default model (`snowflake-arctic-embed-xs`). To optimize for a low memory footprint, adjust the index configuration as described in the [Qdrant docs](https://qdrant.tech/documentation/guides/optimize/). See also [this blog post](https://qdrant.tech/articles/memory-consumption/) that goes into detail.
+
+Different embedding models will need different vector size settings. You can see a list of the models supported by the fastembed server [here](https://qdrant.github.io/fastembed/examples/Supported_Models), including their vector dimensions. These vector dimensions need to be set in the `qdrant_index_configuration`.
+
+E.g. if you want to use `sentence-transformers/all-MiniLM-L6-v2` as a model, you will not need to adjust things, because it and `snowflake-arctic-embed-xs` are both 384-dimensional models. If you want to use `snowflake/snowflake-arctic-embed-l`, you will need to adjust the `size` parameter in the `qdrant_index_configuration` to 1024, as it has a dimension of 1024.
+
+When using a different model, you will need to drop the index and recreate it (`mix pleroma.search.indexer drop_index` and `mix pleroma.search.indexer create_index`), as the different embeddings are not compatible with each other.

 ## Meilisearch

 Note that it's quite a bit more memory hungry than PostgreSQL (around 4-5G for ~1.2 million
@@ -1907,3 +1907,52 @@ Note that this differs from the Mastodon API variant: Mastodon API only returns

 ```json
 {}
 ```
+
+## `GET /api/v1/pleroma/admin/rules`
+
+### List rules
+
+- Response: JSON, list of rules
+
+```json
+[
+  {
+    "id": "1",
+    "priority": 1,
+    "text": "There are no rules",
+    "hint": null
+  }
+]
+```
+
+## `POST /api/v1/pleroma/admin/rules`
+
+### Create a rule
+
+- Params:
+  - `text`: string, required, rule content
+  - `hint`: string, optional, rule description
+  - `priority`: integer, optional, rule ordering priority
+
+- Response: JSON, a single rule
+
+## `PATCH /api/v1/pleroma/admin/rules/:id`
+
+### Update a rule
+
+- Params:
+  - `text`: string, optional, rule content
+  - `hint`: string, optional, rule description
+  - `priority`: integer, optional, rule ordering priority
+
+- Response: JSON, a single rule
+
+## `DELETE /api/v1/pleroma/admin/rules/:id`
+
+### Delete a rule
+
+- Response: JSON, empty object
+
+```json
+{}
+```
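An illustrative request body for the create endpoint above, using the documented params (the values are hypothetical):

```json
{
  "text": "No spam",
  "hint": "Unsolicited advertising will be removed",
  "priority": 1
}
```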
@@ -40,6 +40,7 @@ Has these additional fields under the `pleroma` object:
 - `parent_visible`: If the parent of this post is visible to the user or not.
 - `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise.
 - `quotes_count`: the count of status quotes.
-- `event`: event information if the post is an event, `null` otherwise.
 - `non_anonymous`: true if the source post specifies the poll results are not anonymous. Currently only implemented by Smithereen.
+- `bookmark_folder`: the ID of the folder bookmark is stored within (if any).
+- `event`: event information if the post is an event, `null` otherwise.
@@ -406,9 +406,7 @@ See [Admin-API](admin_api.md)
     "id": "9umDrYheeY451cQnEe",
     "name": "Read later",
     "emoji": "🕓",
-    "source": {
-      "emoji": "🕓"
-    }
+    "emoji_url": null
   }
 ]
 ```
@@ -1,44 +1,47 @@
-# Prometheus Metrics
+# Prometheus / OpenTelemetry Metrics

-Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library.
+Pleroma includes support for exporting metrics via the [prom_ex](https://github.com/akoutmos/prom_ex) library.
+The metrics are exposed by a dedicated webserver/port to improve privacy and security.

 Config example:

 ```
-config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
-  enabled: true,
-  auth: {:basic, "myusername", "mypassword"},
-  ip_whitelist: ["127.0.0.1"],
-  path: "/api/pleroma/app_metrics",
-  format: :text
-```
-
-* `enabled` (Pleroma extension) enables the endpoint
-* `ip_whitelist` (Pleroma extension) could be used to restrict access only to specified IPs
-* `auth` sets the authentication (`false` for no auth; configurable to HTTP Basic Auth, see [prometheus-plugs](https://github.com/deadtrickster/prometheus-plugs#exporting) documentation)
-* `format` sets the output format (`:text` or `:protobuf`)
-* `path` sets the path to app metrics page
-
-## `/api/pleroma/app_metrics`
-
-### Exports Prometheus application metrics
-
-* Method: `GET`
-* Authentication: not required by default (see configuration options above)
-* Params: none
-* Response: text
-
-## Grafana
-
-### Config example
-
-The following is a config example to use with [Grafana](https://grafana.com)
-
-```
-  - job_name: 'beam'
-    metrics_path: /api/pleroma/app_metrics
-    scheme: https
+config :pleroma, Pleroma.PromEx,
+  disabled: false,
+  manual_metrics_start_delay: :no_delay,
+  drop_metrics_groups: [],
+  grafana: [
+    host: System.get_env("GRAFANA_HOST", "http://localhost:3000"),
+    auth_token: System.get_env("GRAFANA_TOKEN"),
+    upload_dashboards_on_start: false,
+    folder_name: "BEAM",
+    annotate_app_lifecycle: true
+  ],
+  metrics_server: [
+    port: 4021,
+    path: "/metrics",
+    protocol: :http,
+    pool_size: 5,
+    cowboy_opts: [],
+    auth_strategy: :none
+  ],
+  datasource: "Prometheus"
+```
+
+PromEx supports the ability to automatically publish dashboards to your Grafana server as well as register Annotations. If you do not wish to configure this capability you must generate the dashboard JSON files and import them directly. You can find the mix commands in the upstream [documentation](https://hexdocs.pm/prom_ex/Mix.Tasks.PromEx.Dashboard.Export.html). You can find the list of modules enabled in Pleroma for which you should generate dashboards by examining the contents of the `lib/pleroma/prom_ex.ex` module.
+
+## prometheus.yml
+
+The following is a bare minimum config example to use with [Prometheus](https://prometheus.io) or Prometheus-compatible software like [VictoriaMetrics](https://victoriametrics.com).
+
+```
+global:
+  scrape_interval: 15s
+
+scrape_configs:
+  - job_name: 'pleroma'
+    scheme: http
     static_configs:
-    - targets: ['pleroma.soykaf.com']
+    - targets: ['pleroma.soykaf.com:4021']
 ```
@@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have

 - PostgreSQL 11.0 or newer (Ubuntu 16.04 only ships 9.5, so get a newer version from [the PostgreSQL APT repository](https://www.postgresql.org/download/linux/ubuntu/))
 - `postgresql-contrib` 11.0 or newer (same as above)
-- Elixir 1.8 or newer ([Do not install it from the Debian repositories!!! Install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like) Or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
+- Elixir 1.13 or newer ([Do not install it from the Debian repositories!!! Install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like) Or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
 - `erlang-dev`
 - `erlang-nox`
 - `git`
@@ -1,7 +1,7 @@
 ## Required dependencies

 * PostgreSQL >=11.0
-* Elixir >=1.11.0 <1.15
+* Elixir >=1.13.0 <1.15
 * Erlang OTP >=22.2.0 (supported: <27)
 * git
 * file / libmagic
lib/mix/tasks/pleroma/search/indexer.ex (new file, 80 lines)

# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Search.Indexer do
  import Mix.Pleroma
  import Ecto.Query

  alias Pleroma.Workers.SearchIndexingWorker

  def run(["create_index"]) do
    start_pleroma()

    with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).create_index() do
      IO.puts("Index created")
    else
      e -> IO.puts("Could not create index: #{inspect(e)}")
    end
  end

  def run(["drop_index"]) do
    start_pleroma()

    with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).drop_index() do
      IO.puts("Index dropped")
    else
      e -> IO.puts("Could not drop index: #{inspect(e)}")
    end
  end

  def run(["index" | options]) do
    {options, [], []} =
      OptionParser.parse(
        options,
        strict: [
          limit: :integer
        ]
      )

    start_pleroma()

    limit = Keyword.get(options, :limit, 100_000)

    per_step = 1000
    chunks = max(div(limit, per_step), 1)

    1..chunks
    |> Enum.each(fn step ->
      q =
        from(a in Pleroma.Activity,
          limit: ^per_step,
          offset: ^per_step * (^step - 1),
          select: [:id],
          order_by: [desc: :id]
        )

      {:ok, ids} =
        Pleroma.Repo.transaction(fn ->
          Pleroma.Repo.stream(q, timeout: :infinity)
          |> Enum.map(fn a ->
            a.id
          end)
        end)

      IO.puts("Got #{length(ids)} activities, adding to indexer")

      ids
      |> Enum.chunk_every(100)
      |> Enum.each(fn chunk ->
        IO.puts("Adding #{length(chunk)} activities to indexing queue")

        chunk
        |> Enum.map(fn id ->
          SearchIndexingWorker.new(%{"op" => "add_to_index", "activity" => id})
        end)
        |> Oban.insert_all()
      end)
    end)
  end
end
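Taken together with the search documentation earlier in this commit, typical operator invocations would be the following; `--limit` is the only option the task parses and it defaults to 100_000:

```
mix pleroma.search.indexer create_index
mix pleroma.search.indexer index --limit 10000
mix pleroma.search.indexer drop_index
```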
@@ -15,6 +15,7 @@ defmodule Pleroma.Application do
   @compat_name Mix.Project.config()[:compat_name]
   @version Mix.Project.config()[:version]
   @repository Mix.Project.config()[:source_url]
+  @compile_env Mix.env()

   def name, do: @name
   def compat_name, do: @compat_name

@@ -54,7 +55,11 @@ def start(_type, _args) do
     Pleroma.HTML.compile_scrubbers()
     Pleroma.Config.Oban.warn()
     Config.DeprecationWarnings.warn()
-    Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
+
+    if @compile_env != :test do
+      Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
+    end
+
     Pleroma.ApplicationRequirements.verify!()
     load_custom_modules()
     Pleroma.Docs.JSON.compile()
|
|||
task_children() ++
|
||||
streamer_registry() ++
|
||||
background_migrators() ++
|
||||
[Pleroma.Gopher.Server]
|
||||
[Pleroma.Gopher.Server] ++
|
||||
[Pleroma.Search.Healthcheck]
|
||||
|
||||
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
|
||||
# for other strategies and supported options
|
||||
|
@@ -159,15 +165,16 @@ defp cachex_children do
       build_cachex("web_resp", limit: 2500),
       build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
       build_cachex("failed_proxy_url", limit: 2500),
+      build_cachex("failed_media_helper_url", default_ttl: :timer.minutes(15), limit: 2_500),
       build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
       build_cachex("chat_message_id_idempotency_key",
         expiration: chat_message_id_idempotency_key_expiration(),
         limit: 500_000
       ),
+      build_cachex("anti_duplication_mrf", limit: 5_000),
+      build_cachex("translations", default_ttl: :timer.hours(24), limit: 5_000),
       build_cachex("rel_me", default_ttl: :timer.minutes(30), limit: 2_500),
       build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000),
-      build_cachex("anti_duplication_mrf", limit: 5_000),
-      build_cachex("translations", default_ttl: :timer.hours(24), limit: 5_000),
       build_cachex("domain", limit: 2500)
     ]
   end
@@ -28,6 +28,7 @@ def verify! do
     |> check_welcome_message_config!()
     |> check_rum!()
     |> check_repo_pool_size!()
+    |> check_mrfs()
     |> handle_result()
   end
@@ -268,4 +269,25 @@ defp check_filter(filter, command_required) do
       true
     end
   end
+
+  defp check_mrfs(:ok) do
+    mrfs = Config.get!([:mrf, :policies])
+
+    missing_mrfs =
+      Enum.reduce(mrfs, [], fn x, acc ->
+        if Code.ensure_compiled(x) do
+          acc
+        else
+          acc ++ [x]
+        end
+      end)
+
+    if Enum.empty?(missing_mrfs) do
+      :ok
+    else
+      {:error, "The following MRF modules are configured but missing: #{inspect(missing_mrfs)}"}
+    end
+  end
+
+  defp check_mrfs(result), do: result
 end
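A sketch of the misconfiguration this check is meant to catch; the module name below is deliberately bogus:

```elixir
# Hypothetical typo: verify!/0 is intended to abort startup with
# "The following MRF modules are configured but missing: ..." for this.
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.NoSuchPolicy]
```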
@@ -20,6 +20,7 @@ defmodule Pleroma.Constants do
       "deleted_activity_id",
       "pleroma_internal",
       "generator",
+      "rules",
       "assigned_account",
       "rules",
       "content_type",
@@ -5,13 +5,13 @@
 defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap do
   use Ecto.Type

-  import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
-    only: [is_good_locale_code?: 1]
+  alias Pleroma.MultiLanguage

   def type, do: :map

   def cast(%{} = object) do
-    with {status, %{} = data} when status in [:modified, :ok] <- validate_map(object) do
+    with {status, %{} = data} when status in [:modified, :ok] <-
+           MultiLanguage.validate_map(object) do
       {:ok, data}
     else
       {_, nil} -> {:ok, nil}
@@ -24,26 +24,4 @@ def cast(_), do: :error
   def dump(data), do: {:ok, data}

   def load(data), do: {:ok, data}
-
-  defp validate_map(%{} = object) do
-    {status, data} =
-      object
-      |> Enum.reduce({:ok, %{}}, fn
-        {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) ->
-          if is_good_locale_code?(lang) do
-            {status, Map.put(acc, lang, value)}
-          else
-            {:modified, acc}
-          end
-
-        _, {_status, acc} ->
-          {:modified, acc}
-      end)
-
-    if data == %{} do
-      {status, nil}
-    else
-      {status, data}
-    end
-  end
 end
@@ -5,10 +5,12 @@
 defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode do
   use Ecto.Type

+  alias Pleroma.MultiLanguage
+
   def type, do: :string

   def cast(language) when is_binary(language) do
-    if is_good_locale_code?(language) do
+    if MultiLanguage.good_locale_code?(language) do
       {:ok, language}
     else
       {:error, :invalid_language}
@@ -20,8 +22,4 @@ def cast(_), do: :error
   def dump(data), do: {:ok, data}

   def load(data), do: {:ok, data}
-
-  def is_good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+$>
-
-  def is_good_locale_code?(_code), do: false
 end
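Casting behaves as before, just through the shared helper; a quick sketch of the clauses above:

```elixir
alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode

LanguageCode.cast("en-US")       # => {:ok, "en-US"}, matches ~r<^[a-zA-Z0-9\-]+$>
LanguageCode.cast("not a tag!")  # => {:error, :invalid_language}
LanguageCode.cast(:en)           # => :error, non-binary input
```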
@@ -48,5 +48,9 @@ def get_emoji_map(text) when is_binary(text) do
     end)
   end

+  def get_emoji_map(%{} = map) do
+    Enum.reduce(map, %{}, fn {_, content}, acc -> Map.merge(acc, get_emoji_map(content)) end)
+  end
+
   def get_emoji_map(_), do: %{}
 end
@@ -16,4 +16,15 @@ def parse_address(ip) when is_binary(ip) do
   def parse_address(ip) do
     :inet.parse_address(ip)
   end
+
+  def parse_cidr(proxy) when is_binary(proxy) do
+    proxy =
+      cond do
+        "/" in String.codepoints(proxy) -> proxy
+        InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
+        InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
+      end
+
+    InetCidr.parse_cidr!(proxy, true)
+  end
 end
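A sketch of what the new helper accepts, following the three `cond` branches (the enclosing module name is not shown in this hunk):

```elixir
# Bare addresses get a host-sized mask appended before parsing.
parse_cidr("10.0.0.1")     # parsed as "10.0.0.1/32"
parse_cidr("fe80::1")      # parsed as "fe80::1/128"
parse_cidr("10.0.0.0/24")  # already CIDR, handed to InetCidr.parse_cidr!/2 as-is
```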
@@ -12,6 +12,8 @@ defmodule Pleroma.Helpers.MediaHelper do

   require Logger

+  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+
   def missing_dependencies do
     Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
       if Pleroma.Utils.command_available?(executable) do
@@ -43,29 +45,40 @@ def image_resize(url, options) do
   @spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
   def video_framegrab(url) do
     with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
+         false <- @cachex.exists?(:failed_media_helper_cache, url),
          {:ok, env} <- HTTP.get(url, [], pool: :media),
          {:ok, pid} <- StringIO.open(env.body) do
       body_stream = IO.binstream(pid, 1)

-      result =
-        Exile.stream!(
-          [
-            executable,
-            "-i",
-            "pipe:0",
-            "-vframes",
-            "1",
-            "-f",
-            "mjpeg",
-            "pipe:1"
-          ],
-          input: body_stream,
-          ignore_epipe: true,
-          stderr: :disable
-        )
-        |> Enum.into(<<>>)
+      task =
+        Task.async(fn ->
+          Exile.stream!(
+            [
+              executable,
+              "-i",
+              "pipe:0",
+              "-vframes",
+              "1",
+              "-f",
+              "mjpeg",
+              "pipe:1"
+            ],
+            input: body_stream,
+            ignore_epipe: true,
+            stderr: :disable
+          )
+          |> Enum.into(<<>>)
+        end)

-      {:ok, result}
+      case Task.yield(task, 5_000) do
+        nil ->
+          Task.shutdown(task)
+          @cachex.put(:failed_media_helper_cache, url, nil)
+          {:error, {:ffmpeg, :timeout}}
+
+        result ->
+          {:ok, result}
+      end
     else
       nil -> {:error, {:ffmpeg, :command_not_found}}
       {:error, _} = error -> error
@@ -65,20 +65,16 @@ def ensure_scrubbed_html(
     end
   end

-  @spec extract_first_external_url_from_object(Pleroma.Object.t()) ::
-          {:ok, String.t()} | {:error, :no_content}
+  @spec extract_first_external_url_from_object(Pleroma.Object.t()) :: String.t() | nil
   def extract_first_external_url_from_object(%{data: %{"content" => content}})
       when is_binary(content) do
-    url =
-      content
-      |> Floki.parse_fragment!()
-      |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
-      |> Enum.take(1)
-      |> Floki.attribute("href")
-      |> Enum.at(0)
-
-    {:ok, url}
+    content
+    |> Floki.parse_fragment!()
+    |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+    |> Enum.take(1)
+    |> Floki.attribute("href")
+    |> Enum.at(0)
   end

-  def extract_first_external_url_from_object(_), do: {:error, :no_content}
+  def extract_first_external_url_from_object(_), do: nil
 end
lib/pleroma/http_signatures_api.ex (new file, 4 lines)

defmodule Pleroma.HTTPSignaturesAPI do
  @callback validate_conn(conn :: Plug.Conn.t()) :: boolean
  @callback signature_for_conn(conn :: Plug.Conn.t()) :: map
end
lib/pleroma/multi_language.ex (new file, 43 lines)

# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.MultiLanguage do
  def good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+$>

  def good_locale_code?(_code), do: false

  def validate_map(%{} = object) do
    {status, data} =
      object
      |> Enum.reduce({:ok, %{}}, fn
        {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) ->
          if good_locale_code?(lang) do
            {status, Map.put(acc, lang, value)}
          else
            {:modified, acc}
          end

        _, {_status, acc} ->
          {:modified, acc}
      end)

    if data == %{} do
      {status, nil}
    else
      {status, data}
    end
  end

  def validate_map(_), do: {:error, nil}

  def str_to_map(data, opts \\ []) do
    with lang when is_binary(lang) <- opts[:lang],
         true <- good_locale_code?(lang) do
      %{lang => data}
    else
      _ ->
        %{"und" => data}
    end
  end
end
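A quick sketch of the new module's string-wrapping behavior:

```elixir
Pleroma.MultiLanguage.str_to_map("Hello", lang: "en")
# => %{"en" => "Hello"}

Pleroma.MultiLanguage.str_to_map("Hello", lang: "not a tag!")
# => %{"und" => "Hello"}, invalid locale codes fall back to "und" (undetermined)
```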
@@ -74,6 +74,7 @@ def unread_notifications_count(%User{id: user_id}) do
     reblog
     poll
+    status
     update
     pleroma:participation_accepted
     pleroma:participation_request
     pleroma:event_reminder
@@ -285,7 +286,7 @@ def set_read_up_to(%{id: user_id} = user, id) do
       select: n.id
     )

-    {:ok, %{ids: {_, notification_ids}, marker: marker}} =
+    {:ok, %{marker: marker}} =
       Multi.new()
       |> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
       |> Marker.multi_set_last_read_id(user, "notifications")
@@ -293,23 +294,21 @@ def set_read_up_to(%{id: user_id} = user, id) do

     Streamer.stream(["user", "user:notification"], marker)

-    for_user_query(user)
-    |> where([n], n.id in ^notification_ids)
-    |> Repo.all()
+    {:ok, %{marker: marker}}
   end

   @spec read_one(User.t(), String.t()) ::
           {:ok, Notification.t()} | {:error, Ecto.Changeset.t()} | nil
   def read_one(%User{} = user, notification_id) do
-    with {:ok, %Notification{} = notification} <- get(user, notification_id) do
-      Multi.new()
-      |> Multi.update(:update, changeset(notification, %{seen: true}))
-      |> Marker.multi_set_last_read_id(user, "notifications")
-      |> Repo.transaction()
-      |> case do
-        {:ok, %{update: notification}} -> {:ok, notification}
-        {:error, :update, changeset, _} -> {:error, changeset}
-      end
+    with {:ok, %Notification{} = notification} <- get(user, notification_id),
+         {:ok, %{marker: marker}} <-
+           Multi.new()
+           |> Multi.update(:update, changeset(notification, %{seen: true}))
+           |> Marker.multi_set_last_read_id(user, "notifications")
+           |> Repo.transaction() do
+      Streamer.stream(["user", "user:notification"], marker)
+
+      {:ok, %{marker: marker}}
     end
   end
@@ -368,20 +367,20 @@ def dismiss(%{id: user_id} = _user, id) do
     end
   end

-  @spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
-  def create_notifications(activity, options \\ [])
+  @spec create_notifications(Activity.t()) :: {:ok, [Notification.t()] | []}
+  def create_notifications(activity)

-  def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
+  def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
     object = Object.normalize(activity, fetch: false)

     if object && object.data["type"] == "Answer" do
       {:ok, []}
     else
-      do_create_notifications(activity, options)
+      do_create_notifications(activity)
     end
   end

-  def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
+  def create_notifications(%Activity{data: %{"type" => type}} = activity)
       when type in [
         "Follow",
         "Like",
@@ -393,41 +392,29 @@ def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
         "Accept",
         "Join"
       ] do
-    do_create_notifications(activity, options)
+    do_create_notifications(activity)
   end

-  def create_notifications(_, _), do: {:ok, []}
+  def create_notifications(_), do: {:ok, []}

-  defp do_create_notifications(%Activity{} = activity, options) do
-    do_send = Keyword.get(options, :do_send, true)
-
-    {enabled_participants, disabled_participants} =
-      get_notified_participants_from_activity(activity)
-
-    potential_participants = enabled_participants ++ disabled_participants
-
-    {enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
-
-    potential_receivers = (enabled_receivers ++ disabled_receivers) -- potential_participants
-
-    {enabled_subscribers, disabled_subscribers} = get_notified_subscribers_from_activity(activity)
-
-    potential_subscribers =
-      (enabled_subscribers ++ disabled_subscribers) --
-        (potential_participants ++ potential_receivers)
+  defp do_create_notifications(%Activity{} = activity) do
+    enabled_participants = get_notified_participants_from_activity(activity)
+
+    enabled_receivers = get_notified_from_activity(activity) -- enabled_participants
+
+    enabled_subscribers =
+      get_notified_subscribers_from_activity(activity) --
+        (enabled_participants ++ enabled_receivers)

     notifications =
-      (Enum.map(potential_receivers, fn user ->
-         do_send = do_send && user in enabled_receivers
-         create_notification(activity, user, do_send: do_send)
+      (Enum.map(enabled_receivers, fn user ->
+         create_notification(activity, user)
       end) ++
-        Enum.map(potential_subscribers, fn user ->
-          do_send = do_send && user in enabled_subscribers
-          create_notification(activity, user, do_send: do_send, type: "status")
+        Enum.map(enabled_subscribers, fn user ->
+          create_notification(activity, user, type: "status")
         end) ++
-        Enum.map(potential_participants, fn user ->
-          do_send = do_send && user in enabled_participants
-          create_notification(activity, user, do_send: do_send, type: "pleroma:event_update")
+        Enum.map(enabled_participants, fn user ->
+          create_notification(activity, user, type: "pleroma:event_update")
         end))
       |> Enum.reject(&is_nil/1)
@@ -493,7 +480,6 @@ defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do

   # TODO move to sql, too.
   def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
-    do_send = Keyword.get(opts, :do_send, true)
     type = Keyword.get(opts, :type, type_from_activity(activity))

     unless skip?(activity, user, opts) do
@@ -508,11 +494,6 @@ def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
       |> Marker.multi_set_last_read_id(user, "notifications")
       |> Repo.transaction()

-      if do_send do
-        Streamer.stream(["user", "user:notification"], notification)
-        Push.send(notification)
-      end
-
       notification
     end
   end
@@ -594,13 +575,10 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
     |> exclude_relationship_restricted_ap_ids(activity)
     |> exclude_thread_muter_ap_ids(activity)

-    notification_enabled_users =
-      Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
-    {notification_enabled_users, potential_receivers -- notification_enabled_users}
+    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
   end

-  def get_notified_from_activity(_, _local_only), do: {[], []}
+  def get_notified_from_activity(_, _local_only), do: []

   def get_notified_subscribers_from_activity(activity, local_only \\ true)
@@ -615,13 +593,10 @@ def get_notified_subscribers_from_activity(
     potential_receivers =
       User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)

-    notification_enabled_users =
-      Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
-    {notification_enabled_users, potential_receivers -- notification_enabled_users}
+    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
   end

-  def get_notified_subscribers_from_activity(_, _), do: {[], []}
+  def get_notified_subscribers_from_activity(_, _), do: []

   def get_notified_participants_from_activity(activity, local_only \\ true)
@@ -636,13 +611,10 @@ def get_notified_participants_from_activity(
     potential_receivers =
       User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)

-    notification_enabled_users =
-      Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
-    {notification_enabled_users, potential_receivers -- notification_enabled_users}
+    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
   end

-  def get_notified_participants_from_activity(_, _), do: {[], []}
+  def get_notified_participants_from_activity(_, _), do: []

   # For some activities, only notify the author of the object
   def get_potential_receiver_ap_ids(%{data: %{"type" => type, "object" => object_id}})
@@ -780,6 +752,7 @@ def skip?(activity, user, opts \\ [])
   def skip?(%Activity{} = activity, %User{} = user, opts) do
     [
       :self,
+      :internal,
       :invisible,
       :block_from_strangers,
       :recently_followed,
@@ -799,6 +772,12 @@ def skip?(:self, %Activity{} = activity, %User{} = user, opts) do
     end
   end

+  def skip?(:internal, %Activity{} = activity, _user, _opts) do
+    actor = activity.data["actor"]
+    user = User.get_cached_by_ap_id(actor)
+    User.internal?(user)
+  end
+
   def skip?(:invisible, %Activity{} = activity, _user, _opts) do
     actor = activity.data["actor"]
     user = User.get_cached_by_ap_id(actor)
@@ -885,4 +864,12 @@ def mark_context_as_read(%User{id: id}, context) do
     )
     |> Repo.update_all(set: [seen: true])
   end
+
+  @spec send(list(Notification.t())) :: :ok
+  def send(notifications) do
+    Enum.each(notifications, fn notification ->
+      Streamer.stream(["user", "user:notification"], notification)
+      Push.send(notification)
+    end)
+  end
 end
@@ -16,13 +16,14 @@ defmodule Pleroma.Rule do
   schema "rules" do
     field(:priority, :integer, default: 0)
     field(:text, :string)
+    field(:hint, :string)

     timestamps()
   end

   def changeset(%Rule{} = rule, params \\ %{}) do
     rule
-    |> cast(params, [:priority, :text])
+    |> cast(params, [:priority, :text, :hint])
     |> validate_required([:text])
   end
@@ -39,6 +40,11 @@ def get(ids) when is_list(ids) do

   def get(id), do: Repo.get(__MODULE__, id)

+  def exists?(id) do
+    from(r in __MODULE__, where: r.id == ^id)
+    |> Repo.exists?()
+  end
+
   def create(params) do
     {:ok, rule} =
       %Rule{}
@@ -204,7 +204,7 @@ def due_activities(offset \\ 0) do

   def job_query(scheduled_activity_id) do
     from(j in Oban.Job,
-      where: j.queue == "scheduled_activities",
+      where: j.queue == "federator_outgoing",
       where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
     )
   end
@@ -10,8 +10,12 @@ def remove_from_index(%Pleroma.Object{id: object_id}) do
   end

   def search(query, options) do
-    search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
-
+    search_module = Pleroma.Config.get([Pleroma.Search, :module])
     search_module.search(options[:for_user], query, options)
   end
+
+  def healthcheck_endpoints do
+    search_module = Pleroma.Config.get([Pleroma.Search, :module])
+    search_module.healthcheck_endpoints
+  end
 end
@@ -48,6 +48,15 @@ def add_to_index(_activity), do: :ok
  @impl true
  def remove_from_index(_object), do: :ok

  @impl true
  def create_index, do: :ok

  @impl true
  def drop_index, do: :ok

  @impl true
  def healthcheck_endpoints, do: nil

  def maybe_restrict_author(query, %User{} = author) do
    Activity.Queries.by_author(query, author)
  end
86 lib/pleroma/search/healthcheck.ex Normal file
@@ -0,0 +1,86 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.Healthcheck do
  @doc """
  Monitors health of search backend to control processing of events based on health and availability.
  """
  use GenServer
  require Logger

  @queue :search_indexing
  @tick :timer.seconds(5)
  @timeout :timer.seconds(2)

  def start_link(_) do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @impl true
  def init(_) do
    state = %{healthy: false}
    {:ok, state, {:continue, :start}}
  end

  @impl true
  def handle_continue(:start, state) do
    tick()
    {:noreply, state}
  end

  @impl true
  def handle_info(:check, state) do
    urls = Pleroma.Search.healthcheck_endpoints()

    new_state =
      if check(urls) do
        Oban.resume_queue(queue: @queue)
        Map.put(state, :healthy, true)
      else
        Oban.pause_queue(queue: @queue)
        Map.put(state, :healthy, false)
      end

    maybe_log_state_change(state, new_state)

    tick()
    {:noreply, new_state}
  end

  @impl true
  def handle_call(:state, _from, state) do
    {:reply, state, state, :hibernate}
  end

  def state, do: GenServer.call(__MODULE__, :state)

  def check([]), do: true

  def check(urls) when is_list(urls) do
    Enum.all?(
      urls,
      fn url ->
        case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
          {:ok, %{status: 200}} -> true
          _ -> false
        end
      end
    )
  end

  def check(_), do: true

  defp tick do
    Process.send_after(self(), :check, @tick)
  end

  defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
    Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
  end

  defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
    Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
  end

  defp maybe_log_state_change(_, _), do: :ok
end
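The watchdog above exposes only state/0 as a public API. A quick way to observe it from a console, assuming the GenServer is running under the application's supervision tree (output values are illustrative):

iex> Pleroma.Search.Healthcheck.state()
%{healthy: true}

When every URL returned by Pleroma.Search.healthcheck_endpoints/0 answers with HTTP 200 within the 2-second timeout, the :search_indexing Oban queue is resumed; on any failure it is paused until a later 5-second tick succeeds again.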
@@ -10,6 +10,12 @@ defmodule Pleroma.Search.Meilisearch do

  @behaviour Pleroma.Search.SearchBackend

  @impl true
  def create_index, do: :ok

  @impl true
  def drop_index, do: :ok

  defp meili_headers do
    private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])

@@ -178,4 +184,15 @@ def add_to_index(activity) do
  def remove_from_index(object) do
    meili_delete("/indexes/objects/documents/#{object.id}")
  end

  @impl true
  def healthcheck_endpoints do
    endpoint =
      Config.get([Pleroma.Search.Meilisearch, :url])
      |> URI.parse()
      |> Map.put(:path, "/health")
      |> URI.to_string()

    [endpoint]
  end
end
182 lib/pleroma/search/qdrant_search.ex Normal file
@@ -0,0 +1,182 @@
defmodule Pleroma.Search.QdrantSearch do
  @behaviour Pleroma.Search.SearchBackend
  import Ecto.Query

  alias Pleroma.Activity
  alias Pleroma.Config.Getting, as: Config

  alias __MODULE__.OpenAIClient
  alias __MODULE__.QdrantClient

  import Pleroma.Search.Meilisearch, only: [object_to_search_data: 1]
  import Pleroma.Search.DatabaseSearch, only: [maybe_fetch: 3]

  @impl true
  def create_index do
    payload = Config.get([Pleroma.Search.QdrantSearch, :qdrant_index_configuration])

    with {:ok, %{status: 200}} <- QdrantClient.put("/collections/posts", payload) do
      :ok
    else
      e -> {:error, e}
    end
  end

  @impl true
  def drop_index do
    with {:ok, %{status: 200}} <- QdrantClient.delete("/collections/posts") do
      :ok
    else
      e -> {:error, e}
    end
  end

  def get_embedding(text) do
    with {:ok, %{body: %{"data" => [%{"embedding" => embedding}]}}} <-
           OpenAIClient.post("/v1/embeddings", %{
             input: text,
             model: Config.get([Pleroma.Search.QdrantSearch, :openai_model])
           }) do
      {:ok, embedding}
    else
      _ ->
        {:error, "Failed to get embedding"}
    end
  end

  defp actor_from_activity(%{data: %{"actor" => actor}}) do
    actor
  end

  defp actor_from_activity(_), do: nil

  defp build_index_payload(activity, embedding) do
    actor = actor_from_activity(activity)
    published_at = activity.data["published"]

    %{
      points: [
        %{
          id: activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!(),
          vector: embedding,
          payload: %{actor: actor, published_at: published_at}
        }
      ]
    }
  end

  defp build_search_payload(embedding, options) do
    base = %{
      vector: embedding,
      limit: options[:limit] || 20,
      offset: options[:offset] || 0
    }

    if author = options[:author] do
      Map.put(base, :filter, %{
        must: [%{key: "actor", match: %{value: author.ap_id}}]
      })
    else
      base
    end
  end

  @impl true
  def add_to_index(activity) do
    # This will only index public or unlisted notes
    maybe_search_data = object_to_search_data(activity.object)

    if activity.data["type"] == "Create" and maybe_search_data do
      with {:ok, embedding} <- get_embedding(maybe_search_data.content),
           {:ok, %{status: 200}} <-
             QdrantClient.put(
               "/collections/posts/points",
               build_index_payload(activity, embedding)
             ) do
        :ok
      else
        e -> {:error, e}
      end
    else
      :ok
    end
  end

  @impl true
  def remove_from_index(object) do
    activity = Activity.get_by_object_ap_id_with_object(object.data["id"])
    id = activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()

    with {:ok, %{status: 200}} <-
           QdrantClient.post("/collections/posts/points/delete", %{"points" => [id]}) do
      :ok
    else
      e -> {:error, e}
    end
  end

  @impl true
  def search(user, original_query, options) do
    query = "Represent this sentence for searching relevant passages: #{original_query}"

    with {:ok, embedding} <- get_embedding(query),
         {:ok, %{body: %{"result" => result}}} <-
           QdrantClient.post(
             "/collections/posts/points/search",
             build_search_payload(embedding, options)
           ) do
      ids =
        Enum.map(result, fn %{"id" => id} ->
          Ecto.UUID.dump!(id)
        end)

      from(a in Activity, where: a.id in ^ids)
      |> Activity.with_preloaded_object()
      |> Activity.restrict_deactivated_users()
      |> Ecto.Query.order_by([a], fragment("array_position(?, ?)", ^ids, a.id))
      |> Pleroma.Repo.all()
      |> maybe_fetch(user, original_query)
    else
      _ ->
        []
    end
  end

  @impl true
  def healthcheck_endpoints do
    qdrant_health =
      Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])
      |> URI.parse()
      |> Map.put(:path, "/healthz")
      |> URI.to_string()

    openai_health = Config.get([Pleroma.Search.QdrantSearch, :openai_healthcheck_url])

    [qdrant_health, openai_health] |> Enum.filter(& &1)
  end
end

defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
  use Tesla
  alias Pleroma.Config.Getting, as: Config

  plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
  plug(Tesla.Middleware.JSON)

  plug(Tesla.Middleware.Headers, [
    {"Authorization",
     "Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
  ])
end

defmodule Pleroma.Search.QdrantSearch.QdrantClient do
  use Tesla
  alias Pleroma.Config.Getting, as: Config

  plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
  plug(Tesla.Middleware.JSON)

  plug(Tesla.Middleware.Headers, [
    {"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
  ])
end
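A minimal sketch of the configuration this backend reads, using only the keys referenced above; the URLs, model name, and vector size are illustrative assumptions (the index's vector size must match whatever embedding model the OpenAI-compatible endpoint serves):

config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch

config :pleroma, Pleroma.Search.QdrantSearch,
  qdrant_url: "http://127.0.0.1:6333/",
  qdrant_api_key: "",
  openai_url: "http://127.0.0.1:11345",
  openai_healthcheck_url: "http://127.0.0.1:11345/health",
  openai_api_key: "",
  openai_model: "snowflake/snowflake-arctic-embed-xs",
  qdrant_index_configuration: %{
    vectors: %{size: 384, distance: "Cosine"}
  }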
@@ -21,4 +21,22 @@ defmodule Pleroma.Search.SearchBackend do
  from index.
  """
  @callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}

  @doc """
  Create the index
  """
  @callback create_index() :: :ok | {:error, any()}

  @doc """
  Drop the index
  """
  @callback drop_index() :: :ok | {:error, any()}

  @doc """
  Healthcheck endpoints of search backend infrastructure to monitor for controlling
  processing of jobs in the Oban queue.

  It is expected that a 200 response is healthy and any other response is unhealthy.
  """
  @callback healthcheck_endpoints :: list() | nil
end
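For reference, a no-op module satisfying the widened behaviour could look like the sketch below (not part of the diff; a real backend would talk to an external index). Returning nil from healthcheck_endpoints/0 means the healthcheck GenServer keeps the :search_indexing queue running unconditionally:

defmodule MyApp.Search.NullBackend do
  @behaviour Pleroma.Search.SearchBackend

  @impl true
  def search(_user, _query, _options), do: []

  @impl true
  def add_to_index(_activity), do: :ok

  @impl true
  def remove_from_index(_object), do: :ok

  @impl true
  def create_index, do: :ok

  @impl true
  def drop_index, do: :ok

  # No endpoints to probe, so indexing is never paused.
  @impl true
  def healthcheck_endpoints, do: nil
end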
@@ -62,6 +62,7 @@ defmodule Pleroma.Upload do
          height: integer(),
          blurhash: String.t(),
          description: String.t(),
          description_map: map(),
          path: String.t()
        }
  defstruct [

@@ -73,21 +74,44 @@ defmodule Pleroma.Upload do
    :height,
    :blurhash,
    :description,
    :description_map,
    :path
  ]

  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)

  defp get_description(upload) do
    case {upload.description, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
      {description, _} when is_binary(description) -> description
    case {upload, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
      {%{description_map: %{} = description_map}, _} -> description_map
      {%{description: description}, _} when is_binary(description) -> description
      {_, :filename} -> upload.name
      {_, str} when is_binary(str) -> str
      _ -> ""
    end
  end

  @spec store(source, options :: [option()]) :: {:ok, map()} | {:error, any()}
  defp validate_description_limit(%{} = description) do
    Enum.all?(description, fn {_, content} ->
      String.length(content) <= Pleroma.Config.get([:instance, :description_limit])
    end)
  end

  defp validate_description_limit(description) when is_binary(description) do
    String.length(description) <= Pleroma.Config.get([:instance, :description_limit])
  end

  defp description_fields(%{} = description, language) do
    %{
      "name" => Map.get(description, language),
      "nameMap" => description
    }
  end

  defp description_fields(description, _) when is_binary(description) do
    %{"name" => description}
  end

  @spec store(source, options :: [option()]) :: {:ok, Map.t()} | {:error, any()}
  @doc "Store a file. If using a `Plug.Upload{}` as the source, be sure to use `Majic.Plug` to ensure its content_type and filename is correct."
  def store(upload, opts \\ []) do
    opts = get_opts(opts)

@@ -96,9 +120,10 @@ def store(upload, opts \\ []) do
         upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"},
         {:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload),
         description = get_description(upload),
         {_, true} <- {:description_limit, validate_description_limit(description)},
         {_, true} <-
           {:description_limit,
            String.length(description) <= Pleroma.Config.get([:instance, :description_limit])},
         {:valid_locale,
          opts[:language] == nil or Pleroma.MultiLanguage.good_locale_code?(opts[:language])},
         {:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do
      {:ok,
       %{

@@ -113,10 +138,11 @@ def store(upload, opts \\ []) do
           }
           |> Maps.put_if_present("width", upload.width)
           |> Maps.put_if_present("height", upload.height)
         ],
         "name" => description
       ]
       }
       |> Maps.put_if_present("blurhash", upload.blurhash)}
       |> Map.merge(description_fields(description, opts[:language]))
       |> Maps.put_if_present("blurhash", upload.blurhash)
       |> Maps.put_if_present("language", opts[:language])}
    else
      {:description_limit, _} ->
        {:error, :description_too_long}

@@ -156,6 +182,7 @@ defp get_opts(opts) do
      uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])),
      filters: Keyword.get(opts, :filters, Pleroma.Config.get([__MODULE__, :filters])),
      description: Keyword.get(opts, :description),
      description_map: Keyword.get(opts, :description_map),
      base_url: base_url()
    }
  end

@@ -168,7 +195,8 @@ defp prepare_upload(%Plug.Upload{} = file, opts) do
       name: file.filename,
       tempfile: file.path,
       content_type: file.content_type,
       description: opts.description
       description: opts.description,
       description_map: opts.description_map
     }}
  end
end

@@ -239,8 +267,12 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
        ""
      end

    [base_url, path]
    |> Path.join()
    if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
      String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
    else
      [base_url, path]
      |> Path.join()
    end
  end

  defp url_from_spec(_upload, _base_url, {:url, url}), do: url

@@ -277,6 +309,9 @@ def base_url do
        Path.join([upload_base_url, bucket_with_namespace])
      end

    Pleroma.Uploaders.IPFS ->
      @config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])

    _ ->
      public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
  end
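Taken together, a multilingual description now reaches the stored attachment as both "name" and "nameMap". An illustrative call (the upload variable stands in for a %Plug.Upload{} or similar source):

{:ok, data} =
  Pleroma.Upload.store(upload,
    description_map: %{"en" => "A cat", "pl" => "Kot"},
    language: "en"
  )

# Among the usual url/type/mediaType fields, data now carries:
#   "name"     => "A cat"
#   "nameMap"  => %{"en" => "A cat", "pl" => "Kot"}
#   "language" => "en"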
77 lib/pleroma/uploaders/ipfs.ex Normal file
@@ -0,0 +1,77 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Uploaders.IPFS do
  @behaviour Pleroma.Uploaders.Uploader
  require Logger

  alias Tesla.Multipart

  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)

  defp get_final_url(method) do
    config = @config_impl.get([__MODULE__])
    post_base_url = Keyword.get(config, :post_gateway_url)

    Path.join([post_base_url, method])
  end

  def put_file_endpoint do
    get_final_url("/api/v0/add")
  end

  def delete_file_endpoint do
    get_final_url("/api/v0/files/rm")
  end

  @placeholder "{CID}"
  def placeholder, do: @placeholder

  @impl true
  def get_file(file) do
    b_url = Pleroma.Upload.base_url()

    if String.contains?(b_url, @placeholder) do
      {:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
    else
      {:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
    end
  end

  @impl true
  def put_file(%Pleroma.Upload{} = upload) do
    mp =
      Multipart.new()
      |> Multipart.add_content_type_param("charset=utf-8")
      |> Multipart.add_file(upload.tempfile)

    case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do
      {:ok, ret} ->
        case Jason.decode(ret.body) do
          {:ok, ret} ->
            if Map.has_key?(ret, "Hash") do
              {:ok, {:file, ret["Hash"]}}
            else
              {:error, "JSON doesn't contain Hash key"}
            end

          error ->
            Logger.error("#{__MODULE__}: #{inspect(error)}")
            {:error, "JSON decode failed"}
        end

      error ->
        Logger.error("#{__MODULE__}: #{inspect(error)}")
        {:error, "IPFS Gateway upload failed"}
    end
  end

  @impl true
  def delete_file(file) do
    case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do
      {:ok, %{status: 204}} -> :ok
      error -> {:error, inspect(error)}
    end
  end
end
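A plausible configuration for this uploader, assuming a local IPFS node whose HTTP API listens on port 5001; the gateway hostname is illustrative, and the {CID} placeholder in the get gateway URL is what get_file/1 substitutes with the hash returned by /api/v0/add:

config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.IPFS

config :pleroma, Pleroma.Uploaders.IPFS,
  post_gateway_url: "http://localhost:5001",
  get_gateway_url: "https://ipfs.example.com/ipfs/{CID}"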
@@ -33,6 +33,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
  import Ecto.Query
  import Pleroma.Web.ActivityPub.Utils
  import Pleroma.Web.ActivityPub.Visibility
  import Pleroma.Web.Gettext
  import Pleroma.Webhook.Notify, only: [trigger_webhooks: 2]

  require Logger

@@ -170,9 +171,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
      # Splice in the child object if we have one.
      activity = Maps.put_if_present(activity, :object, object)

      ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
        Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
      end)
      Pleroma.Web.RichMedia.Card.get_by_activity(activity)

      # Add local posts to search index
      if local, do: Pleroma.Search.add_to_index(activity)

@@ -200,7 +199,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
          id: "pleroma:fakeid"
        }

      Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
      Pleroma.Web.RichMedia.Card.get_by_activity(activity)
      {:ok, activity}

    {:remote_limit_pass, _} ->

@@ -1362,6 +1361,15 @@ defp restrict_unauthenticated(query, nil) do

  defp restrict_unauthenticated(query, _), do: query

  defp restrict_rule(query, %{rule_id: rule_id}) do
    from(
      activity in query,
      where: fragment("(?)->'rules' \\? (?)", activity.data, ^rule_id)
    )
  end

  defp restrict_rule(query, _), do: query

  defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query

  defp exclude_poll_votes(query, _) do

@@ -1540,7 +1548,9 @@ def fetch_activities_query(recipients, opts \\ %{}) do
    |> restrict_announce_object_actor(opts)
    |> restrict_object(opts)
    |> restrict_filtered(opts)
    |> restrict_rule(opts)
    |> restrict_quote_url(opts)
    |> restrict_rule(opts)
    |> maybe_restrict_deactivated_users(opts)
    |> exclude_poll_votes(opts)
    |> exclude_chat_messages(opts)

@@ -1615,12 +1625,37 @@ def fetch_activities_bounded(
    |> Enum.reverse()
  end

  defp validate_media_description_map(%{} = map, language) do
    with {:ok, %{}} <- Pleroma.MultiLanguage.validate_map(map),
         true <- Pleroma.MultiLanguage.good_locale_code?(language) do
      :ok
    else
      false -> :invalid_language
      _ -> :error
    end
  end

  defp validate_media_description_map(nil, _), do: :ok
  defp validate_media_description_map(_, _), do: :error

  @spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
  def upload(file, opts \\ []) do
    with {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
    with {_, :ok} <-
           {:description_map,
            validate_media_description_map(opts[:description_map], opts[:language])},
         {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
      obj_data = Maps.put_if_present(data, "actor", opts[:actor])

      Repo.insert(%Object{data: obj_data})
    else
      {:description_map, :invalid_language} ->
        {:error, dgettext("errors", "valid language must be provided with description_map")}

      {:description_map, _} ->
        {:error, dgettext("errors", "description_map invalid")}

      e ->
        e
    end
  end
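The net effect of the new with clause: a description_map is accepted only together with a valid locale code. Illustrative calls (file and user are placeholders):

# Accepted: map plus a valid language
ActivityPub.upload(file,
  actor: user.ap_id,
  description_map: %{"en" => "A dog", "de" => "Ein Hund"},
  language: "en"
)

# Rejected with "valid language must be provided with description_map"
ActivityPub.upload(file, description_map: %{"en" => "A dog"})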
@@ -62,6 +62,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
    when action in [:following, :followers, :pinned, :inbox, :outbox, :update_outbox]
  )

  plug(:log_inbox_metadata when action in [:inbox])
  plug(:set_requester_reachable when action in [:inbox])
  plug(:relay_active? when action in [:relay])

@@ -531,6 +532,13 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do
    conn
  end

  defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do
    Logger.metadata(actor: actor, type: type)
    conn
  end

  defp log_inbox_metadata(conn, _), do: conn

  def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
    with {:ok, object} <-
           ActivityPub.upload(
@@ -216,19 +216,51 @@ def create(actor, object, recipients) do

  @spec note(ActivityDraft.t()) :: {:ok, map(), keyword()}
  def note(%ActivityDraft{} = draft) do
    content_fields =
      if draft.content_html_map do
        case Map.keys(draft.content_html_map) do
          ["und"] ->
            %{"content" => Map.get(draft.content_html_map, "und")}

          _ ->
            %{
              "contentMap" => draft.content_html_map,
              "content" => Map.get(draft.content_html_map, draft.language)
            }
        end
      else
        %{"content" => draft.content_html}
      end

    summary_fields =
      if draft.summary_map do
        case Map.keys(draft.summary_map) do
          ["und"] ->
            %{"summary" => Map.get(draft.summary_map, "und")}

          _ ->
            %{
              "summaryMap" => draft.summary_map,
              "summary" => Map.get(draft.summary_map, draft.language)
            }
        end
      else
        %{"summary" => draft.summary}
      end

    data =
      %{
        "type" => "Note",
        "to" => draft.to,
        "cc" => draft.cc,
        "content" => draft.content_html,
        "summary" => draft.summary,
        "sensitive" => draft.sensitive,
        "context" => draft.context,
        "attachment" => draft.attachments,
        "actor" => draft.user.ap_id,
        "tag" => Keyword.values(draft.tags) |> Enum.uniq()
      }
      |> Map.merge(content_fields)
      |> Map.merge(summary_fields)
      |> add_in_reply_to(draft.in_reply_to)
      |> add_quote(draft.quote_post)
      |> Map.merge(draft.extra)
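For a draft carrying more than one language, the emitted Note ends up with both the plain fields and the map fields; a sketch of the relevant keys with illustrative values:

%{
  "type" => "Note",
  "content" => "<p>Hello</p>",
  "contentMap" => %{"en" => "<p>Hello</p>", "pl" => "<p>Cześć</p>"},
  "summary" => "greeting",
  "summaryMap" => %{"en" => "greeting", "pl" => "powitanie"}
  # plus "to", "cc", "context", "attachment", "actor", "tag" as before
}

A map whose only key is "und" (undetermined) collapses to the plain "content"/"summary" field, so single-language posts federate exactly as they did before.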
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
  alias Pleroma.Config
  alias Pleroma.User
  require Pleroma.Constants

@@ -11,8 +12,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
  defp user_has_posted?(%User{} = u), do: u.note_count > 0

  defp user_has_age?(%User{} = u) do
    diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :second)
    diff >= :timer.seconds(30)
    user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000)
    diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond)
    diff >= user_age_limit
  end

  defp good_reputation?(%User{} = u) do
142 lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex Normal file
@@ -0,0 +1,142 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
  @moduledoc """
  Dynamic activity filtering based on an RBL database

  This MRF makes queries to a custom DNS server which will
  respond with values indicating the classification of the domain
  the activity originated from. This method has been widely used
  in the email anti-spam industry for very fast reputation checks.

  e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK
  Other values such as 127.0.0.2 may be used for specific classifications.

  Information for why the host is blocked can be stored in a corresponding TXT record.

  This method is fail-open, so if the queries fail the activities are accepted.

  An example of software meant for this purpose is rbldnsd, which can be found
  at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
  https://git.pleroma.social/feld/rbldnsd

  It is highly recommended that you run your own copy of rbldnsd and use an
  external mechanism to sync/share the contents of the zone file. This is
  important to keep the latency on the queries as low as possible and prevent
  your DNS server from being attacked so it fails and content is permitted.
  """

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  alias Pleroma.Config

  require Logger

  @query_retries 1
  @query_timeout 500

  @impl true
  def filter(%{"actor" => actor} = object) do
    actor_info = URI.parse(actor)

    with {:ok, object} <- check_rbl(actor_info, object) do
      {:ok, object}
    else
      _ -> {:reject, "[DNSRBLPolicy]"}
    end
  end

  @impl true
  def filter(object), do: {:ok, object}

  @impl true
  def describe do
    mrf_dnsrbl =
      Config.get(:mrf_dnsrbl)
      |> Enum.into(%{})

    {:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
  end

  @impl true
  def config_description do
    %{
      key: :mrf_dnsrbl,
      related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
      label: "MRF DNSRBL",
      description: "DNS RealTime Blackhole Policy",
      children: [
        %{
          key: :nameserver,
          type: {:string},
          description: "DNSRBL Nameserver to Query (IP or hostname)",
          suggestions: ["127.0.0.1"]
        },
        %{
          key: :port,
          type: {:string},
          description: "Nameserver port",
          suggestions: ["53"]
        },
        %{
          key: :zone,
          type: {:string},
          description: "Root zone for querying",
          suggestions: ["bl.pleroma.com"]
        }
      ]
    }
  end

  defp check_rbl(%{host: actor_host}, object) do
    with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
         zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
      query =
        Enum.join([actor_host, zone], ".")
        |> String.to_charlist()

      rbl_response = rblquery(query)

      if Enum.empty?(rbl_response) do
        {:ok, object}
      else
        Task.start(fn ->
          reason = rblquery(query, :txt) || "undefined"

          Logger.warning(
            "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
          )
        end)

        :error
      end
    else
      _ -> {:ok, object}
    end
  end

  defp get_rblhost_ip(rblhost) do
    case rblhost |> String.to_charlist() |> :inet_parse.address() do
      {:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
      _ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
    end
  end

  defp rblquery(query, type \\ :a) do
    config = Config.get([:mrf_dnsrbl])

    case get_rblhost_ip(config[:nameserver]) do
      {:ok, rblnsip} ->
        :inet_res.lookup(query, :in, type,
          nameservers: [{rblnsip, config[:port]}],
          timeout: @query_timeout,
          retry: @query_retries
        )

      _ ->
        []
    end
  end
end
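A minimal sketch of enabling this policy, using the same keys config_description/0 above declares; the nameserver and zone values are illustrative:

config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy]

config :pleroma, :mrf_dnsrbl,
  nameserver: "127.0.0.1",
  port: "53",
  zone: "bl.pleroma.com"

With this in place, an activity from spam.example triggers an A lookup for spam.example.bl.pleroma.com; an empty answer accepts the activity, a listing rejects it and logs the reason from the TXT record, and query failures fail open.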
@@ -12,18 +12,41 @@ defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrepended do

  def history_awareness, do: :auto

  def filter_by_summary(
        %{data: %{"summaryMap" => %{} = parent_summary_map} = parent},
        %{"summaryMap" => %{} = child_summary_map} = child
      ) do
    fixed_summary_map =
      Enum.reduce(child_summary_map, %{}, fn {lang, cur}, acc ->
        with {:ok, fixed_cur} <- fix_one(cur, parent_summary_map[lang]) do
          Map.put(acc, lang, fixed_cur)
        else
          _ -> Map.put(acc, lang, cur)
        end
      end)

    fixed_summary =
      with {:ok, fixed} <- fix_one(child["summary"], parent["summary"]) do
        fixed
      else
        _ -> child["summary"]
      end

    child
    |> Map.put("summaryMap", fixed_summary_map)
    |> Map.put("summary", fixed_summary)
  end

  def filter_by_summary(
        %{data: %{"summary" => parent_summary}} = _in_reply_to,
        %{"summary" => child_summary} = child
      )
      when not is_nil(child_summary) and byte_size(child_summary) > 0 and
             not is_nil(parent_summary) and byte_size(parent_summary) > 0 do
    if (child_summary == parent_summary and not Regex.match?(@reply_prefix, child_summary)) or
         (Regex.match?(@reply_prefix, parent_summary) &&
            Regex.replace(@reply_prefix, parent_summary, "") == child_summary) do
      Map.put(child, "summary", "re: " <> child_summary)
    with {:ok, fixed_child_summary} <- fix_one(child_summary, parent_summary) do
      Map.put(child, "summary", fixed_child_summary)
    else
      child
      _ -> child
    end
  end

@@ -44,4 +67,20 @@ def filter(%{"type" => type, "object" => child_object} = object)
  def filter(object), do: {:ok, object}

  def describe, do: {:ok, %{}}

  defp fix_one(child_summary, parent_summary)
       when is_binary(child_summary) and child_summary != "" and is_binary(parent_summary) and
              parent_summary != "" do
    if (child_summary == parent_summary and not Regex.match?(@reply_prefix, child_summary)) or
         (Regex.match?(@reply_prefix, parent_summary) &&
            Regex.replace(@reply_prefix, parent_summary, "") == child_summary) do
      {:ok, "re: " <> child_summary}
    else
      {:nochange, nil}
    end
  end

  defp fix_one(_, _) do
    {:nochange, nil}
  end
end
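The per-language reduction applies the same rule the original binary clause used; a worked example of the summaryMap path, with illustrative values:

# parent: %{"summaryMap" => %{"en" => "lunch", "pl" => "obiad"}}
# child:  %{"summaryMap" => %{"en" => "lunch", "pl" => "obiad"}}
# result: %{"summaryMap" => %{"en" => "re: lunch", "pl" => "re: obiad"}}
#
# A language entry whose summary already starts with "re: " is left
# untouched, and the plain "summary" field is kept in sync via fix_one/2.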
@@ -78,23 +78,52 @@ defp clean_recipients(recipients, object) do
  def filter(
        %{
          "type" => type,
          "object" => %{"type" => "Note", "to" => to, "inReplyTo" => in_reply_to}
        } = object
          "object" => %{"type" => "Note", "to" => to, "inReplyTo" => in_reply_to} = object
        } = activity
      )
      when type in ["Create", "Update"] and is_list(to) and is_binary(in_reply_to) do
    # image-only posts from pleroma apparently reach this MRF without the content field
    content = object["object"]["content"] || ""

    # Get the replied-to user for sorting
    replied_to_user = get_replied_to_user(object["object"])
    replied_to_user = get_replied_to_user(object)

    mention_users =
      to
      |> clean_recipients(object)
      |> clean_recipients(activity)
      |> Enum.map(&User.get_cached_by_ap_id/1)
      |> Enum.reject(&is_nil/1)
      |> sort_replied_user(replied_to_user)

    fixed_object =
      with %{} = content_map <- object["contentMap"] do
        fixed_content_map =
          Enum.reduce(content_map, %{}, fn {lang, content}, acc ->
            fixed_content = fix_content(content, mention_users)

            Map.put(acc, lang, fixed_content)
          end)

        object
        |> Map.put("contentMap", fixed_content_map)
        |> Map.put("content", fix_content(object["content"] || "", mention_users))
      else
        _ ->
          # image-only posts from pleroma apparently reach this MRF without the content field
          content = object["content"] || ""

          fixed_content = fix_content(content, mention_users)

          Map.put(object, "content", fixed_content)
      end

    {:ok, put_in(activity["object"], fixed_object)}
  end

  @impl true
  def filter(object), do: {:ok, object}

  @impl true
  def describe, do: {:ok, %{}}

  defp fix_content(content, mention_users) do
    explicitly_mentioned_uris =
      extract_mention_uris_from_content(content)
      |> MapSet.new()

@@ -113,25 +142,16 @@ def filter(
          do: "<span class=\"recipients-inline\">#{added_mentions}</span>",
          else: ""

    content =
      cond do
        # For Markdown posts, insert the mentions inside the first <p> tag
        recipients_inline != "" && String.starts_with?(content, "<p>") ->
          "<p>" <> recipients_inline <> String.trim_leading(content, "<p>")
    cond do
      # For Markdown posts, insert the mentions inside the first <p> tag
      recipients_inline != "" && String.starts_with?(content, "<p>") ->
        "<p>" <> recipients_inline <> String.trim_leading(content, "<p>")

        recipients_inline != "" ->
          recipients_inline <> content
      recipients_inline != "" ->
        recipients_inline <> content

        true ->
          content
      end

    {:ok, put_in(object["object"]["content"], content)}
      true ->
        content
    end
  end

  @impl true
  def filter(object), do: {:ok, object}

  @impl true
  def describe, do: {:ok, %{}}
end
@@ -85,18 +85,32 @@ defp check_ftl_removal(message) do
  end

  defp check_replace(%{"object" => %{} = object} = message) do
    config = Pleroma.Config.get([:mrf_keyword, :replace])

    replace_kw = fn object ->
      ["content", "name", "summary"]
      |> Enum.filter(fn field -> Map.has_key?(object, field) && object[field] end)
      |> Enum.filter(fn field ->
        is_map(object[field <> "Map"]) or
          (Map.has_key?(object, field) && object[field])
      end)
      |> Enum.reduce(object, fn field, object ->
        data =
          Enum.reduce(
            Pleroma.Config.get([:mrf_keyword, :replace]),
            object[field],
            fn {pat, repl}, acc -> String.replace(acc, pat, repl) end
          )
        field_name_map = field <> "Map"

        Map.put(object, field, data)
        with %{} = data_map <- object[field_name_map] do
          fixed_data_map =
            Enum.reduce(data_map, %{}, fn {lang, content}, acc ->
              Map.put(acc, lang, replace_keyword(content, config))
            end)

          object
          |> Map.put(field_name_map, fixed_data_map)
          |> Map.put(field, replace_keyword(object[field], config))
        else
          _ ->
            data = replace_keyword(object[field], config)

            Map.put(object, field, data)
        end
      end)
      |> (fn object -> {:ok, object} end).()
    end

@@ -108,6 +122,14 @@ defp check_replace(%{"object" => %{} = object} = message) do
    {:ok, message}
  end

  defp replace_keyword(data, config) do
    Enum.reduce(
      config,
      data,
      fn {pat, repl}, acc -> String.replace(acc, pat, repl) end
    )
  end

  @impl true
  def filter(%{"type" => type, "object" => %{"content" => _content}} = message)
      when type in ["Create", "Update"] do
@@ -11,11 +11,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do

  require Logger

  @adapter_options [
    pool: :media,
    recv_timeout: 10_000
  ]

  @impl true
  def history_awareness, do: :auto

@@ -27,17 +22,14 @@ defp prefetch(url) do

    Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}")

    if Pleroma.Config.get(:env) == :test do
      fetch(prefetch_url)
    else
      ConcurrentLimiter.limit(__MODULE__, fn ->
        Task.start(fn -> fetch(prefetch_url) end)
      end)
    end
    fetch(prefetch_url)
  end
  end

  defp fetch(url), do: HTTP.get(url, [], @adapter_options)
  defp fetch(url) do
    http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
    HTTP.get(url, [], http_client_opts)
  end

  defp preload(%{"object" => %{"attachment" => attachments}} = _message) do
    Enum.each(attachments, fn
@@ -43,8 +43,16 @@ defp has_attachment?(_), do: false
  defp only_mentions?(%{"object" => %{"type" => "Note", "source" => source}}) do
    source =
      case source do
        %{"content" => text} -> text
        _ -> source
        %{"contentMap" => %{} = text_map} ->
          text_map
          |> Enum.map(fn {_, content} -> content end)
          |> Enum.join("\n")

        %{"content" => text} ->
          text

        _ ->
          source
      end

    non_mentions =
@@ -6,9 +6,53 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
  @moduledoc "Ensure no content placeholder is present (such as the dot from mastodon)"
  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  @placeholders [".", "<p>.</p>"]

  @impl true
  def history_awareness, do: :auto

  @impl true
  def filter(
        %{
          "type" => type,
          "object" => %{"contentMap" => %{} = content_map, "attachment" => _} = child_object
        } = object
      )
      when type in ["Create", "Update"] do
    fixed_content_map =
      Enum.reduce(content_map, %{}, fn {lang, content}, acc ->
        if content in @placeholders do
          acc
        else
          Map.put(acc, lang, content)
        end
      end)

    fixed_content =
      if child_object["content"] in @placeholders do
        ""
      else
        child_object["content"]
      end

    fixed_object =
      if fixed_content_map == %{} do
        Map.put(
          object,
          "object",
          object["object"]
          |> Map.drop(["contentMap"])
          |> Map.put("content", "")
        )
      else
        object
        |> put_in(["object", "contentMap"], fixed_content_map)
        |> put_in(["object", "content"], fixed_content)
      end

    {:ok, fixed_object}
  end

  @impl true
  def filter(
        %{

@@ -16,7 +60,7 @@ def filter(
          "object" => %{"content" => content, "attachment" => _} = _child_object
        } = object
      )
      when type in ["Create", "Update"] and content in [".", "<p>.</p>"] do
      when type in ["Create", "Update"] and content in @placeholders do
    {:ok, put_in(object, ["object", "content"], "")}
  end
@@ -16,11 +16,24 @@ def filter(%{"type" => type, "object" => child_object} = object)
      when type in ["Create", "Update"] do
    scrub_policy = Pleroma.Config.get([:mrf_normalize_markup, :scrub_policy])

    content =
      child_object["content"]
      |> HTML.filter_tags(scrub_policy)
    object =
      with %{} = content_map <- child_object["contentMap"] do
        fixed_content_map =
          Enum.reduce(content_map, %{}, fn {lang, content}, acc ->
            Map.put(acc, lang, HTML.filter_tags(content, scrub_policy))
          end)

    object = put_in(object, ["object", "content"], content)
        object
        |> put_in(["object", "contentMap"], fixed_content_map)
        |> put_in(["object", "content"], HTML.filter_tags(child_object["content"], scrub_policy))
      else
        _ ->
          content =
            child_object["content"]
            |> HTML.filter_tags(scrub_policy)

          put_in(object, ["object", "content"], content)
      end

    {:ok, object}
  end
@@ -71,27 +71,6 @@ defp fix_replies(%{"replies" => replies} = data) when not is_list(replies),

  defp fix_replies(data), do: data

  defp fix_quote_url(%{"quoteUrl" => _quote_url} = data), do: data

  # Fedibird
  # https://github.com/fedibird/mastodon/commit/dbd7ae6cf58a92ec67c512296b4daaea0d01e6ac
  defp fix_quote_url(%{"quoteUri" => quote_url} = data) do
    Map.put(data, "quoteUrl", quote_url)
  end

  # Old Fedibird (bug)
  # https://github.com/fedibird/mastodon/issues/9
  defp fix_quote_url(%{"quoteURL" => quote_url} = data) do
    Map.put(data, "quoteUrl", quote_url)
  end

  # Misskey fallback
  defp fix_quote_url(%{"_misskey_quote" => quote_url} = data) do
    Map.put(data, "quoteUrl", quote_url)
  end

  defp fix_quote_url(data), do: data

  def fix_attachments(%{"attachment" => attachment} = data) when is_map(attachment),
    do: Map.put(data, "attachment", [attachment])

@@ -104,7 +83,6 @@ defp fix(data, meta) do
    |> fix_url()
    |> fix_tag()
    |> fix_replies()
    |> fix_quote_url()
    |> fix_attachments()
    |> CommonFixes.fix_quote_url()
    |> Transmogrifier.fix_emoji()
@@ -15,6 +15,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
    field(:type, :string, default: "Link")
    field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
    field(:name, :string)
    field(:nameMap, ObjectValidators.ContentLanguageMap)
    field(:summary, :string)
    field(:blurhash, :string)

    embeds_many :url, UrlObjectValidator, primary_key: false do

@@ -44,7 +46,7 @@ def changeset(struct, data) do
      |> fix_url()

    struct
    |> cast(data, [:id, :type, :mediaType, :name, :blurhash])
    |> cast(data, [:id, :type, :mediaType, :name, :nameMap, :summary, :blurhash])
    |> cast_embed(:url, with: &url_changeset/2, required: true)
  end
@@ -50,7 +50,9 @@ defmacro status_object_fields do
      embeds_many(:tag, TagValidator)

      field(:name, :string)
      field(:nameMap, ObjectValidators.ContentLanguageMap)
      field(:summary, :string)
      field(:summaryMap, ObjectValidators.ContentLanguageMap)

      field(:context, :string)
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
  alias Pleroma.EctoType.ActivityPub.ObjectValidators
  alias Pleroma.Language.LanguageDetector
  alias Pleroma.Maps
  alias Pleroma.MultiLanguage
  alias Pleroma.Object
  alias Pleroma.Object.Containment
  alias Pleroma.User

@@ -14,9 +15,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do

  require Pleroma.Constants

  import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
    only: [is_good_locale_code?: 1]

  import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]

  def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do

@@ -163,7 +161,7 @@ def maybe_add_language(object) do
        get_language_from_content_map(object),
        get_language_from_content(object)
      ]
      |> Enum.find(&is_good_locale_code?(&1))
      |> Enum.find(&MultiLanguage.good_locale_code?(&1))

    if language do
      Map.put(object, "language", language)

@@ -202,6 +200,8 @@ defp get_language_from_content(%{"content" => content}) do

  defp get_language_from_content(_), do: nil

  def maybe_add_content_map(%{"contentMap" => %{}} = object), do: object

  def maybe_add_content_map(%{"language" => language, "content" => content} = object)
      when not_empty_string(language) do
    Map.put(object, "contentMap", Map.put(%{}, language, content))
@@ -7,10 +7,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator do

  import Ecto.Changeset

  alias Pleroma.EctoType.ActivityPub.ObjectValidators

  @primary_key false

  embedded_schema do
    field(:name, :string)
    field(:nameMap, ObjectValidators.ContentLanguageMap)

    embeds_one :replies, Replies, primary_key: false do
      field(:totalItems, :integer)

@@ -22,7 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator do

  def changeset(struct, data) do
    struct
    |> cast(data, [:name, :type])
    |> cast(data, [:name, :nameMap, :type])
    |> cast_embed(:replies, with: &replies_changeset/2)
    |> validate_inclusion(:type, ["Note"])
    |> validate_required([:name, :type])
@@ -22,7 +22,6 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
  alias Pleroma.Web.ActivityPub.Pipeline
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Push
  alias Pleroma.Web.Streamer
  alias Pleroma.Workers.EventReminderWorker
  alias Pleroma.Workers.PollWorker

@@ -116,7 +115,7 @@ def handle(
      nil
    end

    {:ok, notifications} = Notification.create_notifications(object, do_send: false)
    {:ok, notifications} = Notification.create_notifications(object)

    meta =
      meta

@@ -175,7 +174,11 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
    liked_object = Object.get_by_ap_id(object.data["object"])
    Utils.add_like_to_object(object, liked_object)

    Notification.create_notifications(object)
    {:ok, notifications} = Notification.create_notifications(object)

    meta =
      meta
      |> add_notifications(notifications)

    {:ok, object, meta}
  end

@@ -193,7 +196,7 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
  def handle(%{data: %{"type" => "Create"}} = activity, meta) do
    with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
         %User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
      {:ok, notifications} = Notification.create_notifications(activity, do_send: false)
      {:ok, notifications} = Notification.create_notifications(activity)
      {:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
      {:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)

@@ -218,9 +221,7 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
        end
      end

      ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
        Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
      end)
      Pleroma.Web.RichMedia.Card.get_by_activity(activity)

      Pleroma.Search.add_to_index(Map.put(activity, :object, object))

@@ -253,11 +254,13 @@ def handle(%{data: %{"type" => "Announce"}} = object, meta) do

    Utils.add_announce_to_object(object, announced_object)

    if !User.internal?(user) do
      Notification.create_notifications(object)
    {:ok, notifications} = Notification.create_notifications(object)

      ap_streamer().stream_out(object)
    end
    if !User.internal?(user), do: ap_streamer().stream_out(object)

    meta =
      meta
      |> add_notifications(notifications)

    {:ok, object, meta}
  end

@@ -278,7 +281,11 @@ def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
    reacted_object = Object.get_by_ap_id(object.data["object"])
    Utils.add_emoji_reaction_to_object(object, reacted_object)

    Notification.create_notifications(object)
    {:ok, notifications} = Notification.create_notifications(object)

    meta =
      meta
      |> add_notifications(notifications)

    {:ok, object, meta}
  end

@@ -689,10 +696,7 @@ defp delete_object(object) do

  defp send_notifications(meta) do
    Keyword.get(meta, :notifications, [])
    |> Enum.each(fn notification ->
      Streamer.stream(["user", "user:notification"], notification)
      Push.send(notification)
    end)
    |> Notification.send()

    meta
  end
@@ -215,7 +215,7 @@ def fix_context(object) do
  def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
    attachments =
      attachment
      |> Enum.filter(fn data -> Map.has_key?(data, "url") end)
      |> Enum.filter(fn data -> Map.has_key?(data, "url") or Map.has_key?(data, "href") end)
      |> Enum.map(fn data ->
        url =
          cond do

@@ -342,10 +342,12 @@ def fix_tag(%{"tag" => %{} = tag} = object) do

  def fix_tag(object), do: object

  # prefer content over contentMap
  def fix_content_map(%{"content" => content} = object) when not_empty_string(content), do: object

  # content map usually only has one language so this will do for now.
  def fix_content_map(%{"contentMap" => content_map} = object) do
  def fix_content_map(%{"contentMap" => content_map} = object)
      when is_map(content_map) and content_map != %{} do
    content_groups = Map.to_list(content_map)
    {_, content} = Enum.at(content_groups, 0)
Some files were not shown because too many files have changed in this diff.