Merge remote-tracking branch 'origin/develop' into fork

Signed-off-by: marcin mikołajczak <git@mkljczk.pl>
This commit is contained in:
marcin mikołajczak 2024-06-23 23:35:05 +02:00
commit 5d1cb904ac
245 changed files with 783 additions and 625 deletions

3
.gitignore vendored
View file

@ -61,3 +61,6 @@ pleroma.iml
*~ *~
*# *#
*.swp *.swp
archive-*
.gitlab-ci-local

View file

@ -1,4 +1,4 @@
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-25
variables: &global_variables variables: &global_variables
# Only used for the release # Only used for the release
@ -18,9 +18,7 @@ workflow:
- if: $CI_COMMIT_BRANCH - if: $CI_COMMIT_BRANCH
cache: &global_cache_policy cache: &global_cache_policy
key: key: $CI_JOB_IMAGE-$CI_COMMIT_SHORT_SHA
files:
- mix.lock
paths: paths:
- deps - deps
- _build - _build
@ -72,7 +70,7 @@ check-changelog:
tags: tags:
- amd64 - amd64
build-1.13.4: build-1.13.4-otp-25:
extends: extends:
- .build_changes_policy - .build_changes_policy
- .using-ci-base - .using-ci-base
@ -80,13 +78,12 @@ build-1.13.4:
script: script:
- mix compile --force - mix compile --force
build-1.15.7-otp-25: build-1.15.8-otp-26:
extends: extends:
- .build_changes_policy - .build_changes_policy
- .using-ci-base - .using-ci-base
stage: build stage: build
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26
allow_failure: true
script: script:
- mix compile --force - mix compile --force
@ -121,7 +118,7 @@ benchmark:
- mix ecto.migrate - mix ecto.migrate
- mix pleroma.load_testing - mix pleroma.load_testing
unit-testing-1.12.3: unit-testing-1.13.4-otp-25:
extends: extends:
- .build_changes_policy - .build_changes_policy
- .using-ci-base - .using-ci-base
@ -136,7 +133,7 @@ unit-testing-1.12.3:
script: &testing_script script: &testing_script
- mix ecto.create - mix ecto.create
- mix ecto.migrate - mix ecto.migrate
- mix test --cover --preload-modules - mix pleroma.test_runner --cover --preload-modules
coverage: '/^Line total: ([^ ]*%)$/' coverage: '/^Line total: ([^ ]*%)$/'
artifacts: artifacts:
reports: reports:
@ -144,34 +141,19 @@ unit-testing-1.12.3:
coverage_format: cobertura coverage_format: cobertura
path: coverage.xml path: coverage.xml
unit-testing-1.15.7-otp-25: unit-testing-1.15.8-otp-26:
extends: extends:
- .build_changes_policy - .build_changes_policy
- .using-ci-base - .using-ci-base
stage: test stage: test
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26
allow_failure: true
cache: *testing_cache_policy cache: *testing_cache_policy
services: *testing_services services: *testing_services
script: *testing_script script: *testing_script
unit-testing-1.12-erratic: formatting-1.15:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
retry: 2
allow_failure: true
cache: *testing_cache_policy
services: *testing_services
script:
- mix ecto.create
- mix ecto.migrate
- mix test --only=erratic
formatting-1.13:
extends: .build_changes_policy extends: .build_changes_policy
image: &formatting_elixir elixir:1.13-alpine image: &formatting_elixir elixir:1.15-alpine
stage: lint stage: lint
cache: *testing_cache_policy cache: *testing_cache_policy
before_script: &current_bfr_script before_script: &current_bfr_script

View file

@ -0,0 +1 @@
Fix Emoji object IDs not always being valid

View file

@ -0,0 +1 @@
Elixir Logger configuration is no longer permitted through AdminFE and ConfigDB

View file

View file

View file

View file

View file

@ -0,0 +1 @@
Update and extend NetBSD installation docs

View file

@ -0,0 +1 @@
Elixir 1.15 compatibility

View file

@ -0,0 +1 @@
Gun Connection Pool was not retrying to acquire a connection if the pool was full and stale connections were reclaimed

View file

@ -0,0 +1 @@
Rich Media backfilling is now an Oban job

View file

View file

@ -0,0 +1 @@
End of poll notifications were not streamed over websockets or web push

View file

View file

@ -0,0 +1 @@
User profile refreshes are now asynchronous

View file

@ -0,0 +1 @@
Render nice web push notifications for polls

View file

@ -1,4 +1,4 @@
FROM elixir:1.13.4-otp-24 FROM elixir:1.13.4-otp-25
# Single RUN statement, otherwise intermediate images are created # Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run # https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run

View file

@ -1 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push . docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-25 --push .

View file

@ -1 +0,0 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .

View file

@ -1,4 +1,4 @@
FROM elixir:1.15.7-otp-25 FROM elixir:1.15.8-otp-26
# Single RUN statement, otherwise intermediate images are created # Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run # https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run

View file

@ -0,0 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26 --push .

View file

@ -132,6 +132,8 @@
] ]
# Configures Elixir's Logger # Configures Elixir's Logger
config :logger, backends: [:console]
config :logger, :console, config :logger, :console,
level: :debug, level: :debug,
format: "\n$time $metadata[$level] $message\n", format: "\n$time $metadata[$level] $message\n",

View file

@ -1260,79 +1260,6 @@
} }
] ]
}, },
%{
group: :logger,
type: :group,
description: "Logger-related settings",
children: [
%{
key: :backends,
type: [:atom, :tuple, :module],
description:
"Where logs will be sent, :console - send logs to stdout, { ExSyslogger, :ex_syslogger } - to syslog, Quack.Logger - to Slack.",
suggestions: [:console, {ExSyslogger, :ex_syslogger}]
}
]
},
%{
group: :logger,
type: :group,
key: :ex_syslogger,
label: "ExSyslogger",
description: "ExSyslogger-related settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warning, :error]
},
%{
key: :ident,
type: :string,
description:
"A string that's prepended to every message, and is typically set to the app name",
suggestions: ["pleroma"]
},
%{
key: :format,
type: :string,
description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
suggestions: ["$metadata[$level] $message"]
},
%{
key: :metadata,
type: {:list, :atom},
suggestions: [:request_id]
}
]
},
%{
group: :logger,
type: :group,
key: :console,
label: "Console Logger",
description: "Console logger settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warning, :error]
},
%{
key: :format,
type: :string,
description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
suggestions: ["$metadata[$level] $message"]
},
%{
key: :metadata,
type: {:list, :atom},
suggestions: [:request_id]
}
]
},
%{ %{
group: :pleroma, group: :pleroma,
key: :frontend_configurations, key: :frontend_configurations,

View file

@ -36,7 +36,7 @@
# different ports. # different ports.
# Do not include timestamps in development logs # Do not include timestamps in development logs
config :logger, :console, format: "$metadata[$level] $message\n" config :logger, Logger.Backends.Console, format: "$metadata[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such # Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive. # in production as building large stacktraces may be expensive.

View file

@ -20,6 +20,7 @@
config :phoenix, serve_endpoints: true config :phoenix, serve_endpoints: true
# Do not print debug messages in production # Do not print debug messages in production
config :logger, Logger.Backends.Console, level: :info
config :logger, :console, level: :info config :logger, :console, level: :info
config :logger, :ex_syslogger, level: :info config :logger, :ex_syslogger, level: :info

View file

@ -53,7 +53,8 @@
hostname: System.get_env("DB_HOST") || "localhost", hostname: System.get_env("DB_HOST") || "localhost",
port: System.get_env("DB_PORT") || "5432", port: System.get_env("DB_PORT") || "5432",
pool: Ecto.Adapters.SQL.Sandbox, pool: Ecto.Adapters.SQL.Sandbox,
pool_size: System.schedulers_online() * 2 pool_size: System.schedulers_online() * 2,
log: false
config :pleroma, :dangerzone, override_repo_pool_size: true config :pleroma, :dangerzone, override_repo_pool_size: true
@ -189,6 +190,8 @@
streamer_registry: false, streamer_registry: false,
test_http_pools: true test_http_pools: true
config :pleroma, Pleroma.Web.Streaming, sync_streaming: true
config :pleroma, Pleroma.Uploaders.Uploader, timeout: 1_000 config :pleroma, Pleroma.Uploaders.Uploader, timeout: 1_000
config :pleroma, Pleroma.Emoji.Loader, test_emoji: true config :pleroma, Pleroma.Emoji.Loader, test_emoji: true

View file

@ -853,7 +853,7 @@ config :logger,
backends: [{ExSyslogger, :ex_syslogger}] backends: [{ExSyslogger, :ex_syslogger}]
config :logger, :ex_syslogger, config :logger, :ex_syslogger,
level: :warn level: :warning
``` ```
Another example, keeping console output and adding the pid to syslog output: Another example, keeping console output and adding the pid to syslog output:
@ -862,7 +862,7 @@ config :logger,
backends: [:console, {ExSyslogger, :ex_syslogger}] backends: [:console, {ExSyslogger, :ex_syslogger}]
config :logger, :ex_syslogger, config :logger, :ex_syslogger,
level: :warn, level: :warning,
option: [:pid, :ndelay] option: [:pid, :ndelay]
``` ```

View file

@ -1,7 +1,7 @@
## Required dependencies ## Required dependencies
* PostgreSQL >=11.0 * PostgreSQL >=11.0
* Elixir >=1.13.0 <1.15 * Elixir >=1.13.0 <1.17
* Erlang OTP >=22.2.0 (supported: <27) * Erlang OTP >=22.2.0 (supported: <27)
* git * git
* file / libmagic * file / libmagic

View file

@ -2,14 +2,41 @@
{! backend/installation/generic_dependencies.include !} {! backend/installation/generic_dependencies.include !}
## Installing software used in this guide # Installation options
Currently there are two options available for NetBSD: manual installation (from source) or using experimental package from [pkgsrc-wip](https://github.com/NetBSD/pkgsrc-wip/tree/master/pleroma).
WIP package can be installed via pkgsrc and can be crosscompiled for easier binary distribution. Source installation most probably will be restricted to a single machine.
## pkgsrc installation
WIP package creates Mix.Release (similar to how Docker images are built) but doesn't bundle Erlang runtime, listing it as a dependency instead. This allows for easier and more modular installations, especially on weaker machines. Currently this method also does not support all features of `pleroma_ctl` command (like changing installation type or managing frontends) as NetBSD is not yet a supported binary flavour of Pleroma's CI.
In any case, you can install it the same way as any other `pkgsrc-wip` package:
```
cd /usr/pkgsrc
git clone --depth 1 git://wip.pkgsrc.org/pkgsrc-wip.git wip
cp -rf wip/pleroma www
cp -rf wip/libvips graphics
cd /usr/pkgsrc/www/pleroma
bmake && bmake install
```
Use `bmake package` to create a binary package. This can come especially handy if you're targeting embedded or low-power systems and are crosscompiling on a more powerful machine.
> Note: Elixir has [endianness bug](https://github.com/elixir-lang/elixir/issues/2785) which requires it to be compiled on a machine with the same endianness. In other words, package crosscompiled on amd64 (little endian) won't work on powerpc or sparc machines (big endian). While _in theory™_ nothing catastrophic should happen, one can see that for example regexes won't work properly. Some distributions just strip this warning away, so it doesn't bother the users... anyway, you've been warned.
## Source installation
pkgin should have been installed by the NetBSD installer if you selected pkgin should have been installed by the NetBSD installer if you selected
the right options. If it isn't installed, install it using pkg_add. the right options. If it isn't installed, install it using `pkg_add`.
Note that `postgresql11-contrib` is needed for the Postgres extensions Note that `postgresql11-contrib` is needed for the Postgres extensions
Pleroma uses. Pleroma uses.
> Note: you can use modern versions of PostgreSQL. In this case, just use `postgresql16-contrib` and so on.
The `mksh` shell is needed to run the Elixir `mix` script. The `mksh` shell is needed to run the Elixir `mix` script.
`# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo ffmpeg4 ImageMagick` `# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo ffmpeg4 ImageMagick`
@ -29,29 +56,6 @@ shells/mksh
www/nginx www/nginx
``` ```
Copy the rc.d scripts to the right directory:
```
# cp /usr/pkg/share/examples/rc.d/nginx /usr/pkg/share/examples/rc.d/pgsql /etc/rc.d
```
Add nginx and Postgres to `/etc/rc.conf`:
```
nginx=YES
pgsql=YES
```
## Configuring postgres
First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.
### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md))
`# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`
## Configuring Pleroma
Create a user for Pleroma: Create a user for Pleroma:
``` ```
@ -68,41 +72,98 @@ $ cd /home/pleroma
$ git clone -b stable https://git.pleroma.social/pleroma/pleroma.git $ git clone -b stable https://git.pleroma.social/pleroma/pleroma.git
``` ```
Configure Pleroma. Note that you need a domain name at this point: Get deps and compile:
``` ```
$ cd /home/pleroma/pleroma $ cd /home/pleroma/pleroma
$ export MIX_ENV=prod
$ mix deps.get $ mix deps.get
$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here. $ mix compile
``` ```
Since Postgres is configured, we can now initialize the database. There should ## Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md))
now be a file in `config/setup_db.psql` that makes this easier. Edit it, and
*change the password* to a password of your choice. Make sure it is secure, since `# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`
or via pkgsrc:
```
graphics/p5-Image-ExifTool
graphics/ImageMagick
multimedia/ffmpeg4
```
# Configuration
## Understanding $PREFIX
From now on, you may encounter `$PREFIX` variable in the paths. This variable indicates your current local pkgsrc prefix. Usually it's `/usr/pkg` unless you configured it otherwise. Translating to pkgsrc's lingo, it's called `LOCALBASE`, which essentially means the same thing. You may want to set it up for your local shell session (this uses `mksh` which should already be installed as one of the required dependencies):
```
$ export PREFIX=$(pkg_info -Q LOCALBASE mksh)
$ echo $PREFIX
/usr/pkg
```
## Setting up your instance
Now, you need to configure your instance. During this initial configuration, you will be asked some questions about your server. You will need a domain name at this point; it doesn't have to be deployed, but changing it later will be very cumbersome.
If you've installed via pkgsrc, `pleroma_ctl` should already be in your `PATH`; if you've installed from source, it's located at `/home/pleroma/pleroma/release/bin/pleroma_ctl`.
```
$ su -l pleroma
$ pleroma_ctl instance gen --output $PREFIX/etc/pleroma/config.exs --output-psql /tmp/setup_db.psql
```
During installation, you will be asked about static and upload directories. Don't forget to create them and update permissions:
```
mkdir -p /var/lib/pleroma/uploads
chown -R pleroma:pleroma /var/lib/pleroma
```
## Setting up the database
First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.
We can now initialize the database. You'll need to edit the generated SQL file from the previous step. It's located at `/tmp/setup_db.psql`.
Edit this file, and *change the password* to a password of your choice. Make sure it is secure, since
it'll be protecting your database. Now initialize the database: it'll be protecting your database. Now initialize the database:
``` ```
$ sudo -Hu pgsql -g pgsql psql -f config/setup_db.psql $ sudo -Hu pgsql -g pgsql psql -f /tmp/setup_db.psql
``` ```
Postgres allows connections from all users without a password by default. To Postgres allows connections from all users without a password by default. To
fix this, edit `/usr/pkg/pgsql/data/pg_hba.conf`. Change every `trust` to fix this, edit `$PREFIX/pgsql/data/pg_hba.conf`. Change every `trust` to
`password`. `password`.
Once this is done, restart Postgres with `# /etc/rc.d/pgsql restart`. Once this is done, restart Postgres with `# /etc/rc.d/pgsql restart`.
Run the database migrations. Run the database migrations.
### pkgsrc installation
```
pleroma_ctl migrate
```
### Source installation
You will need to do this whenever you update with `git pull`: You will need to do this whenever you update with `git pull`:
``` ```
$ cd /home/pleroma/pleroma
$ MIX_ENV=prod mix ecto.migrate $ MIX_ENV=prod mix ecto.migrate
``` ```
## Configuring nginx ## Configuring nginx
Install the example configuration file Install the example configuration file
`/home/pleroma/pleroma/installation/pleroma.nginx` to (`$PREFIX/share/examples/pleroma/pleroma.nginx` or `/home/pleroma/pleroma/installation/pleroma.nginx`) to
`/usr/pkg/etc/nginx.conf`. `$PREFIX/etc/nginx.conf`.
Note that it will need to be wrapped in a `http {}` block. You should add Note that it will need to be wrapped in a `http {}` block. You should add
settings for the nginx daemon outside of the http block, for example: settings for the nginx daemon outside of the http block, for example:
@ -176,27 +237,45 @@ Let's add auto-renewal to `/etc/daily.local`
--stateless --stateless
``` ```
## Creating a startup script for Pleroma ## Autostart
Copy the startup script to the correct location and make sure it's executable: For a properly functioning instance, you will need pleroma (backend service), nginx (reverse proxy) and postgresql (database) services running. There's no requirement for them to reside on the same machine, but you have to provide autostart for each of them.
### nginx
```
# cp $PREFIX/share/examples/rc.d/nginx /etc/rc.d
# echo "nginx=YES" >> /etc/rc.conf
```
### postgresql
```
# cp $PREFIX/share/examples/rc.d/pgsql /etc/rc.d
# echo "pgsql=YES" >> /etc/rc.conf
```
### pleroma
First, copy the script (pkgsrc variant)
```
# cp $PREFIX/share/examples/pleroma/pleroma.rc /etc/rc.d/pleroma
```
or source variant
``` ```
# cp /home/pleroma/pleroma/installation/netbsd/rc.d/pleroma /etc/rc.d/pleroma # cp /home/pleroma/pleroma/installation/netbsd/rc.d/pleroma /etc/rc.d/pleroma
# chmod +x /etc/rc.d/pleroma # chmod +x /etc/rc.d/pleroma
``` ```
Add the following to `/etc/rc.conf`: Then, add the following to `/etc/rc.conf`:
``` ```
pleroma=YES pleroma=YES
pleroma_home="/home/pleroma"
pleroma_user="pleroma"
``` ```
Run `# /etc/rc.d/pleroma start` to start Pleroma.
## Conclusion ## Conclusion
Run `# /etc/rc.d/pleroma start` to start Pleroma.
Restart nginx with `# /etc/rc.d/nginx restart` and you should be up and running. Restart nginx with `# /etc/rc.d/nginx restart` and you should be up and running.
Make sure your time is in sync, or other instances will receive your posts with Make sure your time is in sync, or other instances will receive your posts with

View file

@ -1,11 +1,14 @@
#!/bin/sh #!/bin/sh
# PROVIDE: pleroma # PROVIDE: pleroma
# REQUIRE: DAEMON pgsql # REQUIRE: DAEMON pgsql nginx
if [ -f /etc/rc.subr ]; then if [ -f /etc/rc.subr ]; then
. /etc/rc.subr . /etc/rc.subr
fi fi
pleroma_home="/home/pleroma"
pleroma_user="pleroma"
name="pleroma" name="pleroma"
rcvar=${name} rcvar=${name}
command="/usr/pkg/bin/elixir" command="/usr/pkg/bin/elixir"
@ -19,10 +22,10 @@ pleroma_env="HOME=${pleroma_home} MIX_ENV=prod"
check_pidfile() check_pidfile()
{ {
pid=$(pgrep -U "${pleroma_user}" /bin/beam.smp$) pid=$(pgrep -U "${pleroma_user}" /bin/beam.smp$)
echo -n "${pid}" printf '%s' "${pid}"
} }
if [ -f /etc/rc.subr -a -d /etc/rc.d -a -f /etc/rc.d/DAEMON ]; then if [ -f /etc/rc.subr ] && [ -d /etc/rc.d ] && [ -f /etc/rc.d/DAEMON ]; then
# newer NetBSD # newer NetBSD
load_rc_config ${name} load_rc_config ${name}
run_rc_command "$1" run_rc_command "$1"
@ -39,7 +42,7 @@ else
stop) stop)
echo "Stopping ${name}." echo "Stopping ${name}."
check_pidfile check_pidfile
! [ -n ${pid} ] && kill ${pid} ! [ -n "${pid}" ] && kill "${pid}"
;; ;;
restart) restart)

View file

@ -14,7 +14,8 @@ defmodule Mix.Pleroma do
:swoosh, :swoosh,
:timex, :timex,
:fast_html, :fast_html,
:oban :oban,
:logger_backends
] ]
@cachex_children ["object", "user", "scrubber", "web_resp"] @cachex_children ["object", "user", "scrubber", "web_resp"]
@doc "Common functions to be reused in mix tasks" @doc "Common functions to be reused in mix tasks"

View file

@ -351,7 +351,7 @@ def run(["set_text_search_config", tsconfig]) do
) )
end end
shell_info('Done.') shell_info(~c"Done.")
end end
end end

View file

@ -0,0 +1,25 @@
defmodule Mix.Tasks.Pleroma.TestRunner do
  @shortdoc "Retries tests once if they fail"
  @moduledoc """
  Runs the test suite via `mix test` and, if it fails, retries only the
  previously failed tests once (`mix test --failed`) before giving up.

  Any extra command-line arguments are forwarded verbatim to `mix test`.
  Exits with a non-zero OS status if the retry also fails, so CI marks
  the job as failed.
  """
  use Mix.Task

  @impl Mix.Task
  def run(args \\ []) do
    # Stream test output straight to stdout so progress is visible in CI logs.
    case System.cmd("mix", ["test"] ++ args, into: IO.stream(:stdio, :line)) do
      {_, 0} ->
        :ok

      _ ->
        retry(args)
    end
  end

  # Second (final) attempt: re-run only the tests that failed previously.
  def retry(args) do
    case System.cmd("mix", ["test", "--failed"] ++ args, into: IO.stream(:stdio, :line)) do
      {_, 0} ->
        :ok

      _ ->
        # Use {:shutdown, 1} rather than exit(1): it sets OS exit status 1
        # without the crash report a bare non-shutdown exit reason produces.
        exit({:shutdown, 1})
    end
  end
end

View file

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server # Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/> # Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Config.TransferTask do defmodule Pleroma.Config.TransferTask do
@ -43,14 +43,9 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
# We need to restart applications for loaded settings take effect # We need to restart applications for loaded settings take effect
{logger, other} = settings =
(Repo.all(ConfigDB) ++ deleted_settings) (Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_with_default/1) |> Enum.map(&merge_with_default/1)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger] end)
logger
|> Enum.sort()
|> Enum.each(&configure/1)
started_applications = Application.started_applications() started_applications = Application.started_applications()
@ -63,7 +58,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
[:pleroma | reject] [:pleroma | reject]
end end
other settings
|> Enum.map(&update/1) |> Enum.map(&update/1)
|> Enum.uniq() |> Enum.uniq()
|> Enum.reject(&(&1 in reject)) |> Enum.reject(&(&1 in reject))
@ -101,38 +96,6 @@ defp merge_with_default(%{group: group, key: key, value: value} = setting) do
{group, key, value, merged} {group, key, value, merged}
end end
# change logger configuration in runtime, without restart
defp configure({_, :backends, _, merged}) do
# removing current backends
Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)
Enum.each(merged, &Logger.add_backend/1)
:ok = update_env(:logger, :backends, merged)
end
defp configure({_, key, _, merged}) when key in [:console, :ex_syslogger] do
merged =
if key == :console do
put_in(merged[:format], merged[:format] <> "\n")
else
merged
end
backend =
if key == :ex_syslogger,
do: {ExSyslogger, :ex_syslogger},
else: key
Logger.configure_backend(backend, merged)
:ok = update_env(:logger, key, merged)
end
defp configure({_, key, _, merged}) do
Logger.configure([{key, merged}])
:ok = update_env(:logger, key, merged)
end
defp update({group, key, value, merged}) do defp update({group, key, value, merged}) do
try do try do
:ok = update_env(group, key, merged) :ok = update_env(group, key, merged)

View file

@ -165,8 +165,7 @@ defp only_full_update?(%ConfigDB{group: group, key: key}) do
{:pleroma, :ecto_repos}, {:pleroma, :ecto_repos},
{:mime, :types}, {:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]}, {:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist}, {:swarm, :node_blacklist}
{:logger, :backends}
] ]
Enum.any?(full_key_update, fn Enum.any?(full_key_update, fn
@ -385,7 +384,12 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
@spec module_name?(String.t()) :: boolean() @spec module_name?(String.t()) :: boolean()
def module_name?(string) do def module_name?(string) do
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Ueberauth|Swoosh)\./, string) or if String.contains?(string, ".") do
string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"] [name | _] = String.split(string, ".", parts: 2)
name in ~w[Pleroma Phoenix Tesla Ueberauth Swoosh Logger LoggerBackends]
else
string in ~w[Oban Ueberauth ExSyslogger ConcurrentLimiter]
end
end end
end end

View file

@ -418,10 +418,10 @@ defp downloadable?(pack) do
end end
defp create_archive_and_cache(pack, hash) do defp create_archive_and_cache(pack, hash) do
files = ['pack.json' | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)] files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
{:ok, {_, result}} = {:ok, {_, result}} =
:zip.zip('#{pack.name}.zip', files, [:memory, cwd: to_charlist(pack.path)]) :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file]) ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files)) overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))
@ -591,7 +591,7 @@ defp unzip(archive, pack_info, remote_pack, local_pack) do
with :ok <- File.mkdir_p!(local_pack.path) do with :ok <- File.mkdir_p!(local_pack.path) do
files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end) files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
# Fallback cannot contain a pack.json file # Fallback cannot contain a pack.json file
files = if pack_info[:fallback], do: files, else: ['pack.json' | files] files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]
:zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files) :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
end end

View file

@ -9,7 +9,7 @@ defp registry, do: Pleroma.Gun.ConnectionPool
def start_monitor do def start_monitor do
pid = pid =
case GenServer.start_link(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do case GenServer.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
{:ok, pid} -> {:ok, pid} ->
pid pid

View file

@ -5,6 +5,9 @@
defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
@moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit" @moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit"
alias Pleroma.Config
alias Pleroma.Gun.ConnectionPool.Worker
use DynamicSupervisor use DynamicSupervisor
def start_link(opts) do def start_link(opts) do
@ -14,21 +17,28 @@ def start_link(opts) do
def init(_opts) do def init(_opts) do
DynamicSupervisor.init( DynamicSupervisor.init(
strategy: :one_for_one, strategy: :one_for_one,
max_children: Pleroma.Config.get([:connections_pool, :max_connections]) max_children: Config.get([:connections_pool, :max_connections])
) )
end end
def start_worker(opts, last_attempt \\ false) do def start_worker(opts, last_attempt \\ false)
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
if Enum.any?(funs, fn fun -> fun.() end) do def start_worker(opts, true) do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts}) case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
{:error, :pool_full} {:error, :max_children} ->
else :telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
start_worker(opts, true) {:error, :pool_full}
end
res ->
res
end
end
def start_worker(opts, false) do
case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
{:error, :max_children} ->
free_pool()
start_worker(opts, true)
res -> res ->
res res

View file

@ -865,8 +865,9 @@ def mark_context_as_read(%User{id: id}, context) do
|> Repo.update_all(set: [seen: true]) |> Repo.update_all(set: [seen: true])
end end
@spec send(list(Notification.t())) :: :ok @doc "Streams a list of notifications over websockets and web push"
def send(notifications) do @spec stream(list(Notification.t())) :: :ok
def stream(notifications) do
Enum.each(notifications, fn notification -> Enum.each(notifications, fn notification ->
Streamer.stream(["user", "user:notification"], notification) Streamer.stream(["user", "user:notification"], notification)
Push.send(notification) Push.send(notification)

View file

@ -135,7 +135,10 @@ def make_update_object_data(original_data, new_data, date) do
else else
%{updated_object: updated_data} = %{updated_object: updated_data} =
updated_data updated_data
|> maybe_update_history(original_data, updated: updated, use_history_in_new_object?: false) |> maybe_update_history(original_data,
updated: updated,
use_history_in_new_object?: false
)
updated_data updated_data
|> Map.put("updated", date) |> Map.put("updated", date)

View file

@ -249,14 +249,16 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
defp url_from_spec(_upload, _base_url, {:url, url}), do: url defp url_from_spec(_upload, _base_url, {:url, url}), do: url
@spec base_url() :: binary
def base_url do def base_url do
uploader = @config_impl.get([Pleroma.Upload, :uploader]) uploader = @config_impl.get([Pleroma.Upload, :uploader])
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url]) upload_fallback_url = Pleroma.Web.Endpoint.url() <> "/media/"
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url]) || upload_fallback_url
public_endpoint = @config_impl.get([uploader, :public_endpoint]) public_endpoint = @config_impl.get([uploader, :public_endpoint])
case uploader do case uploader do
Pleroma.Uploaders.Local -> Pleroma.Uploaders.Local ->
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/" upload_base_url
Pleroma.Uploaders.S3 -> Pleroma.Uploaders.S3 ->
bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket]) bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])
@ -268,11 +270,14 @@ def base_url do
!is_nil(truncated_namespace) -> !is_nil(truncated_namespace) ->
truncated_namespace truncated_namespace
!is_nil(namespace) -> !is_nil(namespace) and !is_nil(bucket) ->
namespace <> ":" <> bucket namespace <> ":" <> bucket
true -> !is_nil(bucket) ->
bucket bucket
true ->
""
end end
if public_endpoint do if public_endpoint do
@ -285,7 +290,7 @@ def base_url do
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url]) @config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
_ -> _ ->
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/" public_endpoint || upload_base_url
end end
end end
end end

View file

@ -40,6 +40,7 @@ defmodule Pleroma.User do
alias Pleroma.Web.RelMe alias Pleroma.Web.RelMe
alias Pleroma.Webhook.Notify alias Pleroma.Webhook.Notify
alias Pleroma.Workers.BackgroundWorker alias Pleroma.Workers.BackgroundWorker
alias Pleroma.Workers.UserRefreshWorker
require Logger require Logger
require Pleroma.Constants require Pleroma.Constants
@ -2204,20 +2205,20 @@ def html_filter_policy(_), do: Config.get([:markup, :scrub_policy])
def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id) def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
@spec get_or_fetch_by_ap_id(String.t()) :: {:ok, User.t()} | {:error, any()}
def get_or_fetch_by_ap_id(ap_id) do def get_or_fetch_by_ap_id(ap_id) do
cached_user = get_cached_by_ap_id(ap_id) with cached_user = %User{} <- get_cached_by_ap_id(ap_id),
_ <- maybe_refresh(cached_user) do
{:ok, cached_user}
else
_ -> fetch_by_ap_id(ap_id)
end
end
maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id) defp maybe_refresh(user) do
if needs_update?(user) do
case {cached_user, maybe_fetched_user} do UserRefreshWorker.new(%{"ap_id" => user.ap_id})
{_, {:ok, %User{} = user}} -> |> Oban.insert()
{:ok, user}
{%User{} = user, _} ->
{:ok, user}
_ ->
{:error, :not_found}
end end
end end

View file

@ -198,14 +198,14 @@ defp wait_backup(backup, current_processed, task) do
end end
@files [ @files [
'actor.json', ~c"actor.json",
'outbox.json', ~c"outbox.json",
'likes.json', ~c"likes.json",
'bookmarks.json', ~c"bookmarks.json",
'followers.json', ~c"followers.json",
'following.json', ~c"following.json",
'chats.json', ~c"chats.json",
'chat_messages.json' ~c"chat_messages.json"
] ]
@spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
def export(%__MODULE__{} = backup, caller_pid) do def export(%__MODULE__{} = backup, caller_pid) do

View file

@ -686,9 +686,9 @@ defp delete_object(object) do
with {:ok, _} <- Repo.delete(object), do: :ok with {:ok, _} <- Repo.delete(object), do: :ok
end end
defp send_notifications(meta) do defp stream_notifications(meta) do
Keyword.get(meta, :notifications, []) Keyword.get(meta, :notifications, [])
|> Notification.send() |> Notification.stream()
meta meta
end end
@ -719,7 +719,7 @@ defp add_notifications(meta, notifications) do
@impl true @impl true
def handle_after_transaction(meta) do def handle_after_transaction(meta) do
meta meta
|> send_notifications() |> stream_notifications()
|> send_streamables() |> send_streamables()
end end
end end

View file

@ -927,9 +927,11 @@ def add_emoji_tags(%{"emoji" => emoji} = object) do
def add_emoji_tags(object), do: object def add_emoji_tags(object), do: object
defp build_emoji_tag({name, url}) do def build_emoji_tag({name, url}) do
url = URI.encode(url)
%{ %{
"icon" => %{"url" => "#{URI.encode(url)}", "type" => "Image"}, "icon" => %{"url" => "#{url}", "type" => "Image"},
"name" => ":" <> name <> ":", "name" => ":" <> name <> ":",
"type" => "Emoji", "type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z", "updated" => "1970-01-01T00:00:00Z",

View file

@ -54,7 +54,7 @@ def call(conn, %{operation_id: operation_id, render_error: render_error} = opts)
{:error, reason} -> {:error, reason} ->
Logger.error( Logger.error(
"Strict ApiSpec: request denied to #{conn.path_info} with params #{inspect(conn.params)}" "Strict ApiSpec: request denied to #{conn.request_path} with params #{inspect(conn.params)}"
) )
opts = render_error.init(reason) opts = render_error.init(reason)

View file

@ -79,7 +79,9 @@ def search_operation do
%Schema{type: :string, enum: ["accounts", "hashtags", "statuses"]}, %Schema{type: :string, enum: ["accounts", "hashtags", "statuses"]},
"Search type" "Search type"
), ),
Operation.parameter(:q, :query, %Schema{type: :string}, "The search query", required: true), Operation.parameter(:q, :query, %Schema{type: :string}, "The search query",
required: true
),
Operation.parameter( Operation.parameter(
:resolve, :resolve,
:query, :query,

View file

@ -110,7 +110,7 @@ defp register_user(connection, base, uid, name) do
} }
params = params =
case List.keyfind(attributes, 'mail', 0) do case List.keyfind(attributes, ~c"mail", 0) do
{_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail)) {_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail))
_ -> params _ -> params
end end

View file

@ -30,7 +30,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
# pagination is restricted to 40 activities at a time # pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity -> Enum.each(activities, fn activity ->
spawn(fn -> Card.get_by_activity(activity) end) Card.get_by_activity(activity)
end) end)
end end

View file

@ -104,7 +104,7 @@ def handle_info(:ping, state) do
end end
def handle_info(:close, state) do def handle_info(:close, state) do
{:stop, {:closed, 'connection closed by server'}, state} {:stop, {:closed, ~c"connection closed by server"}, state}
end end
def handle_info(msg, state) do def handle_info(msg, state) do

View file

@ -75,8 +75,7 @@ def whitelisted?(url) do
%{host: domain} = URI.parse(url) %{host: domain} = URI.parse(url)
mediaproxy_whitelist_domains = mediaproxy_whitelist_domains =
[:media_proxy, :whitelist] Config.get([:media_proxy, :whitelist], [])
|> Config.get()
|> Kernel.++(["#{Upload.base_url()}"]) |> Kernel.++(["#{Upload.base_url()}"])
|> Enum.map(&maybe_get_domain_from_url/1) |> Enum.map(&maybe_get_domain_from_url/1)

View file

@ -34,7 +34,9 @@ def perform(%Plug.Conn{assigns: assigns} = conn, %{scopes: scopes} = options) do
permissions = Enum.join(missing_scopes, " #{op} ") permissions = Enum.join(missing_scopes, " #{op} ")
error_message = error_message =
dgettext("errors", "Insufficient permissions: %{permissions}.", permissions: permissions) dgettext("errors", "Insufficient permissions: %{permissions}.",
permissions: permissions
)
conn conn
|> put_resp_content_type("application/json") |> put_resp_content_type("application/json")

View file

@ -20,17 +20,13 @@ def init do
end end
def vapid_config do def vapid_config do
Application.get_env(:web_push_encryption, :vapid_details, []) Application.get_env(:web_push_encryption, :vapid_details, nil)
end end
def enabled do def enabled, do: match?([subject: _, public_key: _, private_key: _], vapid_config())
case vapid_config() do
[] -> false
list when is_list(list) -> true
_ -> false
end
end
@spec send(Pleroma.Notification.t()) ::
{:ok, Oban.Job.t()} | {:error, Oban.Job.changeset() | term()}
def send(notification) do def send(notification) do
WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id}) WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id})
end end

View file

@ -16,72 +16,74 @@ defmodule Pleroma.Web.Push.Impl do
require Logger require Logger
import Ecto.Query import Ecto.Query
@body_chars 140
@types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact", "Update"] @types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact", "Update"]
@doc "Performs sending notifications for user subscriptions" @doc "Builds webpush notification payloads for the subscriptions enabled by the receiving user"
@spec perform(Notification.t()) :: list(any) | :error | {:error, :unknown_type} @spec build(Notification.t()) ::
def perform( list(%{content: map(), subscription: Subscription.t()}) | []
def build(
%{ %{
activity: %{data: %{"type" => activity_type}} = activity, activity: %{data: %{"type" => activity_type}} = activity,
user: %User{id: user_id} user_id: user_id
} = notification } = notification
) )
when activity_type in @types do when activity_type in @types do
actor = User.get_cached_by_ap_id(notification.activity.data["actor"]) notification_actor = User.get_cached_by_ap_id(notification.activity.data["actor"])
avatar_url = User.avatar_url(notification_actor)
mastodon_type = notification.type
gcm_api_key = Application.get_env(:web_push_encryption, :gcm_api_key)
avatar_url = User.avatar_url(actor)
object = Object.normalize(activity, fetch: false) object = Object.normalize(activity, fetch: false)
user = User.get_cached_by_id(user_id) user = User.get_cached_by_id(user_id)
direct_conversation_id = Activity.direct_conversation_id(activity, user) direct_conversation_id = Activity.direct_conversation_id(activity, user)
for subscription <- fetch_subscriptions(user_id), subscriptions = fetch_subscriptions(user_id)
Subscription.enabled?(subscription, mastodon_type) do
%{ subscriptions
access_token: subscription.token.token, |> Enum.filter(&Subscription.enabled?(&1, notification.type))
notification_id: notification.id, |> Enum.map(fn subscription ->
notification_type: mastodon_type, payload =
icon: avatar_url, %{
preferred_locale: "en", access_token: subscription.token.token,
pleroma: %{ notification_id: notification.id,
activity_id: notification.activity.id, notification_type: notification.type,
direct_conversation_id: direct_conversation_id icon: avatar_url,
preferred_locale: "en",
pleroma: %{
activity_id: notification.activity.id,
direct_conversation_id: direct_conversation_id
}
} }
} |> Map.merge(build_content(notification, notification_actor, object))
|> Map.merge(build_content(notification, actor, object, mastodon_type)) |> Jason.encode!()
|> Jason.encode!()
|> push_message(build_sub(subscription), gcm_api_key, subscription) %{payload: payload, subscription: subscription}
end end)
|> (&{:ok, &1}).()
end end
def perform(_) do def build(notif) do
Logger.warning("Unknown notification type") Logger.warning("WebPush: unknown activity type: #{inspect(notif)}")
{:error, :unknown_type} []
end end
@doc "Push message to web" @doc "Deliver push notification to the provided webpush subscription"
def push_message(body, sub, api_key, subscription) do @spec deliver(%{payload: String.t(), subscription: Subscription.t()}) :: :ok | :error
try do def deliver(%{payload: payload, subscription: subscription}) do
case WebPushEncryption.send_web_push(body, sub, api_key) do gcm_api_key = Application.get_env(:web_push_encryption, :gcm_api_key)
{:ok, %{status: code}} when code in 400..499 -> formatted_subscription = build_sub(subscription)
Logger.debug("Removing subscription record")
Repo.delete!(subscription)
:ok
{:ok, %{status: code}} when code in 200..299 -> case WebPushEncryption.send_web_push(payload, formatted_subscription, gcm_api_key) do
:ok {:ok, %{status: code}} when code in 200..299 ->
:ok
{:ok, %{status: code}} -> {:ok, %{status: code}} when code in 400..499 ->
Logger.error("Web Push Notification failed with code: #{code}") Logger.debug("Removing subscription record")
:error Repo.delete!(subscription)
:ok
{:ok, %{status: code}} ->
Logger.error("Web Push Notification failed with code: #{code}")
:error
error ->
Logger.error("Web Push Notification failed with #{inspect(error)}")
:error
end
rescue
error -> error ->
Logger.error("Web Push Notification failed with #{inspect(error)}") Logger.error("Web Push Notification failed with #{inspect(error)}")
:error :error
@ -106,107 +108,106 @@ def build_sub(subscription) do
} }
end end
def build_content(notification, actor, object, mastodon_type \\ nil)
def build_content( def build_content(
%{ %{
user: %{notification_settings: %{hide_notification_contents: true}} user: %{notification_settings: %{hide_notification_contents: true}}
} = notification, } = notification,
_actor, _user,
_object, _object
mastodon_type
) do ) do
%{body: format_title(notification, mastodon_type)} %{body: format_title(notification)}
end end
def build_content(notification, actor, object, mastodon_type) do def build_content(notification, user, object) do
mastodon_type = mastodon_type || notification.type
%{ %{
title: format_title(notification, mastodon_type), title: format_title(notification),
body: format_body(notification, actor, object, mastodon_type) body: format_body(notification, user, object)
} }
end end
def format_body(activity, actor, object, mastodon_type \\ nil) @spec format_body(Notification.t(), User.t(), Object.t()) :: String.t()
def format_body(_notification, user, %{data: %{"type" => "ChatMessage"} = object}) do
def format_body(_activity, actor, %{data: %{"type" => "ChatMessage"} = data}, _) do case object["content"] do
case data["content"] do nil -> "@#{user.nickname}: (Attachment)"
nil -> "@#{actor.nickname}: (Attachment)" content -> "@#{user.nickname}: #{Utils.scrub_html_and_truncate(content, @body_chars)}"
content -> "@#{actor.nickname}: #{Utils.scrub_html_and_truncate(content, 80)}"
end end
end end
def format_body( def format_body(
%{activity: %{data: %{"type" => "Create"}}}, %{type: "poll"} = _notification,
actor, _user,
%{data: %{"content" => content}}, %{data: %{"content" => content} = data} = _object
_mastodon_type
) do ) do
"@#{actor.nickname}: #{Utils.scrub_html_and_truncate(content, 80)}" options = Map.get(data, "anyOf") || Map.get(data, "oneOf")
content_text = content <> "\n"
options_text = Enum.map_join(options, "\n", fn x -> "#{x["name"]}" end)
[content_text, options_text]
|> Enum.join("\n")
|> Utils.scrub_html_and_truncate(@body_chars)
end
def format_body(
%{activity: %{data: %{"type" => "Create"}}},
user,
%{data: %{"content" => content}}
) do
"@#{user.nickname}: #{Utils.scrub_html_and_truncate(content, @body_chars)}"
end end
def format_body( def format_body(
%{activity: %{data: %{"type" => "Announce"}}}, %{activity: %{data: %{"type" => "Announce"}}},
actor, user,
%{data: %{"content" => content}}, %{data: %{"content" => content}}
_mastodon_type
) do ) do
"@#{actor.nickname} repeated: #{Utils.scrub_html_and_truncate(content, 80)}" "@#{user.nickname} repeated: #{Utils.scrub_html_and_truncate(content, @body_chars)}"
end end
def format_body( def format_body(
%{activity: %{data: %{"type" => "EmojiReact", "content" => content}}}, %{activity: %{data: %{"type" => "EmojiReact", "content" => content}}},
actor, user,
_object, _object
_mastodon_type
) do ) do
"@#{actor.nickname} reacted with #{content}" "@#{user.nickname} reacted with #{content}"
end end
def format_body( def format_body(
%{activity: %{data: %{"type" => type}}} = notification, %{activity: %{data: %{"type" => type}}} = notification,
actor, user,
_object, _object
mastodon_type
) )
when type in ["Follow", "Like"] do when type in ["Follow", "Like"] do
mastodon_type = mastodon_type || notification.type case notification.type do
"follow" -> "@#{user.nickname} has followed you"
case mastodon_type do "follow_request" -> "@#{user.nickname} has requested to follow you"
"follow" -> "@#{actor.nickname} has followed you" "favourite" -> "@#{user.nickname} has favorited your post"
"follow_request" -> "@#{actor.nickname} has requested to follow you"
"favourite" -> "@#{actor.nickname} has favorited your post"
end end
end end
def format_body( def format_body(
%{activity: %{data: %{"type" => "Update"}}}, %{activity: %{data: %{"type" => "Update"}}},
actor, user,
_object, _object
_mastodon_type
) do ) do
"@#{actor.nickname} edited a status" "@#{user.nickname} edited a status"
end end
def format_title(activity, mastodon_type \\ nil) @spec format_title(Notification.t()) :: String.t()
def format_title(%{activity: %{data: %{"directMessage" => true}}}) do
def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_type) do
"New Direct Message" "New Direct Message"
end end
def format_title(%{type: type}, mastodon_type) do def format_title(%{type: "mention"}), do: "New Mention"
case mastodon_type || type do def format_title(%{type: "status"}), do: "New Status"
"mention" -> "New Mention" def format_title(%{type: "follow"}), do: "New Follower"
"status" -> "New Status" def format_title(%{type: "follow_request"}), do: "New Follow Request"
"follow" -> "New Follower" def format_title(%{type: "reblog"}), do: "New Repeat"
"follow_request" -> "New Follow Request" def format_title(%{type: "favourite"}), do: "New Favorite"
"reblog" -> "New Repeat" def format_title(%{type: "update"}), do: "New Update"
"favourite" -> "New Favorite" def format_title(%{type: "pleroma:chat_mention"}), do: "New Chat Message"
"update" -> "New Update" def format_title(%{type: "pleroma:emoji_reaction"}), do: "New Reaction"
"pleroma:chat_mention" -> "New Chat Message" def format_title(%{type: "poll"}), do: "Poll Results"
"pleroma:emoji_reaction" -> "New Reaction" def format_title(%{type: type}), do: "New #{String.capitalize(type || "event")}"
type -> "New #{String.capitalize(type || "event")}"
end
end
end end

View file

@ -6,35 +6,25 @@ defmodule Pleroma.Web.RichMedia.Backfill do
alias Pleroma.Web.RichMedia.Card alias Pleroma.Web.RichMedia.Card
alias Pleroma.Web.RichMedia.Parser alias Pleroma.Web.RichMedia.Parser
alias Pleroma.Web.RichMedia.Parser.TTL alias Pleroma.Web.RichMedia.Parser.TTL
alias Pleroma.Workers.RichMediaExpirationWorker alias Pleroma.Workers.RichMediaWorker
require Logger require Logger
@backfiller Pleroma.Config.get([__MODULE__, :provider], Pleroma.Web.RichMedia.Backfill.Task)
@cachex Pleroma.Config.get([:cachex, :provider], Cachex) @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@max_attempts 3
@retry 5_000
def start(%{url: url} = args) when is_binary(url) do @spec run(map()) ::
:ok | {:error, {:invalid_metadata, any()} | :body_too_large | {:content, any()} | any()}
def run(%{"url" => url} = args) do
url_hash = Card.url_to_hash(url) url_hash = Card.url_to_hash(url)
args =
args
|> Map.put(:attempt, 1)
|> Map.put(:url_hash, url_hash)
@backfiller.run(args)
end
def run(%{url: url, url_hash: url_hash, attempt: attempt} = args)
when attempt <= @max_attempts do
case Parser.parse(url) do case Parser.parse(url) do
{:ok, fields} -> {:ok, fields} ->
{:ok, card} = Card.create(url, fields) {:ok, card} = Card.create(url, fields)
maybe_schedule_expiration(url, fields) maybe_schedule_expiration(url, fields)
if Map.has_key?(args, :activity_id) do with %{"activity_id" => activity_id} <- args,
false <- is_nil(activity_id) do
stream_update(args) stream_update(args)
end end
@ -54,25 +44,16 @@ def run(%{url: url, url_hash: url_hash, attempt: attempt} = args)
e -> e ->
Logger.debug("Rich media error for #{url}: #{inspect(e)}") Logger.debug("Rich media error for #{url}: #{inspect(e)}")
{:error, e}
:timer.sleep(@retry * attempt)
run(%{args | attempt: attempt + 1})
end end
end end
def run(%{url: url, url_hash: url_hash}) do
Logger.debug("Rich media failure for #{url}")
negative_cache(url_hash, :timer.minutes(15))
end
defp maybe_schedule_expiration(url, fields) do defp maybe_schedule_expiration(url, fields) do
case TTL.process(fields, url) do case TTL.process(fields, url) do
{:ok, ttl} when is_number(ttl) -> {:ok, ttl} when is_number(ttl) ->
timestamp = DateTime.from_unix!(ttl) timestamp = DateTime.from_unix!(ttl)
RichMediaExpirationWorker.new(%{"url" => url}, scheduled_at: timestamp) RichMediaWorker.new(%{"op" => "expire", "url" => url}, scheduled_at: timestamp)
|> Oban.insert() |> Oban.insert()
_ -> _ ->
@ -80,22 +61,14 @@ defp maybe_schedule_expiration(url, fields) do
end end
end end
defp stream_update(%{activity_id: activity_id}) do defp stream_update(%{"activity_id" => activity_id}) do
Pleroma.Activity.get_by_id(activity_id) Pleroma.Activity.get_by_id(activity_id)
|> Pleroma.Activity.normalize() |> Pleroma.Activity.normalize()
|> Pleroma.Web.ActivityPub.ActivityPub.stream_out() |> Pleroma.Web.ActivityPub.ActivityPub.stream_out()
end end
defp warm_cache(key, val), do: @cachex.put(:rich_media_cache, key, val) defp warm_cache(key, val), do: @cachex.put(:rich_media_cache, key, val)
defp negative_cache(key, ttl \\ nil), do: @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
end
defmodule Pleroma.Web.RichMedia.Backfill.Task do defp negative_cache(key, ttl \\ :timer.minutes(15)),
alias Pleroma.Web.RichMedia.Backfill do: @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
def run(args) do
Task.Supervisor.start_child(Pleroma.TaskSupervisor, Backfill, :run, [args],
name: {:global, {:rich_media, args.url_hash}}
)
end
end end

View file

@ -7,8 +7,8 @@ defmodule Pleroma.Web.RichMedia.Card do
alias Pleroma.HTML alias Pleroma.HTML
alias Pleroma.Object alias Pleroma.Object
alias Pleroma.Repo alias Pleroma.Repo
alias Pleroma.Web.RichMedia.Backfill
alias Pleroma.Web.RichMedia.Parser alias Pleroma.Web.RichMedia.Parser
alias Pleroma.Workers.RichMediaWorker
@cachex Pleroma.Config.get([:cachex, :provider], Cachex) @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
@ -75,17 +75,18 @@ def get_by_url(url) when is_binary(url) do
def get_by_url(nil), do: nil def get_by_url(nil), do: nil
@spec get_or_backfill_by_url(String.t(), map()) :: t() | nil @spec get_or_backfill_by_url(String.t(), keyword()) :: t() | nil
def get_or_backfill_by_url(url, backfill_opts \\ %{}) do def get_or_backfill_by_url(url, opts \\ []) do
if @config_impl.get([:rich_media, :enabled]) do if @config_impl.get([:rich_media, :enabled]) do
case get_by_url(url) do case get_by_url(url) do
%__MODULE__{} = card -> %__MODULE__{} = card ->
card card
nil -> nil ->
backfill_opts = Map.put(backfill_opts, :url, url) activity_id = Keyword.get(opts, :activity, nil)
Backfill.start(backfill_opts) RichMediaWorker.new(%{"op" => "backfill", "url" => url, "activity_id" => activity_id})
|> Oban.insert()
nil nil
@ -137,7 +138,7 @@ def get_by_activity(activity) do
nil nil
else else
{:cached, url} -> {:cached, url} ->
get_or_backfill_by_url(url, %{activity_id: activity.id}) get_or_backfill_by_url(url, activity_id: activity.id)
_ -> _ ->
:error :error

View file

@ -173,7 +173,13 @@ def remove_socket(topic) do
def stream(topics, items) do def stream(topics, items) do
if should_env_send?() do if should_env_send?() do
for topic <- List.wrap(topics), item <- List.wrap(items) do for topic <- List.wrap(topics), item <- List.wrap(items) do
spawn(fn -> do_stream(topic, item) end) fun = fn -> do_stream(topic, item) end
if Config.get([__MODULE__, :sync_streaming], false) do
fun.()
else
spawn(fun)
end
end end
end end
end end

View file

@ -9,7 +9,7 @@ def string_from_xpath(_, :error), do: nil
def string_from_xpath(xpath, doc) do def string_from_xpath(xpath, doc) do
try do try do
{:xmlObj, :string, res} = :xmerl_xpath.string('string(#{xpath})', doc) {:xmlObj, :string, res} = :xmerl_xpath.string(~c"string(#{xpath})", doc)
res = res =
res res

View file

@ -16,7 +16,7 @@ defmodule Pleroma.Workers.NotificationWorker do
def perform(%Job{args: %{"op" => "create", "activity_id" => activity_id}}) do def perform(%Job{args: %{"op" => "create", "activity_id" => activity_id}}) do
with %Activity{} = activity <- find_activity(activity_id), with %Activity{} = activity <- find_activity(activity_id),
{:ok, notifications} <- Notification.create_notifications(activity) do {:ok, notifications} <- Notification.create_notifications(activity) do
Notification.send(notifications) Notification.stream(notifications)
end end
end end

View file

@ -14,8 +14,9 @@ defmodule Pleroma.Workers.PollWorker do
@impl Oban.Worker @impl Oban.Worker
def perform(%Job{args: %{"op" => "poll_end", "activity_id" => activity_id}}) do def perform(%Job{args: %{"op" => "poll_end", "activity_id" => activity_id}}) do
with %Activity{} = activity <- find_poll_activity(activity_id) do with %Activity{} = activity <- find_poll_activity(activity_id),
Notification.create_poll_notifications(activity) {:ok, notifications} <- Notification.create_poll_notifications(activity) do
Notification.stream(notifications)
end end
end end

View file

@ -1,15 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.RichMediaExpirationWorker do
alias Pleroma.Web.RichMedia.Card
use Oban.Worker,
queue: :background
@impl Oban.Worker
def perform(%Job{args: %{"url" => url} = _args}) do
Card.delete(url)
end
end

View file

@ -0,0 +1,19 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.RichMediaWorker do
alias Pleroma.Web.RichMedia.Backfill
alias Pleroma.Web.RichMedia.Card
use Oban.Worker, queue: :background, max_attempts: 3, unique: [period: 300]
@impl Oban.Worker
def perform(%Job{args: %{"op" => "expire", "url" => url} = _args}) do
Card.delete(url)
end
def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do
Backfill.run(args)
end
end

View file

@ -0,0 +1,14 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.UserRefreshWorker do
use Pleroma.Workers.WorkerHelper, queue: "background", max_attempts: 1, unique: [period: 300]
alias Pleroma.User
@impl Oban.Worker
def perform(%Job{args: %{"ap_id" => ap_id}}) do
User.fetch_by_ap_id(ap_id)
end
end

View file

@ -5,6 +5,7 @@
defmodule Pleroma.Workers.WebPusherWorker do defmodule Pleroma.Workers.WebPusherWorker do
alias Pleroma.Notification alias Pleroma.Notification
alias Pleroma.Repo alias Pleroma.Repo
alias Pleroma.Web.Push.Impl
use Pleroma.Workers.WorkerHelper, queue: "web_push" use Pleroma.Workers.WorkerHelper, queue: "web_push"
@ -15,7 +16,8 @@ def perform(%Job{args: %{"op" => "web_push", "notification_id" => notification_i
|> Repo.get(notification_id) |> Repo.get(notification_id)
|> Repo.preload([:activity, :user]) |> Repo.preload([:activity, :user])
Pleroma.Web.Push.Impl.perform(notification) Impl.build(notification)
|> Enum.each(&Impl.deliver(&1))
end end
@impl Oban.Worker @impl Oban.Worker

37
mix.exs
View file

@ -12,7 +12,7 @@ def project do
elixir: "~> 1.13", elixir: "~> 1.13",
elixirc_paths: elixirc_paths(Mix.env()), elixirc_paths: elixirc_paths(Mix.env()),
compilers: Mix.compilers(), compilers: Mix.compilers(),
elixirc_options: [warnings_as_errors: warnings_as_errors()], elixirc_options: [warnings_as_errors: warnings_as_errors(), prune_code_paths: false],
xref: [exclude: [:eldap]], xref: [exclude: [:eldap]],
dialyzer: [plt_add_apps: [:mix, :eldap]], dialyzer: [plt_add_apps: [:mix, :eldap]],
start_permanent: Mix.env() == :prod, start_permanent: Mix.env() == :prod,
@ -75,14 +75,15 @@ def copy_nginx_config(%{path: target_path} = release) do
def application do def application do
[ [
mod: {Pleroma.Application, []}, mod: {Pleroma.Application, []},
extra_applications: [ extra_applications:
:logger, [
:runtime_tools, :logger,
:comeonin, :runtime_tools,
:fast_sanitize, :comeonin,
:os_mon, :fast_sanitize,
:ssl :os_mon,
], :ssl
] ++ logger_application(),
included_applications: [:ex_syslogger] included_applications: [:ex_syslogger]
] ]
end end
@ -111,6 +112,22 @@ defp oauth_deps do
for s <- oauth_strategy_packages, do: {String.to_atom(s), ">= 0.0.0"} for s <- oauth_strategy_packages, do: {String.to_atom(s), ">= 0.0.0"}
end end
defp logger_application do
if Version.match?(System.version(), "<1.15.0-rc.0") do
[]
else
[:logger_backends]
end
end
defp logger_deps do
if Version.match?(System.version(), "<1.15.0-rc.0") do
[]
else
[{:logger_backends, "~> 1.0"}]
end
end
# Specifies your project dependencies. # Specifies your project dependencies.
# #
# Type `mix help deps` for examples and options. # Type `mix help deps` for examples and options.
@ -202,7 +219,7 @@ defp deps do
{:websockex, "~> 0.4.3", only: :test}, {:websockex, "~> 0.4.3", only: :test},
{:benchee, "~> 1.0", only: :benchmark}, {:benchee, "~> 1.0", only: :benchmark},
{:dialyxir, "~> 1.4", only: [:dev, :test], runtime: false} {:dialyxir, "~> 1.4", only: [:dev, :test], runtime: false}
] ++ oauth_deps() ] ++ oauth_deps() ++ logger_deps()
end end
# Aliases are shortcuts or tasks specific to the current project. # Aliases are shortcuts or tasks specific to the current project.

View file

@ -78,6 +78,7 @@
"jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, :rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"}, "jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, :rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"},
"jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"}, "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"},
"linkify": {:hex, :linkify, "0.5.3", "5f8143d8f61f5ff08d3aeeff47ef6509492b4948d8f08007fbf66e4d2246a7f2", [:mix], [], "hexpm", "3ef35a1377d47c25506e07c1c005ea9d38d700699d92ee92825f024434258177"}, "linkify": {:hex, :linkify, "0.5.3", "5f8143d8f61f5ff08d3aeeff47ef6509492b4948d8f08007fbf66e4d2246a7f2", [:mix], [], "hexpm", "3ef35a1377d47c25506e07c1c005ea9d38d700699d92ee92825f024434258177"},
"logger_backends": {:hex, :logger_backends, "1.0.0", "09c4fad6202e08cb0fbd37f328282f16539aca380f512523ce9472b28edc6bdf", [:mix], [], "hexpm", "1faceb3e7ec3ef66a8f5746c5afd020e63996df6fd4eb8cdb789e5665ae6c9ce"},
"majic": {:hex, :majic, "1.0.0", "37e50648db5f5c2ff0c9fb46454d034d11596c03683807b9fb3850676ffdaab3", [:make, :mix], [{:elixir_make, "~> 0.6.1", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "7905858f76650d49695f14ea55cd9aaaee0c6654fa391671d4cf305c275a0a9e"}, "majic": {:hex, :majic, "1.0.0", "37e50648db5f5c2ff0c9fb46454d034d11596c03683807b9fb3850676ffdaab3", [:make, :mix], [{:elixir_make, "~> 0.6.1", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "7905858f76650d49695f14ea55cd9aaaee0c6654fa391671d4cf305c275a0a9e"},
"makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"}, "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"},
"makeup_elixir": {:hex, :makeup_elixir, "0.14.1", "4f0e96847c63c17841d42c08107405a005a2680eb9c7ccadfd757bd31dabccfb", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f2438b1a80eaec9ede832b5c41cd4f373b38fd7aa33e3b22d9db79e640cbde11"}, "makeup_elixir": {:hex, :makeup_elixir, "0.14.1", "4f0e96847c63c17841d42c08107405a005a2680eb9c7ccadfd757bd31dabccfb", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f2438b1a80eaec9ede832b5c41cd4f373b38fd7aa33e3b22d9db79e640cbde11"},

View file

@ -7,7 +7,10 @@ defmodule Pleroma.Repo.Migrations.CreateUserTrigramIndex do
def change do def change do
create_if_not_exists( create_if_not_exists(
index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist) index(:users, ["(nickname || name) gist_trgm_ops"],
name: :users_trigram_index,
using: :gist
)
) )
end end
end end

View file

@ -20,7 +20,10 @@ def down do
drop_if_exists(index(:users, [], name: :users_trigram_index)) drop_if_exists(index(:users, [], name: :users_trigram_index))
create_if_not_exists( create_if_not_exists(
index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist) index(:users, ["(nickname || name) gist_trgm_ops"],
name: :users_trigram_index,
using: :gist
)
) )
end end
end end

View file

@ -7,7 +7,10 @@ defmodule Pleroma.Repo.Migrations.AddTagIndexToObjects do
def change do def change do
drop_if_exists( drop_if_exists(
index(:activities, ["(data #> '{\"object\",\"tag\"}')"], using: :gin, name: :activities_tags) index(:activities, ["(data #> '{\"object\",\"tag\"}')"],
using: :gin,
name: :activities_tags
)
) )
create_if_not_exists(index(:objects, ["(data->'tag')"], using: :gin, name: :objects_tags)) create_if_not_exists(index(:objects, ["(data->'tag')"], using: :gin, name: :objects_tags))

View file

@ -9,7 +9,9 @@ defmodule Pleroma.Repo.Migrations.RemoteipPlugRename do
def up do def up do
config = config =
from(c in Pleroma.ConfigDB, where: c.group == ^:pleroma and c.key == ^Pleroma.Plugs.RemoteIp) from(c in Pleroma.ConfigDB,
where: c.group == ^:pleroma and c.key == ^Pleroma.Plugs.RemoteIp
)
|> Pleroma.Repo.one() |> Pleroma.Repo.one()
if config do if config do

View file

@ -3,7 +3,9 @@ defmodule Pleroma.Repo.Migrations.AddActivityAssignedAccountIndex do
def change do def change do
create_if_not_exists( create_if_not_exists(
index(:activities, ["(data->>'assigned_account')"], name: :activities_assigned_account_index) index(:activities, ["(data->>'assigned_account')"],
name: :activities_assigned_account_index
)
) )
end end
end end

View file

@ -0,0 +1,7 @@
defmodule Pleroma.Repo.Migrations.DeprecateConfigDBLogger do
use Ecto.Migration
def change do
execute("DELETE FROM config WHERE config.group = ':logger'")
end
end

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show more