commit 5d1cb904ac
Merge remote-tracking branch 'origin/develop' into fork
Signed-off-by: marcin mikołajczak <git@mkljczk.pl>

245 changed files with 783 additions and 625 deletions
.gitignore (vendored) | 3

@@ -61,3 +61,6 @@ pleroma.iml
*~
*#
*.swp

archive-*
.gitlab-ci-local
@@ -1,4 +1,4 @@
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-25

variables: &global_variables
# Only used for the release

@@ -18,9 +18,7 @@ workflow:
- if: $CI_COMMIT_BRANCH

cache: &global_cache_policy
key:
files:
- mix.lock
key: $CI_JOB_IMAGE-$CI_COMMIT_SHORT_SHA
paths:
- deps
- _build

@@ -72,7 +70,7 @@ check-changelog:
tags:
- amd64

build-1.13.4:
build-1.13.4-otp-25:
extends:
- .build_changes_policy
- .using-ci-base

@@ -80,13 +78,12 @@ build-1.13.4:
script:
- mix compile --force

build-1.15.7-otp-25:
build-1.15.8-otp-26:
extends:
- .build_changes_policy
- .using-ci-base
stage: build
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
allow_failure: true
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26
script:
- mix compile --force

@@ -121,7 +118,7 @@ benchmark:
- mix ecto.migrate
- mix pleroma.load_testing

unit-testing-1.12.3:
unit-testing-1.13.4-otp-25:
extends:
- .build_changes_policy
- .using-ci-base

@@ -136,7 +133,7 @@ unit-testing-1.12.3:
script: &testing_script
- mix ecto.create
- mix ecto.migrate
- mix test --cover --preload-modules
- mix pleroma.test_runner --cover --preload-modules
coverage: '/^Line total: ([^ ]*%)$/'
artifacts:
reports:

@@ -144,34 +141,19 @@ unit-testing-1.12.3:
coverage_format: cobertura
path: coverage.xml

unit-testing-1.15.7-otp-25:
unit-testing-1.15.8-otp-26:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
allow_failure: true
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26
cache: *testing_cache_policy
services: *testing_services
script: *testing_script

unit-testing-1.12-erratic:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
retry: 2
allow_failure: true
cache: *testing_cache_policy
services: *testing_services
script:
- mix ecto.create
- mix ecto.migrate
- mix test --only=erratic

formatting-1.13:
formatting-1.15:
extends: .build_changes_policy
image: &formatting_elixir elixir:1.13-alpine
image: &formatting_elixir elixir:1.15-alpine
stage: lint
cache: *testing_cache_policy
before_script: &current_bfr_script
changelog.d/3280-fix-emoji-ids.fix (new file) | 1

@@ -0,0 +1 @@
Fix Emoji object IDs not always being valid
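For context, the corresponding code change is in the transmogrifier hunk further down, where the emoji URL is URI-encoded once instead of being re-encoded inside the tag. A minimal sketch of the corrected helper, assuming the `build_emoji_tag/1` shape shown there:

```
# Sketch only; mirrors the transmogrifier change later in this diff.
def build_emoji_tag({name, url}) do
  # Encode once and reuse the encoded value everywhere in the tag.
  url = URI.encode(url)

  %{
    "icon" => %{"url" => "#{url}", "type" => "Image"},
    "name" => ":" <> name <> ":",
    "type" => "Emoji",
    "updated" => "1970-01-01T00:00:00Z"
  }
end
```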
changelog.d/adminfe-logger.change (new file) | 1

@@ -0,0 +1 @@
Elixir Logger configuration is no longer permitted through AdminFE and ConfigDB
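Since runtime Logger reconfiguration through ConfigDB is removed, logger settings now live only in the config files. A minimal sketch, assuming a plain `config/prod.exs`-style setup like the documentation hunks further down (note that `:warn` becomes `:warning`):

```
# Sketch only: static Logger configuration in config files,
# matching the documentation examples later in this diff.
config :logger,
  backends: [:console, {ExSyslogger, :ex_syslogger}]

config :logger, :ex_syslogger,
  level: :warning
```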
changelog.d/ci-cache.skip (new file) | 0
changelog.d/ci-erratic.skip (new file) | 0
changelog.d/ci-otp-update.skip (new file) | 0
changelog.d/debug-logs.skip (new file) | 0
changelog.d/docs-netbsd-update.change (new file) | 1

@@ -0,0 +1 @@
Update and extend NetBSD installation docs
changelog.d/elixir-1.15.fix (new file) | 1

@@ -0,0 +1 @@
Elixir 1.15 compatibility
changelog.d/gun_pool4.fix (new file) | 1

@@ -0,0 +1 @@
Gun Connection Pool was not retrying to acquire a connection if the pool was full and stale connections were reclaimed
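The fix itself appears in the `WorkerSupervisor` hunk below: the first `start_worker/2` attempt now reclaims stale connections and retries exactly once. A condensed sketch of that control flow, using the names from the hunk:

```
# Sketch of the retry-once flow from the WorkerSupervisor change below.
def start_worker(opts, last_attempt \\ false)

def start_worker(opts, false) do
  case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
    # Pool is full: reclaim stale connections, then retry a single time.
    {:error, :max_children} ->
      free_pool()
      start_worker(opts, true)

    res ->
      res
  end
end
```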
changelog.d/rich_media_backfill.change (new file) | 1

@@ -0,0 +1 @@
Rich Media backfilling is now an Oban job
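Concretely, a cache miss in `Card.get_or_backfill_by_url/2` now enqueues a background job instead of spawning a task. A minimal sketch grounded in the `Card` and `RichMediaWorker` hunks later in this diff; `url` and `activity_id` stand for caller-provided values:

```
# Sketch only: enqueue the backfill as an Oban job, as the Card hunk below does.
Pleroma.Workers.RichMediaWorker.new(%{
  "op" => "backfill",
  "url" => url,
  "activity_id" => activity_id
})
|> Oban.insert()
```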
changelog.d/spex-error-log.skip (new file) | 0
changelog.d/stream-end-poll.fix (new file) | 1

@@ -0,0 +1 @@
End of poll notifications were not streamed over websockets or web push
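The fix is visible in the `PollWorker` hunk near the end of this diff: the created poll notifications are now passed to `Notification.stream/1`. A condensed sketch of the corrected clause:

```
# Sketch of the PollWorker change below: stream the poll-end notifications.
def perform(%Job{args: %{"op" => "poll_end", "activity_id" => activity_id}}) do
  with %Activity{} = activity <- find_poll_activity(activity_id),
       {:ok, notifications} <- Notification.create_poll_notifications(activity) do
    Notification.stream(notifications)
  end
end
```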
changelog.d/user-refresh-rework.skip (new file) | 0
changelog.d/user-refresh.change (new file) | 1

@@ -0,0 +1 @@
User profile refreshes are now asynchronous
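In practice this means a stale cached user is returned immediately while a `UserRefreshWorker` job refreshes the profile in the background; a minimal sketch grounded in the `Pleroma.User` hunk later in this diff:

```
# Sketch of the new refresh path from the Pleroma.User change below:
# schedule a background refresh instead of fetching inline.
defp maybe_refresh(user) do
  if needs_update?(user) do
    UserRefreshWorker.new(%{"ap_id" => user.ap_id})
    |> Oban.insert()
  end
end
```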
changelog.d/web_push_actor_regression.skip (new file) | 0
changelog.d/webpush-polls.change (new file) | 1

@@ -0,0 +1 @@
Render nice web push notifications for polls
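The rendering itself is added in the `Push.Impl` hunk further down, where a poll notification body combines the question with its options. A condensed sketch of that clause:

```
# Sketch of the poll body formatting from the Push.Impl change below.
def format_body(%{type: "poll"} = _notification, _user, %{data: %{"content" => content} = data}) do
  options = Map.get(data, "anyOf") || Map.get(data, "oneOf")
  options_text = Enum.map_join(options, "\n", fn x -> "○ #{x["name"]}" end)

  [content <> "\n", options_text]
  |> Enum.join("\n")
  |> Utils.scrub_html_and_truncate(@body_chars)
end
```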
@@ -1,4 +1,4 @@
FROM elixir:1.13.4-otp-24
FROM elixir:1.13.4-otp-25

# Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run

@@ -1 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push .
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-25 --push .

@@ -1 +0,0 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .

@@ -1,4 +1,4 @@
FROM elixir:1.15.7-otp-25
FROM elixir:1.15.8-otp-26

# Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run

ci/elixir-1.15.8-otp-26/build_and_push.sh (new, executable) | 1

@@ -0,0 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26 --push .
@@ -132,6 +132,8 @@
]

# Configures Elixir's Logger
config :logger, backends: [:console]

config :logger, :console,
level: :debug,
format: "\n$time $metadata[$level] $message\n",
@@ -1260,79 +1260,6 @@
}
]
},
%{
group: :logger,
type: :group,
description: "Logger-related settings",
children: [
%{
key: :backends,
type: [:atom, :tuple, :module],
description:
"Where logs will be sent, :console - send logs to stdout, { ExSyslogger, :ex_syslogger } - to syslog, Quack.Logger - to Slack.",
suggestions: [:console, {ExSyslogger, :ex_syslogger}]
}
]
},
%{
group: :logger,
type: :group,
key: :ex_syslogger,
label: "ExSyslogger",
description: "ExSyslogger-related settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warning, :error]
},
%{
key: :ident,
type: :string,
description:
"A string that's prepended to every message, and is typically set to the app name",
suggestions: ["pleroma"]
},
%{
key: :format,
type: :string,
description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
suggestions: ["$metadata[$level] $message"]
},
%{
key: :metadata,
type: {:list, :atom},
suggestions: [:request_id]
}
]
},
%{
group: :logger,
type: :group,
key: :console,
label: "Console Logger",
description: "Console logger settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warning, :error]
},
%{
key: :format,
type: :string,
description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
suggestions: ["$metadata[$level] $message"]
},
%{
key: :metadata,
type: {:list, :atom},
suggestions: [:request_id]
}
]
},
%{
group: :pleroma,
key: :frontend_configurations,
@@ -36,7 +36,7 @@
# different ports.

# Do not include timestamps in development logs
config :logger, :console, format: "$metadata[$level] $message\n"
config :logger, Logger.Backends.Console, format: "$metadata[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
@@ -20,6 +20,7 @@
config :phoenix, serve_endpoints: true

# Do not print debug messages in production
config :logger, Logger.Backends.Console, level: :info
config :logger, :console, level: :info
config :logger, :ex_syslogger, level: :info
@@ -53,7 +53,8 @@
hostname: System.get_env("DB_HOST") || "localhost",
port: System.get_env("DB_PORT") || "5432",
pool: Ecto.Adapters.SQL.Sandbox,
pool_size: System.schedulers_online() * 2
pool_size: System.schedulers_online() * 2,
log: false

config :pleroma, :dangerzone, override_repo_pool_size: true

@@ -189,6 +190,8 @@
streamer_registry: false,
test_http_pools: true

config :pleroma, Pleroma.Web.Streaming, sync_streaming: true

config :pleroma, Pleroma.Uploaders.Uploader, timeout: 1_000

config :pleroma, Pleroma.Emoji.Loader, test_emoji: true
@@ -853,7 +853,7 @@ config :logger,
backends: [{ExSyslogger, :ex_syslogger}]

config :logger, :ex_syslogger,
level: :warn
level: :warning
```

Another example, keeping console output and adding the pid to syslog output:

@@ -862,7 +862,7 @@ config :logger,
backends: [:console, {ExSyslogger, :ex_syslogger}]

config :logger, :ex_syslogger,
level: :warn,
level: :warning,
option: [:pid, :ndelay]
```
@@ -1,7 +1,7 @@
## Required dependencies

* PostgreSQL >=11.0
* Elixir >=1.13.0 <1.15
* Elixir >=1.13.0 <1.17
* Erlang OTP >=22.2.0 (supported: <27)
* git
* file / libmagic
@@ -2,14 +2,41 @@

{! backend/installation/generic_dependencies.include !}

## Installing software used in this guide
# Installation options

Currently there are two options available for NetBSD: manual installation (from source) or using the experimental package from [pkgsrc-wip](https://github.com/NetBSD/pkgsrc-wip/tree/master/pleroma).

The WIP package can be installed via pkgsrc and can be cross-compiled for easier binary distribution. A source installation will most probably be restricted to a single machine.

## pkgsrc installation

The WIP package creates a Mix.Release (similar to how Docker images are built) but doesn't bundle the Erlang runtime, listing it as a dependency instead. This allows for easier and more modular installations, especially on weaker machines. Currently this method also does not support all features of the `pleroma_ctl` command (like changing installation type or managing frontends), as NetBSD is not yet a supported binary flavour of Pleroma's CI.

In any case, you can install it the same way as any other `pkgsrc-wip` package:

```
cd /usr/pkgsrc
git clone --depth 1 git://wip.pkgsrc.org/pkgsrc-wip.git wip
cp -rf wip/pleroma www
cp -rf wip/libvips graphics
cd /usr/pkgsrc/www/pleroma
bmake && bmake install
```

Use `bmake package` to create a binary package. This comes in especially handy if you're targeting embedded or low-power systems and are cross-compiling on a more powerful machine.

> Note: Elixir has an [endianness bug](https://github.com/elixir-lang/elixir/issues/2785) which requires it to be compiled on a machine with the same endianness. In other words, a package cross-compiled on amd64 (little endian) won't work on powerpc or sparc machines (big endian). While _in theory™_ nothing catastrophic should happen, one can see that, for example, regexes won't work properly. Some distributions just strip this warning away, so it doesn't bother the users... anyway, you've been warned.

## Source installation

pkgin should have been installed by the NetBSD installer if you selected
the right options. If it isn't installed, install it using pkg_add.
the right options. If it isn't installed, install it using `pkg_add`.

Note that `postgresql11-contrib` is needed for the Postgres extensions
Pleroma uses.

> Note: you can use modern versions of PostgreSQL. In this case, just use `postgresql16-contrib` and so on.

The `mksh` shell is needed to run the Elixir `mix` script.

`# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo ffmpeg4 ImageMagick`
@@ -29,29 +56,6 @@ shells/mksh
www/nginx
```

Copy the rc.d scripts to the right directory:

```
# cp /usr/pkg/share/examples/rc.d/nginx /usr/pkg/share/examples/rc.d/pgsql /etc/rc.d
```

Add nginx and Postgres to `/etc/rc.conf`:

```
nginx=YES
pgsql=YES
```

## Configuring postgres

First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.

### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md))

`# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`

## Configuring Pleroma

Create a user for Pleroma:

```
@@ -68,41 +72,98 @@ $ cd /home/pleroma
$ git clone -b stable https://git.pleroma.social/pleroma/pleroma.git
```

Configure Pleroma. Note that you need a domain name at this point:
Get deps and compile:

```
$ cd /home/pleroma/pleroma
$ export MIX_ENV=prod
$ mix deps.get
$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here.
$ mix compile
```

Since Postgres is configured, we can now initialize the database. There should
now be a file in `config/setup_db.psql` that makes this easier. Edit it, and
*change the password* to a password of your choice. Make sure it is secure, since
## Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md))

`# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`

or via pkgsrc:

```
graphics/p5-Image-ExifTool
graphics/ImageMagick
multimedia/ffmpeg4
```

# Configuration

## Understanding $PREFIX

From now on, you may encounter the `$PREFIX` variable in the paths. This variable indicates your current local pkgsrc prefix. Usually it's `/usr/pkg` unless you configured it otherwise. Translating to pkgsrc's lingo, it's called `LOCALBASE`, which essentially means the same thing. You may want to set it up for your local shell session (this uses `mksh`, which should already be installed as one of the required dependencies):

```
$ export PREFIX=$(pkg_info -Q LOCALBASE mksh)
$ echo $PREFIX
/usr/pkg
```

## Setting up your instance

Now, you need to configure your instance. During this initial configuration, you will be asked some questions about your server. You will need a domain name at this point; it doesn't have to be deployed, but changing it later will be very cumbersome.

If you've installed via pkgsrc, `pleroma_ctl` should already be in your `PATH`; if you've installed from source, it's located at `/home/pleroma/pleroma/release/bin/pleroma_ctl`.

```
$ su -l pleroma
$ pleroma_ctl instance gen --output $PREFIX/etc/pleroma/config.exs --output-psql /tmp/setup_db.psql
```

During installation, you will be asked about static and upload directories. Don't forget to create them and update permissions:

```
mkdir -p /var/lib/pleroma/uploads
chown -R pleroma:pleroma /var/lib/pleroma
```

## Setting up the database

First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.

We can now initialize the database. You'll need to edit the generated SQL file from the previous step. It's located at `/tmp/setup_db.psql`.

Edit this file, and *change the password* to a password of your choice. Make sure it is secure, since
it'll be protecting your database. Now initialize the database:

```
$ sudo -Hu pgsql -g pgsql psql -f config/setup_db.psql
$ sudo -Hu pgsql -g pgsql psql -f /tmp/setup_db.psql
```

Postgres allows connections from all users without a password by default. To
fix this, edit `/usr/pkg/pgsql/data/pg_hba.conf`. Change every `trust` to
fix this, edit `$PREFIX/pgsql/data/pg_hba.conf`. Change every `trust` to
`password`.

Once this is done, restart Postgres with `# /etc/rc.d/pgsql restart`.

Run the database migrations.

### pkgsrc installation

```
pleroma_ctl migrate
```

### Source installation

You will need to do this whenever you update with `git pull`:

```
$ cd /home/pleroma/pleroma
$ MIX_ENV=prod mix ecto.migrate
```

## Configuring nginx

Install the example configuration file
`/home/pleroma/pleroma/installation/pleroma.nginx` to
`/usr/pkg/etc/nginx.conf`.
(`$PREFIX/share/examples/pleroma/pleroma.nginx` or `/home/pleroma/pleroma/installation/pleroma.nginx`) to
`$PREFIX/etc/nginx.conf`.

Note that it will need to be wrapped in a `http {}` block. You should add
settings for the nginx daemon outside of the http block, for example:
@@ -176,27 +237,45 @@ Let's add auto-renewal to `/etc/daily.local`
--stateless
```

## Creating a startup script for Pleroma
## Autostart

Copy the startup script to the correct location and make sure it's executable:
For a properly functioning instance, you will need the pleroma (backend service), nginx (reverse proxy) and postgresql (database) services running. There's no requirement for them to reside on the same machine, but you have to provide autostart for each of them.

### nginx
```
# cp $PREFIX/share/examples/rc.d/nginx /etc/rc.d
# echo "nginx=YES" >> /etc/rc.conf
```

### postgresql

```
# cp $PREFIX/share/examples/rc.d/pgsql /etc/rc.d
# echo "pgsql=YES" >> /etc/rc.conf
```

### pleroma

First, copy the script (pkgsrc variant)
```
# cp $PREFIX/share/examples/pleroma/pleroma.rc /etc/rc.d/pleroma
```

or source variant
```
# cp /home/pleroma/pleroma/installation/netbsd/rc.d/pleroma /etc/rc.d/pleroma
# chmod +x /etc/rc.d/pleroma
```

Add the following to `/etc/rc.conf`:
Then, add the following to `/etc/rc.conf`:

```
pleroma=YES
pleroma_home="/home/pleroma"
pleroma_user="pleroma"
```

Run `# /etc/rc.d/pleroma start` to start Pleroma.

## Conclusion

Run `# /etc/rc.d/pleroma start` to start Pleroma.
Restart nginx with `# /etc/rc.d/nginx restart` and you should be up and running.

Make sure your time is in sync, or other instances will receive your posts with
@@ -1,11 +1,14 @@
#!/bin/sh
# PROVIDE: pleroma
# REQUIRE: DAEMON pgsql
# REQUIRE: DAEMON pgsql nginx

if [ -f /etc/rc.subr ]; then
. /etc/rc.subr
fi

pleroma_home="/home/pleroma"
pleroma_user="pleroma"

name="pleroma"
rcvar=${name}
command="/usr/pkg/bin/elixir"

@@ -19,10 +22,10 @@ pleroma_env="HOME=${pleroma_home} MIX_ENV=prod"
check_pidfile()
{
pid=$(pgrep -U "${pleroma_user}" /bin/beam.smp$)
echo -n "${pid}"
printf '%s' "${pid}"
}

if [ -f /etc/rc.subr -a -d /etc/rc.d -a -f /etc/rc.d/DAEMON ]; then
if [ -f /etc/rc.subr ] && [ -d /etc/rc.d ] && [ -f /etc/rc.d/DAEMON ]; then
# newer NetBSD
load_rc_config ${name}
run_rc_command "$1"

@@ -39,7 +42,7 @@ else
stop)
echo "Stopping ${name}."
check_pidfile
! [ -n ${pid} ] && kill ${pid}
! [ -n "${pid}" ] && kill "${pid}"
;;

restart)
@@ -14,7 +14,8 @@ defmodule Mix.Pleroma do
:swoosh,
:timex,
:fast_html,
:oban
:oban,
:logger_backends
]
@cachex_children ["object", "user", "scrubber", "web_resp"]
@doc "Common functions to be reused in mix tasks"

@@ -351,7 +351,7 @@ def run(["set_text_search_config", tsconfig]) do
)
end

shell_info('Done.')
shell_info(~c"Done.")
end
end
lib/mix/tasks/pleroma/test_runner.ex (new file) | 25

@@ -0,0 +1,25 @@
defmodule Mix.Tasks.Pleroma.TestRunner do
@shortdoc "Retries tests once if they fail"

use Mix.Task

def run(args \\ []) do
case System.cmd("mix", ["test"] ++ args, into: IO.stream(:stdio, :line)) do
{_, 0} ->
:ok

_ ->
retry(args)
end
end

def retry(args) do
case System.cmd("mix", ["test", "--failed"] ++ args, into: IO.stream(:stdio, :line)) do
{_, 0} ->
:ok

_ ->
exit(1)
end
end
end
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Config.TransferTask do
@@ -43,14 +43,9 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
# We need to restart applications for loaded settings take effect

{logger, other} =
settings =
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_with_default/1)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger] end)

logger
|> Enum.sort()
|> Enum.each(&configure/1)

started_applications = Application.started_applications()

@@ -63,7 +58,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
[:pleroma | reject]
end

other
settings
|> Enum.map(&update/1)
|> Enum.uniq()
|> Enum.reject(&(&1 in reject))

@@ -101,38 +96,6 @@ defp merge_with_default(%{group: group, key: key, value: value} = setting) do
{group, key, value, merged}
end

# change logger configuration in runtime, without restart
defp configure({_, :backends, _, merged}) do
# removing current backends
Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)

Enum.each(merged, &Logger.add_backend/1)

:ok = update_env(:logger, :backends, merged)
end

defp configure({_, key, _, merged}) when key in [:console, :ex_syslogger] do
merged =
if key == :console do
put_in(merged[:format], merged[:format] <> "\n")
else
merged
end

backend =
if key == :ex_syslogger,
do: {ExSyslogger, :ex_syslogger},
else: key

Logger.configure_backend(backend, merged)
:ok = update_env(:logger, key, merged)
end

defp configure({_, key, _, merged}) do
Logger.configure([{key, merged}])
:ok = update_env(:logger, key, merged)
end

defp update({group, key, value, merged}) do
try do
:ok = update_env(group, key, merged)
@@ -165,8 +165,7 @@ defp only_full_update?(%ConfigDB{group: group, key: key}) do
{:pleroma, :ecto_repos},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist},
{:logger, :backends}
{:swarm, :node_blacklist}
]

Enum.any?(full_key_update, fn
@@ -385,7 +384,12 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do

@spec module_name?(String.t()) :: boolean()
def module_name?(string) do
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Ueberauth|Swoosh)\./, string) or
string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"]
if String.contains?(string, ".") do
[name | _] = String.split(string, ".", parts: 2)

name in ~w[Pleroma Phoenix Tesla Ueberauth Swoosh Logger LoggerBackends]
else
string in ~w[Oban Ueberauth ExSyslogger ConcurrentLimiter]
end
end
end
@@ -418,10 +418,10 @@ defp downloadable?(pack) do
end

defp create_archive_and_cache(pack, hash) do
files = ['pack.json' | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]

{:ok, {_, result}} =
:zip.zip('#{pack.name}.zip', files, [:memory, cwd: to_charlist(pack.path)])
:zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])

ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))

@@ -591,7 +591,7 @@ defp unzip(archive, pack_info, remote_pack, local_pack) do
with :ok <- File.mkdir_p!(local_pack.path) do
files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
# Fallback cannot contain a pack.json file
files = if pack_info[:fallback], do: files, else: ['pack.json' | files]
files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]

:zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
end
@@ -9,7 +9,7 @@ defp registry, do: Pleroma.Gun.ConnectionPool

def start_monitor do
pid =
case GenServer.start_link(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
case GenServer.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
{:ok, pid} ->
pid
@@ -5,6 +5,9 @@
defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
@moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit"

alias Pleroma.Config
alias Pleroma.Gun.ConnectionPool.Worker

use DynamicSupervisor

def start_link(opts) do

@@ -14,21 +17,28 @@ def start_link(opts) do
def init(_opts) do
DynamicSupervisor.init(
strategy: :one_for_one,
max_children: Pleroma.Config.get([:connections_pool, :max_connections])
max_children: Config.get([:connections_pool, :max_connections])
)
end

def start_worker(opts, last_attempt \\ false) do
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
def start_worker(opts, last_attempt \\ false)

if Enum.any?(funs, fn fun -> fun.() end) do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
else
start_worker(opts, true)
end
def start_worker(opts, true) do
case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
{:error, :max_children} ->
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}

res ->
res
end
end

def start_worker(opts, false) do
case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
{:error, :max_children} ->
free_pool()
start_worker(opts, true)

res ->
res
@@ -865,8 +865,9 @@ def mark_context_as_read(%User{id: id}, context) do
|> Repo.update_all(set: [seen: true])
end

@spec send(list(Notification.t())) :: :ok
def send(notifications) do
@doc "Streams a list of notifications over websockets and web push"
@spec stream(list(Notification.t())) :: :ok
def stream(notifications) do
Enum.each(notifications, fn notification ->
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)
@@ -135,7 +135,10 @@ def make_update_object_data(original_data, new_data, date) do
else
%{updated_object: updated_data} =
updated_data
|> maybe_update_history(original_data, updated: updated, use_history_in_new_object?: false)
|> maybe_update_history(original_data,
updated: updated,
use_history_in_new_object?: false
)

updated_data
|> Map.put("updated", date)
@@ -249,14 +249,16 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do

defp url_from_spec(_upload, _base_url, {:url, url}), do: url

@spec base_url() :: binary
def base_url do
uploader = @config_impl.get([Pleroma.Upload, :uploader])
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url])
upload_fallback_url = Pleroma.Web.Endpoint.url() <> "/media/"
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url]) || upload_fallback_url
public_endpoint = @config_impl.get([uploader, :public_endpoint])

case uploader do
Pleroma.Uploaders.Local ->
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
upload_base_url

Pleroma.Uploaders.S3 ->
bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])

@@ -268,11 +270,14 @@ def base_url do
!is_nil(truncated_namespace) ->
truncated_namespace

!is_nil(namespace) ->
!is_nil(namespace) and !is_nil(bucket) ->
namespace <> ":" <> bucket

true ->
!is_nil(bucket) ->
bucket

true ->
""
end

if public_endpoint do

@@ -285,7 +290,7 @@ def base_url do
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])

_ ->
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
public_endpoint || upload_base_url
end
end
end
@@ -40,6 +40,7 @@ defmodule Pleroma.User do
alias Pleroma.Web.RelMe
alias Pleroma.Webhook.Notify
alias Pleroma.Workers.BackgroundWorker
alias Pleroma.Workers.UserRefreshWorker

require Logger
require Pleroma.Constants

@@ -2204,20 +2205,20 @@ def html_filter_policy(_), do: Config.get([:markup, :scrub_policy])

def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)

@spec get_or_fetch_by_ap_id(String.t()) :: {:ok, User.t()} | {:error, any()}
def get_or_fetch_by_ap_id(ap_id) do
cached_user = get_cached_by_ap_id(ap_id)
with cached_user = %User{} <- get_cached_by_ap_id(ap_id),
_ <- maybe_refresh(cached_user) do
{:ok, cached_user}
else
_ -> fetch_by_ap_id(ap_id)
end
end

maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)

case {cached_user, maybe_fetched_user} do
{_, {:ok, %User{} = user}} ->
{:ok, user}

{%User{} = user, _} ->
{:ok, user}

_ ->
{:error, :not_found}
defp maybe_refresh(user) do
if needs_update?(user) do
UserRefreshWorker.new(%{"ap_id" => user.ap_id})
|> Oban.insert()
end
end
@@ -198,14 +198,14 @@ defp wait_backup(backup, current_processed, task) do
end

@files [
'actor.json',
'outbox.json',
'likes.json',
'bookmarks.json',
'followers.json',
'following.json',
'chats.json',
'chat_messages.json'
~c"actor.json",
~c"outbox.json",
~c"likes.json",
~c"bookmarks.json",
~c"followers.json",
~c"following.json",
~c"chats.json",
~c"chat_messages.json"
]
@spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
def export(%__MODULE__{} = backup, caller_pid) do
@@ -686,9 +686,9 @@ defp delete_object(object) do
with {:ok, _} <- Repo.delete(object), do: :ok
end

defp send_notifications(meta) do
defp stream_notifications(meta) do
Keyword.get(meta, :notifications, [])
|> Notification.send()
|> Notification.stream()

meta
end

@@ -719,7 +719,7 @@ defp add_notifications(meta, notifications) do
@impl true
def handle_after_transaction(meta) do
meta
|> send_notifications()
|> stream_notifications()
|> send_streamables()
end
end
@@ -927,9 +927,11 @@ def add_emoji_tags(%{"emoji" => emoji} = object) do

def add_emoji_tags(object), do: object

defp build_emoji_tag({name, url}) do
def build_emoji_tag({name, url}) do
url = URI.encode(url)

%{
"icon" => %{"url" => "#{URI.encode(url)}", "type" => "Image"},
"icon" => %{"url" => "#{url}", "type" => "Image"},
"name" => ":" <> name <> ":",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z",
@@ -54,7 +54,7 @@ def call(conn, %{operation_id: operation_id, render_error: render_error} = opts)

{:error, reason} ->
Logger.error(
"Strict ApiSpec: request denied to #{conn.path_info} with params #{inspect(conn.params)}"
"Strict ApiSpec: request denied to #{conn.request_path} with params #{inspect(conn.params)}"
)

opts = render_error.init(reason)
@@ -79,7 +79,9 @@ def search_operation do
%Schema{type: :string, enum: ["accounts", "hashtags", "statuses"]},
"Search type"
),
Operation.parameter(:q, :query, %Schema{type: :string}, "The search query", required: true),
Operation.parameter(:q, :query, %Schema{type: :string}, "The search query",
required: true
),
Operation.parameter(
:resolve,
:query,
@@ -110,7 +110,7 @@ defp register_user(connection, base, uid, name) do
}

params =
case List.keyfind(attributes, 'mail', 0) do
case List.keyfind(attributes, ~c"mail", 0) do
{_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail))
_ -> params
end
@@ -30,7 +30,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
# pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity ->
spawn(fn -> Card.get_by_activity(activity) end)
Card.get_by_activity(activity)
end)
end
@@ -104,7 +104,7 @@ def handle_info(:ping, state) do
end

def handle_info(:close, state) do
{:stop, {:closed, 'connection closed by server'}, state}
{:stop, {:closed, ~c"connection closed by server"}, state}
end

def handle_info(msg, state) do
@@ -75,8 +75,7 @@ def whitelisted?(url) do
%{host: domain} = URI.parse(url)

mediaproxy_whitelist_domains =
[:media_proxy, :whitelist]
|> Config.get()
Config.get([:media_proxy, :whitelist], [])
|> Kernel.++(["#{Upload.base_url()}"])
|> Enum.map(&maybe_get_domain_from_url/1)
@@ -34,7 +34,9 @@ def perform(%Plug.Conn{assigns: assigns} = conn, %{scopes: scopes} = options) do
permissions = Enum.join(missing_scopes, " #{op} ")

error_message =
dgettext("errors", "Insufficient permissions: %{permissions}.", permissions: permissions)
dgettext("errors", "Insufficient permissions: %{permissions}.",
permissions: permissions
)

conn
|> put_resp_content_type("application/json")
@@ -20,17 +20,13 @@ def init do
end

def vapid_config do
Application.get_env(:web_push_encryption, :vapid_details, [])
Application.get_env(:web_push_encryption, :vapid_details, nil)
end

def enabled do
case vapid_config() do
[] -> false
list when is_list(list) -> true
_ -> false
end
end
def enabled, do: match?([subject: _, public_key: _, private_key: _], vapid_config())

@spec send(Pleroma.Notification.t()) ::
{:ok, Oban.Job.t()} | {:error, Oban.Job.changeset() | term()}
def send(notification) do
WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id})
end
@@ -16,72 +16,74 @@ defmodule Pleroma.Web.Push.Impl do
require Logger
import Ecto.Query

@body_chars 140
@types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact", "Update"]

@doc "Performs sending notifications for user subscriptions"
@spec perform(Notification.t()) :: list(any) | :error | {:error, :unknown_type}
def perform(
@doc "Builds webpush notification payloads for the subscriptions enabled by the receiving user"
@spec build(Notification.t()) ::
list(%{content: map(), subscription: Subscription.t()}) | []
def build(
%{
activity: %{data: %{"type" => activity_type}} = activity,
user: %User{id: user_id}
user_id: user_id
} = notification
)
when activity_type in @types do
actor = User.get_cached_by_ap_id(notification.activity.data["actor"])
notification_actor = User.get_cached_by_ap_id(notification.activity.data["actor"])
avatar_url = User.avatar_url(notification_actor)

mastodon_type = notification.type
gcm_api_key = Application.get_env(:web_push_encryption, :gcm_api_key)
avatar_url = User.avatar_url(actor)
object = Object.normalize(activity, fetch: false)
user = User.get_cached_by_id(user_id)
direct_conversation_id = Activity.direct_conversation_id(activity, user)

for subscription <- fetch_subscriptions(user_id),
Subscription.enabled?(subscription, mastodon_type) do
%{
access_token: subscription.token.token,
notification_id: notification.id,
notification_type: mastodon_type,
icon: avatar_url,
preferred_locale: "en",
pleroma: %{
activity_id: notification.activity.id,
direct_conversation_id: direct_conversation_id
subscriptions = fetch_subscriptions(user_id)

subscriptions
|> Enum.filter(&Subscription.enabled?(&1, notification.type))
|> Enum.map(fn subscription ->
payload =
%{
access_token: subscription.token.token,
notification_id: notification.id,
notification_type: notification.type,
icon: avatar_url,
preferred_locale: "en",
pleroma: %{
activity_id: notification.activity.id,
direct_conversation_id: direct_conversation_id
}
}
}
|> Map.merge(build_content(notification, actor, object, mastodon_type))
|> Jason.encode!()
|> push_message(build_sub(subscription), gcm_api_key, subscription)
end
|> (&{:ok, &1}).()
|> Map.merge(build_content(notification, notification_actor, object))
|> Jason.encode!()

%{payload: payload, subscription: subscription}
end)
end

def perform(_) do
Logger.warning("Unknown notification type")
{:error, :unknown_type}
def build(notif) do
Logger.warning("WebPush: unknown activity type: #{inspect(notif)}")
[]
end

@doc "Push message to web"
def push_message(body, sub, api_key, subscription) do
try do
case WebPushEncryption.send_web_push(body, sub, api_key) do
{:ok, %{status: code}} when code in 400..499 ->
Logger.debug("Removing subscription record")
Repo.delete!(subscription)
:ok
@doc "Deliver push notification to the provided webpush subscription"
@spec deliver(%{payload: String.t(), subscription: Subscription.t()}) :: :ok | :error
def deliver(%{payload: payload, subscription: subscription}) do
gcm_api_key = Application.get_env(:web_push_encryption, :gcm_api_key)
formatted_subscription = build_sub(subscription)

{:ok, %{status: code}} when code in 200..299 ->
:ok
case WebPushEncryption.send_web_push(payload, formatted_subscription, gcm_api_key) do
{:ok, %{status: code}} when code in 200..299 ->
:ok

{:ok, %{status: code}} ->
Logger.error("Web Push Notification failed with code: #{code}")
:error
{:ok, %{status: code}} when code in 400..499 ->
Logger.debug("Removing subscription record")
Repo.delete!(subscription)
:ok

{:ok, %{status: code}} ->
Logger.error("Web Push Notification failed with code: #{code}")
:error

error ->
Logger.error("Web Push Notification failed with #{inspect(error)}")
:error
end
rescue
error ->
Logger.error("Web Push Notification failed with #{inspect(error)}")
:error

@@ -106,107 +108,106 @@ def build_sub(subscription) do
}
end

def build_content(notification, actor, object, mastodon_type \\ nil)

def build_content(
%{
user: %{notification_settings: %{hide_notification_contents: true}}
} = notification,
_actor,
_object,
mastodon_type
_user,
_object
) do
%{body: format_title(notification, mastodon_type)}
%{body: format_title(notification)}
end

def build_content(notification, actor, object, mastodon_type) do
mastodon_type = mastodon_type || notification.type

def build_content(notification, user, object) do
%{
title: format_title(notification, mastodon_type),
body: format_body(notification, actor, object, mastodon_type)
title: format_title(notification),
body: format_body(notification, user, object)
}
end

def format_body(activity, actor, object, mastodon_type \\ nil)

def format_body(_activity, actor, %{data: %{"type" => "ChatMessage"} = data}, _) do
case data["content"] do
nil -> "@#{actor.nickname}: (Attachment)"
content -> "@#{actor.nickname}: #{Utils.scrub_html_and_truncate(content, 80)}"
@spec format_body(Notification.t(), User.t(), Object.t()) :: String.t()
def format_body(_notification, user, %{data: %{"type" => "ChatMessage"} = object}) do
case object["content"] do
nil -> "@#{user.nickname}: (Attachment)"
content -> "@#{user.nickname}: #{Utils.scrub_html_and_truncate(content, @body_chars)}"
end
end

def format_body(
%{activity: %{data: %{"type" => "Create"}}},
actor,
%{data: %{"content" => content}},
_mastodon_type
%{type: "poll"} = _notification,
_user,
%{data: %{"content" => content} = data} = _object
) do
"@#{actor.nickname}: #{Utils.scrub_html_and_truncate(content, 80)}"
options = Map.get(data, "anyOf") || Map.get(data, "oneOf")

content_text = content <> "\n"

options_text = Enum.map_join(options, "\n", fn x -> "○ #{x["name"]}" end)

[content_text, options_text]
|> Enum.join("\n")
|> Utils.scrub_html_and_truncate(@body_chars)
end

def format_body(
%{activity: %{data: %{"type" => "Create"}}},
user,
%{data: %{"content" => content}}
) do
"@#{user.nickname}: #{Utils.scrub_html_and_truncate(content, @body_chars)}"
end

def format_body(
%{activity: %{data: %{"type" => "Announce"}}},
actor,
%{data: %{"content" => content}},
_mastodon_type
user,
%{data: %{"content" => content}}
) do
"@#{actor.nickname} repeated: #{Utils.scrub_html_and_truncate(content, 80)}"
"@#{user.nickname} repeated: #{Utils.scrub_html_and_truncate(content, @body_chars)}"
end

def format_body(
%{activity: %{data: %{"type" => "EmojiReact", "content" => content}}},
actor,
_object,
_mastodon_type
user,
_object
) do
"@#{actor.nickname} reacted with #{content}"
"@#{user.nickname} reacted with #{content}"
end

def format_body(
%{activity: %{data: %{"type" => type}}} = notification,
actor,
_object,
mastodon_type
user,
_object
)
when type in ["Follow", "Like"] do
mastodon_type = mastodon_type || notification.type

case mastodon_type do
"follow" -> "@#{actor.nickname} has followed you"
"follow_request" -> "@#{actor.nickname} has requested to follow you"
"favourite" -> "@#{actor.nickname} has favorited your post"
case notification.type do
"follow" -> "@#{user.nickname} has followed you"
"follow_request" -> "@#{user.nickname} has requested to follow you"
"favourite" -> "@#{user.nickname} has favorited your post"
end
end

def format_body(
%{activity: %{data: %{"type" => "Update"}}},
actor,
_object,
_mastodon_type
user,
_object
) do
"@#{actor.nickname} edited a status"
"@#{user.nickname} edited a status"
end

def format_title(activity, mastodon_type \\ nil)

def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_type) do
@spec format_title(Notification.t()) :: String.t()
def format_title(%{activity: %{data: %{"directMessage" => true}}}) do
"New Direct Message"
end

def format_title(%{type: type}, mastodon_type) do
case mastodon_type || type do
"mention" -> "New Mention"
"status" -> "New Status"
"follow" -> "New Follower"
"follow_request" -> "New Follow Request"
"reblog" -> "New Repeat"
"favourite" -> "New Favorite"
"update" -> "New Update"
"pleroma:chat_mention" -> "New Chat Message"
"pleroma:emoji_reaction" -> "New Reaction"
type -> "New #{String.capitalize(type || "event")}"
end
end
def format_title(%{type: "mention"}), do: "New Mention"
def format_title(%{type: "status"}), do: "New Status"
def format_title(%{type: "follow"}), do: "New Follower"
def format_title(%{type: "follow_request"}), do: "New Follow Request"
def format_title(%{type: "reblog"}), do: "New Repeat"
def format_title(%{type: "favourite"}), do: "New Favorite"
def format_title(%{type: "update"}), do: "New Update"
def format_title(%{type: "pleroma:chat_mention"}), do: "New Chat Message"
def format_title(%{type: "pleroma:emoji_reaction"}), do: "New Reaction"
def format_title(%{type: "poll"}), do: "Poll Results"
def format_title(%{type: type}), do: "New #{String.capitalize(type || "event")}"
end
@@ -6,35 +6,25 @@ defmodule Pleroma.Web.RichMedia.Backfill do
alias Pleroma.Web.RichMedia.Card
alias Pleroma.Web.RichMedia.Parser
alias Pleroma.Web.RichMedia.Parser.TTL
alias Pleroma.Workers.RichMediaExpirationWorker
alias Pleroma.Workers.RichMediaWorker

require Logger

@backfiller Pleroma.Config.get([__MODULE__, :provider], Pleroma.Web.RichMedia.Backfill.Task)
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@max_attempts 3
@retry 5_000

def start(%{url: url} = args) when is_binary(url) do
@spec run(map()) ::
:ok | {:error, {:invalid_metadata, any()} | :body_too_large | {:content, any()} | any()}
def run(%{"url" => url} = args) do
url_hash = Card.url_to_hash(url)

args =
args
|> Map.put(:attempt, 1)
|> Map.put(:url_hash, url_hash)

@backfiller.run(args)
end

def run(%{url: url, url_hash: url_hash, attempt: attempt} = args)
when attempt <= @max_attempts do
case Parser.parse(url) do
{:ok, fields} ->
{:ok, card} = Card.create(url, fields)

maybe_schedule_expiration(url, fields)

if Map.has_key?(args, :activity_id) do
with %{"activity_id" => activity_id} <- args,
false <- is_nil(activity_id) do
stream_update(args)
end

@@ -54,25 +44,16 @@ def run(%{url: url, url_hash: url_hash, attempt: attempt} = args)

e ->
Logger.debug("Rich media error for #{url}: #{inspect(e)}")

:timer.sleep(@retry * attempt)

run(%{args | attempt: attempt + 1})
{:error, e}
end
end

def run(%{url: url, url_hash: url_hash}) do
Logger.debug("Rich media failure for #{url}")

negative_cache(url_hash, :timer.minutes(15))
end

defp maybe_schedule_expiration(url, fields) do
case TTL.process(fields, url) do
{:ok, ttl} when is_number(ttl) ->
timestamp = DateTime.from_unix!(ttl)

RichMediaExpirationWorker.new(%{"url" => url}, scheduled_at: timestamp)
RichMediaWorker.new(%{"op" => "expire", "url" => url}, scheduled_at: timestamp)
|> Oban.insert()

_ ->

@@ -80,22 +61,14 @@ defp maybe_schedule_expiration(url, fields) do
end
end

defp stream_update(%{activity_id: activity_id}) do
defp stream_update(%{"activity_id" => activity_id}) do
Pleroma.Activity.get_by_id(activity_id)
|> Pleroma.Activity.normalize()
|> Pleroma.Web.ActivityPub.ActivityPub.stream_out()
end

defp warm_cache(key, val), do: @cachex.put(:rich_media_cache, key, val)
defp negative_cache(key, ttl \\ nil), do: @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
end

defmodule Pleroma.Web.RichMedia.Backfill.Task do
alias Pleroma.Web.RichMedia.Backfill

def run(args) do
Task.Supervisor.start_child(Pleroma.TaskSupervisor, Backfill, :run, [args],
name: {:global, {:rich_media, args.url_hash}}
)
end
defp negative_cache(key, ttl \\ :timer.minutes(15)),
do: @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
end
@@ -7,8 +7,8 @@ defmodule Pleroma.Web.RichMedia.Card do
alias Pleroma.HTML
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.Web.RichMedia.Backfill
alias Pleroma.Web.RichMedia.Parser
alias Pleroma.Workers.RichMediaWorker

@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)

@@ -75,17 +75,18 @@ def get_by_url(url) when is_binary(url) do

def get_by_url(nil), do: nil

@spec get_or_backfill_by_url(String.t(), map()) :: t() | nil
def get_or_backfill_by_url(url, backfill_opts \\ %{}) do
@spec get_or_backfill_by_url(String.t(), keyword()) :: t() | nil
def get_or_backfill_by_url(url, opts \\ []) do
if @config_impl.get([:rich_media, :enabled]) do
case get_by_url(url) do
%__MODULE__{} = card ->
card

nil ->
backfill_opts = Map.put(backfill_opts, :url, url)
activity_id = Keyword.get(opts, :activity, nil)

Backfill.start(backfill_opts)
RichMediaWorker.new(%{"op" => "backfill", "url" => url, "activity_id" => activity_id})
|> Oban.insert()

nil

@@ -137,7 +138,7 @@ def get_by_activity(activity) do
nil
else
{:cached, url} ->
get_or_backfill_by_url(url, %{activity_id: activity.id})
get_or_backfill_by_url(url, activity_id: activity.id)

_ ->
:error
@@ -173,7 +173,13 @@ def remove_socket(topic) do
def stream(topics, items) do
if should_env_send?() do
for topic <- List.wrap(topics), item <- List.wrap(items) do
spawn(fn -> do_stream(topic, item) end)
fun = fn -> do_stream(topic, item) end

if Config.get([__MODULE__, :sync_streaming], false) do
fun.()
else
spawn(fun)
end
end
end
end
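The new branch is driven by a configuration flag rather than a code path change in callers. The test environment enables it, as shown in the `config/test.exs` hunk earlier in this diff; a minimal sketch of that knob:

```
# Sketch only: opt into synchronous streaming (used by the test suite above)
# instead of spawning a process per streamed item.
config :pleroma, Pleroma.Web.Streaming, sync_streaming: true
```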
@@ -9,7 +9,7 @@ def string_from_xpath(_, :error), do: nil

def string_from_xpath(xpath, doc) do
try do
{:xmlObj, :string, res} = :xmerl_xpath.string('string(#{xpath})', doc)
{:xmlObj, :string, res} = :xmerl_xpath.string(~c"string(#{xpath})", doc)

res =
res
@@ -16,7 +16,7 @@ defmodule Pleroma.Workers.NotificationWorker do
def perform(%Job{args: %{"op" => "create", "activity_id" => activity_id}}) do
with %Activity{} = activity <- find_activity(activity_id),
{:ok, notifications} <- Notification.create_notifications(activity) do
Notification.send(notifications)
Notification.stream(notifications)
end
end
@@ -14,8 +14,9 @@ defmodule Pleroma.Workers.PollWorker do

@impl Oban.Worker
def perform(%Job{args: %{"op" => "poll_end", "activity_id" => activity_id}}) do
with %Activity{} = activity <- find_poll_activity(activity_id) do
Notification.create_poll_notifications(activity)
with %Activity{} = activity <- find_poll_activity(activity_id),
{:ok, notifications} <- Notification.create_poll_notifications(activity) do
Notification.stream(notifications)
end
end
@@ -1,15 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.RichMediaExpirationWorker do
alias Pleroma.Web.RichMedia.Card

use Oban.Worker,
queue: :background

@impl Oban.Worker
def perform(%Job{args: %{"url" => url} = _args}) do
Card.delete(url)
end
end

lib/pleroma/workers/rich_media_worker.ex (new file)

@@ -0,0 +1,19 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.RichMediaWorker do
  alias Pleroma.Web.RichMedia.Backfill
  alias Pleroma.Web.RichMedia.Card

  use Oban.Worker, queue: :background, max_attempts: 3, unique: [period: 300]

  @impl Oban.Worker
  def perform(%Job{args: %{"op" => "expire", "url" => url} = _args}) do
    Card.delete(url)
  end

  def perform(%Job{args: %{"op" => "backfill", "url" => _url} = args}) do
    Backfill.run(args)
  end
end
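A sketch of how jobs for this worker might be enqueued elsewhere; the argument shapes come from the perform/1 clauses above, while the scheduling delay is only illustrative:

  # Drop a cached card once its TTL elapses (delay assumed):
  %{"op" => "expire", "url" => url}
  |> Pleroma.Workers.RichMediaWorker.new(schedule_in: ttl)
  |> Oban.insert()

  # Fetch and cache a card in the background right away:
  %{"op" => "backfill", "url" => url}
  |> Pleroma.Workers.RichMediaWorker.new()
  |> Oban.insert()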

lib/pleroma/workers/user_refresh_worker.ex (new file)

@@ -0,0 +1,14 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.UserRefreshWorker do
  use Pleroma.Workers.WorkerHelper, queue: "background", max_attempts: 1, unique: [period: 300]

  alias Pleroma.User

  @impl Oban.Worker
  def perform(%Job{args: %{"ap_id" => ap_id}}) do
    User.fetch_by_ap_id(ap_id)
  end
end
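A minimal sketch of scheduling a refresh through the new worker (plain Oban form shown; the WorkerHelper macro may provide its own enqueue helper, which is not assumed here):

  # Re-fetch a remote account by its ActivityPub id in the background:
  %{"ap_id" => user.ap_id}
  |> Pleroma.Workers.UserRefreshWorker.new()
  |> Oban.insert()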
@@ -5,6 +5,7 @@
defmodule Pleroma.Workers.WebPusherWorker do
  alias Pleroma.Notification
  alias Pleroma.Repo
  alias Pleroma.Web.Push.Impl

  use Pleroma.Workers.WorkerHelper, queue: "web_push"

@@ -15,7 +16,8 @@ def perform(%Job{args: %{"op" => "web_push", "notification_id" => notification_i
      |> Repo.get(notification_id)
      |> Repo.preload([:activity, :user])

    Pleroma.Web.Push.Impl.perform(notification)
    Impl.build(notification)
    |> Enum.each(&Impl.deliver(&1))
  end

  @impl Oban.Worker
37
mix.exs
37
mix.exs
|
@ -12,7 +12,7 @@ def project do
|
|||
elixir: "~> 1.13",
|
||||
elixirc_paths: elixirc_paths(Mix.env()),
|
||||
compilers: Mix.compilers(),
|
||||
elixirc_options: [warnings_as_errors: warnings_as_errors()],
|
||||
elixirc_options: [warnings_as_errors: warnings_as_errors(), prune_code_paths: false],
|
||||
xref: [exclude: [:eldap]],
|
||||
dialyzer: [plt_add_apps: [:mix, :eldap]],
|
||||
start_permanent: Mix.env() == :prod,
|
||||
|
@@ -75,14 +75,15 @@ def copy_nginx_config(%{path: target_path} = release) do
  def application do
    [
      mod: {Pleroma.Application, []},
      extra_applications: [
        :logger,
        :runtime_tools,
        :comeonin,
        :fast_sanitize,
        :os_mon,
        :ssl
      ],
      extra_applications:
        [
          :logger,
          :runtime_tools,
          :comeonin,
          :fast_sanitize,
          :os_mon,
          :ssl
        ] ++ logger_application(),
      included_applications: [:ex_syslogger]
    ]
  end
@@ -111,6 +112,22 @@ defp oauth_deps do
    for s <- oauth_strategy_packages, do: {String.to_atom(s), ">= 0.0.0"}
  end

  defp logger_application do
    if Version.match?(System.version(), "<1.15.0-rc.0") do
      []
    else
      [:logger_backends]
    end
  end

  defp logger_deps do
    if Version.match?(System.version(), "<1.15.0-rc.0") do
      []
    else
      [{:logger_backends, "~> 1.0"}]
    end
  end

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
@@ -202,7 +219,7 @@ defp deps do
      {:websockex, "~> 0.4.3", only: :test},
      {:benchee, "~> 1.0", only: :benchmark},
      {:dialyxir, "~> 1.4", only: [:dev, :test], runtime: false}
    ] ++ oauth_deps()
    ] ++ oauth_deps() ++ logger_deps()
  end

  # Aliases are shortcuts or tasks specific to the current project.

mix.lock

@@ -78,6 +78,7 @@
  "jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, :rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"},
  "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"},
  "linkify": {:hex, :linkify, "0.5.3", "5f8143d8f61f5ff08d3aeeff47ef6509492b4948d8f08007fbf66e4d2246a7f2", [:mix], [], "hexpm", "3ef35a1377d47c25506e07c1c005ea9d38d700699d92ee92825f024434258177"},
  "logger_backends": {:hex, :logger_backends, "1.0.0", "09c4fad6202e08cb0fbd37f328282f16539aca380f512523ce9472b28edc6bdf", [:mix], [], "hexpm", "1faceb3e7ec3ef66a8f5746c5afd020e63996df6fd4eb8cdb789e5665ae6c9ce"},
  "majic": {:hex, :majic, "1.0.0", "37e50648db5f5c2ff0c9fb46454d034d11596c03683807b9fb3850676ffdaab3", [:make, :mix], [{:elixir_make, "~> 0.6.1", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "7905858f76650d49695f14ea55cd9aaaee0c6654fa391671d4cf305c275a0a9e"},
  "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"},
  "makeup_elixir": {:hex, :makeup_elixir, "0.14.1", "4f0e96847c63c17841d42c08107405a005a2680eb9c7ccadfd757bd31dabccfb", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f2438b1a80eaec9ede832b5c41cd4f373b38fd7aa33e3b22d9db79e640cbde11"},
@@ -7,7 +7,10 @@ defmodule Pleroma.Repo.Migrations.CreateUserTrigramIndex do

  def change do
    create_if_not_exists(
      index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist)
      index(:users, ["(nickname || name) gist_trgm_ops"],
        name: :users_trigram_index,
        using: :gist
      )
    )
  end
end
@@ -20,7 +20,10 @@ def down do
    drop_if_exists(index(:users, [], name: :users_trigram_index))

    create_if_not_exists(
      index(:users, ["(nickname || name) gist_trgm_ops"], name: :users_trigram_index, using: :gist)
      index(:users, ["(nickname || name) gist_trgm_ops"],
        name: :users_trigram_index,
        using: :gist
      )
    )
  end
end
@@ -7,7 +7,10 @@ defmodule Pleroma.Repo.Migrations.AddTagIndexToObjects do

  def change do
    drop_if_exists(
      index(:activities, ["(data #> '{\"object\",\"tag\"}')"], using: :gin, name: :activities_tags)
      index(:activities, ["(data #> '{\"object\",\"tag\"}')"],
        using: :gin,
        name: :activities_tags
      )
    )

    create_if_not_exists(index(:objects, ["(data->'tag')"], using: :gin, name: :objects_tags))
@@ -9,7 +9,9 @@ defmodule Pleroma.Repo.Migrations.RemoteipPlugRename do

  def up do
    config =
      from(c in Pleroma.ConfigDB, where: c.group == ^:pleroma and c.key == ^Pleroma.Plugs.RemoteIp)
      from(c in Pleroma.ConfigDB,
        where: c.group == ^:pleroma and c.key == ^Pleroma.Plugs.RemoteIp
      )
      |> Pleroma.Repo.one()

    if config do
@@ -3,7 +3,9 @@ defmodule Pleroma.Repo.Migrations.AddActivityAssignedAccountIndex do

  def change do
    create_if_not_exists(
      index(:activities, ["(data->>'assigned_account')"], name: :activities_assigned_account_index)
      index(:activities, ["(data->>'assigned_account')"],
        name: :activities_assigned_account_index
      )
    )
  end
end
@@ -0,0 +1,7 @@
defmodule Pleroma.Repo.Migrations.DeprecateConfigDBLogger do
  use Ecto.Migration

  def change do
    execute("DELETE FROM config WHERE config.group = ':logger'")
  end
end
Binary adminfe assets changed (contents not shown in this diff). New files:

priv/static/adminfe/chunk-091e.07f692aa.css
priv/static/adminfe/chunk-3779.8ac501a6.css
priv/static/adminfe/chunk-5290.9a003297.css
priv/static/adminfe/chunk-6816.60ad31eb.css
priv/static/adminfe/chunk-9bb9.ca31fc42.css
priv/static/adminfe/chunk-da78.ba246a04.css
priv/static/adminfe/chunk-elementUI.9644454c.css
Some files were not shown because too many files have changed in this diff.