Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into pleroma-meilisearch

Lain Soykaf 2023-11-12 13:53:18 +04:00
commit 0c5cc51983
745 changed files with 11312 additions and 4608 deletions


@ -8,6 +8,13 @@ variables: &global_variables
DB_PORT: 5432
MIX_ENV: test
workflow:
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
when: never
- if: $CI_COMMIT_BRANCH
cache: &global_cache_policy
key:
files:
@ -17,12 +24,14 @@ cache: &global_cache_policy
- _build
stages:
- check-changelog
- build
- test
- benchmark
- deploy
- release
- docker
- docker-combine
before_script:
- echo $MIX_ENV
@ -32,24 +41,51 @@ before_script:
after_script:
- rm -rf _build/*/lib/pleroma
check-changelog:
stage: check-changelog
image: alpine
rules:
- if: $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == 'pleroma/pleroma' && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == 'weblate-extract'
when: never
- if: $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == 'pleroma/pleroma' && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == 'weblate'
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop"
before_script: ''
after_script: ''
cache: {}
script:
- apk add git
- sh ./tools/check-changelog
.build_changes_policy:
rules:
- changes:
- ".gitlab-ci.yml"
- "**/*.ex"
- "**/*.exs"
- "mix.lock"
.using-ci-base:
tags:
- amd64
build:
extends:
- .build_changes_policy
- .using-ci-base
stage: build
only:
changes: &build_changes_policy
- ".gitlab-ci.yml"
- "**/*.ex"
- "**/*.exs"
- "mix.lock"
script:
- mix compile --force
spec-build:
extends:
- .using-ci-base
stage: test
only:
rules:
changes:
- changes:
- ".gitlab-ci.yml"
- "lib/pleroma/web/api_spec/**/*.ex"
- "lib/pleroma/web/api_spec.ex"
artifacts:
paths:
- spec.json
@ -57,6 +93,8 @@ spec-build:
- mix pleroma.openapi_spec spec.json
benchmark:
extends:
- .using-ci-base
stage: benchmark
when: manual
variables:
@ -71,9 +109,10 @@ benchmark:
- mix pleroma.load_testing
unit-testing:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
only:
changes: *build_changes_policy
cache: &testing_cache_policy
<<: *global_cache_policy
policy: pull
@ -94,11 +133,12 @@ unit-testing:
path: coverage.xml
unit-testing-erratic:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
retry: 2
allow_failure: true
only:
changes: *build_changes_policy
cache: &testing_cache_policy
<<: *global_cache_policy
policy: pull
@ -129,9 +169,10 @@ unit-testing-erratic:
# - mix test --trace --only federated
unit-testing-rum:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
only:
changes: *build_changes_policy
cache: *testing_cache_policy
services:
- name: minibikini/postgres-with-rum:12
@ -147,10 +188,9 @@ unit-testing-rum:
- mix test --preload-modules
lint:
extends: .build_changes_policy
image: &current_elixir elixir:1.12-alpine
stage: test
only:
changes: *build_changes_policy
cache: *testing_cache_policy
before_script: &current_bfr_script
- apk update
@ -162,18 +202,18 @@ lint:
- mix format --check-formatted
analysis:
extends:
- .build_changes_policy
- .using-ci-base
stage: test
only:
changes: *build_changes_policy
cache: *testing_cache_policy
script:
- mix credo --strict --only=warnings,todo,fixme,consistency,readability
cycles:
extends: .build_changes_policy
image: *current_elixir
stage: test
only:
changes: *build_changes_policy
cache: {}
before_script: *current_bfr_script
script:
@ -190,7 +230,7 @@ docs-deploy:
before_script:
- apk add curl
script:
- curl -X POST -F"token=$DOCS_PIPELINE_TRIGGER" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" https://git.pleroma.social/api/v4/projects/673/trigger/pipeline
- curl --fail-with-body -X POST -F"token=$CI_JOB_TOKEN" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" https://git.pleroma.social/api/v4/projects/673/trigger/pipeline
review_app:
image: alpine:3.9
stage: deploy
@ -231,7 +271,7 @@ spec-deploy:
before_script:
- apk add curl
script:
- curl -X POST -F"token=$API_DOCS_PIPELINE_TRIGGER" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" -F"variables[JOB_REF]=$CI_JOB_ID" https://git.pleroma.social/api/v4/projects/1130/trigger/pipeline
- curl --fail-with-body -X POST -F"token=$CI_JOB_TOKEN" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" -F"variables[JOB_REF]=$CI_JOB_ID" https://git.pleroma.social/api/v4/projects/1130/trigger/pipeline
stop_review_app:
@ -354,104 +394,167 @@ arm64-musl:
before_script: *before-release-musl
script: *release
docker:
.kaniko:
stage: docker
image: docker:latest
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
cache: {}
dependencies: []
variables: &docker-variables
before_script: &before-kaniko
DOCKER_DRIVER: overlay2
DOCKER_HOST: unix:///var/run/docker.sock
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
IMAGE_TAG_SLUG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
IMAGE_TAG_LATEST: $CI_REGISTRY_IMAGE:latest
IMAGE_TAG_LATEST_STABLE: $CI_REGISTRY_IMAGE:latest-stable
DOCKER_BUILDX_URL: https://github.com/docker/buildx/releases/download/v0.6.3/buildx-v0.6.3.linux-amd64
DOCKER_BUILDX_HASH: 980e6b9655f971991fbbb5fd6cd19f1672386195
before_script: &before-docker
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker pull $IMAGE_TAG_SLUG || true
- export CI_JOB_TIMESTAMP=$(date --utc -Iseconds)
- export CI_VCS_REF=$CI_COMMIT_SHORT_SHA
allow_failure: true
- export IMAGE_TAG=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:$CI_COMMIT_SHORT_SHA
script:
- export IMAGE_TAG_SLUG=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:$CI_COMMIT_REF_SLUG
- mkdir -p /root/.docker/cli-plugins
- export IMAGE_TAG_LATEST=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:latest
- wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- export IMAGE_TAG_LATEST_STABLE=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:latest-stable
- echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- mkdir -p /kaniko/.docker
- chmod +x ~/.docker/cli-plugins/docker-buildx
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
- docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- docker buildx create --name mbuilder --driver docker-container --use
.kaniko-latest:
- docker buildx inspect --bootstrap
extends: .kaniko
- docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG -t $IMAGE_TAG_LATEST .
tags:
- dind
only:
- develop@pleroma/pleroma
docker-stable:
stage: docker
image: docker:latest
cache: {}
dependencies: []
variables: *docker-variables
before_script: *before-docker
allow_failure: true
script:
- mkdir -p /root/.docker/cli-plugins
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG --destination $IMAGE_TAG_LATEST
- wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
.kaniko-stable:
- chmod +x ~/.docker/cli-plugins/docker-buildx
extends: .kaniko
- docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- docker buildx create --name mbuilder --driver docker-container --use
- docker buildx inspect --bootstrap
- docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG -t $IMAGE_TAG_LATEST_STABLE .
tags:
- dind
only:
- stable@pleroma/pleroma
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG --destination $IMAGE_TAG_LATEST_STABLE
docker-release:
.kaniko-release:
stage: docker
extends: .kaniko
image: docker:latest
cache: {}
dependencies: []
variables: *docker-variables
before_script: *before-docker
allow_failure: true
script:
script:
- mkdir -p /root/.docker/cli-plugins
- wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- chmod +x ~/.docker/cli-plugins/docker-buildx
- docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- docker buildx create --name mbuilder --driver docker-container --use
- docker buildx inspect --bootstrap
- docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG .
tags:
- dind
only:
- /^release/.*$/@pleroma/pleroma
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG
docker-adhoc:
.kaniko-adhoc:
stage: docker
extends: .kaniko
image: docker:latest
cache: {}
dependencies: []
variables: *docker-variables
before_script: *before-docker
allow_failure: true
script:
script:
- mkdir -p /root/.docker/cli-plugins
- wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- chmod +x ~/.docker/cli-plugins/docker-buildx
- docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- docker buildx create --name mbuilder --driver docker-container --use
- docker buildx inspect --bootstrap
- docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG .
tags:
- dind
only:
- /^build-docker/.*$/@pleroma/pleroma
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG
.kaniko:linux/amd64:
variables:
BUILD_ARCH: linux/amd64
BUILD_ARCH_IMG_SUFFIX: linux-amd64
ELIXIR_IMG: hexpm/elixir
tags:
- amd64
.kaniko:linux/arm64:
variables:
BUILD_ARCH: linux/arm64/v8
BUILD_ARCH_IMG_SUFFIX: linux-arm64-v8
ELIXIR_IMG: hexpm/elixir
tags:
- arm
.kaniko:linux/arm:
variables:
BUILD_ARCH: linux/arm/v7
BUILD_ARCH_IMG_SUFFIX: linux-arm-v7
ELIXIR_IMG: git.pleroma.social:5050/pleroma/ci-image/elixir-linux-arm-v7
tags:
- arm32-specified
kaniko-latest:linux/amd64:
extends:
- .kaniko-latest
- .kaniko:linux/amd64
kaniko-latest:linux/arm64:
extends:
- .kaniko-latest
- .kaniko:linux/arm64
kaniko-latest:linux/arm:
extends:
- .kaniko-latest
- .kaniko:linux/arm
kaniko-stable:linux/amd64:
extends:
- .kaniko-stable
- .kaniko:linux/amd64
kaniko-stable:linux/arm64:
extends:
- .kaniko-stable
- .kaniko:linux/arm64
kaniko-stable:linux/arm:
extends:
- .kaniko-stable
- .kaniko:linux/arm
kaniko-release:linux/amd64:
extends:
- .kaniko-release
- .kaniko:linux/amd64
kaniko-release:linux/arm64:
extends:
- .kaniko-release
- .kaniko:linux/arm64
kaniko-release:linux/arm:
extends:
- .kaniko-release
- .kaniko:linux/arm
.docker-combine:
stage: docker-combine
image: docker:cli
cache: {}
before_script:
- 'BUILD_ARCHES="linux-amd64 linux-arm64-v8 linux-arm-v7"'
- export IMAGE_TAG=$CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
- export IMAGE_TAG_SLUG=$CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
- export IMAGE_TAG_LATEST=$CI_REGISTRY_IMAGE:latest
- export IMAGE_TAG_LATEST_STABLE=$CI_REGISTRY_IMAGE:latest-stable
- 'IMAGES=; for arch in $BUILD_ARCHES; do IMAGES="$IMAGES $CI_REGISTRY_IMAGE/$arch:$CI_COMMIT_SHORT_SHA"; done'
- 'IMAGES_SLUG=; for arch in $BUILD_ARCHES; do IMAGES_SLUG="$IMAGES_SLUG $CI_REGISTRY_IMAGE/$arch:$CI_COMMIT_REF_SLUG"; done'
- 'IMAGES_LATEST=; for arch in $BUILD_ARCHES; do IMAGES_LATEST="$IMAGES_LATEST $CI_REGISTRY_IMAGE/$arch:latest"; done'
- 'IMAGES_LATEST_STABLE=; for arch in $BUILD_ARCHES; do IMAGES_LATEST_STABLE="$IMAGES_LATEST_STABLE $CI_REGISTRY_IMAGE/$arch:latest-stable"; done'
- mkdir -p ~/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > ~/.docker/config.json
docker-combine:latest:
extends: .docker-combine
only:
- develop@pleroma/pleroma
script:
- 'docker manifest create $IMAGE_TAG $IMAGES'
- 'docker manifest push $IMAGE_TAG'
- 'docker manifest create $IMAGE_TAG_SLUG $IMAGES_SLUG'
- 'docker manifest push $IMAGE_TAG_SLUG'
- 'docker manifest create $IMAGE_TAG_LATEST $IMAGES_LATEST'
- 'docker manifest push $IMAGE_TAG_LATEST'
docker-combine:stable:
extends: .docker-combine
only:
- stable@pleroma/pleroma
script:
- 'docker manifest create $IMAGE_TAG $IMAGES'
- 'docker manifest push $IMAGE_TAG'
- 'docker manifest create $IMAGE_TAG_SLUG $IMAGES_SLUG'
- 'docker manifest push $IMAGE_TAG_SLUG'
- 'docker manifest create $IMAGE_TAG_LATEST_STABLE $IMAGES_LATEST_STABLE'
- 'docker manifest push $IMAGE_TAG_LATEST_STABLE'
docker-combine:release:
extends: .docker-combine
only:
- /^release/.*$/@pleroma/pleroma
script:
- 'docker manifest create $IMAGE_TAG $IMAGES'
- 'docker manifest push $IMAGE_TAG'
- 'docker manifest create $IMAGE_TAG_SLUG $IMAGES_SLUG'
- 'docker manifest push $IMAGE_TAG_SLUG'


@ -0,0 +1,10 @@
### Checklist
- [ ] Adding a changelog: In the `changelog.d` directory, create a file named `<code>.<type>`.
`<code>` can be anything, but we recommend using a more or less unique identifier to avoid collisions, such as the branch name.
`<type>` can be `add`, `change`, `remove`, `fix`, `security` or `skip`. `skip` is only used if there is no user-visible change in the MR (for example, only editing comments in the code). Otherwise, choose a type that corresponds to your change.
In the file, write the changelog entry. For example, if an MR adds group functionality, we can create a file named `group.add` and write `Add group functionality` in it.
If one changelog entry is not enough, you may add more, but that might be a sign that the MR could be split in two. Only use more than one changelog entry if you really need to (for example, when one change in the code fixes two different bugs, or when refactoring).


@ -1,6 +1,6 @@
### Release checklist
* [ ] Bump version in `mix.exs`
* [ ] Compile a changelog with the `tools/collect-changelog` script
* [ ] Create an MR with an announcement to pleroma.social
#### post-merge
* [ ] Tag the release on the merge commit


@ -4,14 +4,124 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## Unreleased
## 2.6.0
### Security
- Preload: Make generated JSON HTML-safe. It already was HTML-safe because it only consists of config data that is base64 encoded, but this will keep it safe if that ever changes.
- CommonAPI: Prevent users from accessing media of other users by creating a status with reused attachment ID
- Disable XML entity resolution completely to fix a DoS vulnerability
### Added
- Support for Image activities, namely from Hubzilla
- Add OAuth scope descriptions
- Allow lang attribute in status text
- OnlyMedia Upload Filter
- Implement MRF policy to reject or delist according to emojis
- (hardening) Add no_new_privs=yes to OpenRC service files
- Implement quotes
- Add unified streaming endpoint
### Fixed
- rel="me" was missing its cache
- MediaProxy responses now return a sandbox CSP header
- Filter context activities using Visibility.visible_for_user?
- UploadedMedia: Add missing disposition_type to Content-Disposition
- Fix not being able to fetch flash files from remote instances
- Fix abnormal behaviour when refetching a poll
- Allow non-HTTP(s) URIs in "url" fields for compatibility with "FEP-fffd: Proxy Objects"
- Fix opengraph and twitter card meta tags
- ForceMentionsInContent: fix double mentions for Mastodon/Misskey posts
- OEmbed HTML tags are now filtered
- Restrict attachments to uploaded files only
- Fix error 404 when deleting status of a banned user
- Fix config ownership in dockerfile to pass restriction test
- Fix user fetch completely broken if featured collection is not in a supported form
- Correctly handle the situation when a poll has both "anyOf" and "oneOf", but one of them is empty
- Fix handling of reports from a deactivated user
- Prevent using the .json format to bypass authorized fetch mode
- Fix mentioning punycode domains when using Markdown
- Show more informative errors when profile exceeds char limits
### Removed
- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
- Remove the BBS/SSH feature; it has been replaced by an external bridge.
- Remove a few unused indexes.
- Cleanup OStatus-era user upgrades and ap_enabled indicator
- Deprecate Pleroma's audio scrobbling
## 2.5.5
### Security
- Prevent users from accessing media of other users by creating a status with reused attachment ID
## 2.5.4
### Security
- Fix XML External Entity (XXE) loading vulnerability allowing to fetch arbitrary files from the server's filesystem
## 2.5.3
### Security
- Emoji pack loader sanitizes pack names
- Reduced permissions of config files and directories, distros requiring greater permissions like group-read need to pre-create the directories
## 2.5.2
### Security
- `/proxy` endpoint now sets a Content-Security-Policy (sandbox)
- WebSocket endpoint now respects unauthenticated restrictions for streams of public posts
- OEmbed HTML tags are now filtered
### Changed
- docs: Be more explicit about the level of compatibility of OTP releases
- Set default background worker timeout to 15 minutes
### Fixed
- Atom/RSS formatting (HTML truncation, published, missing summary)
- Remove `static_fe` pipeline for `/users/:nickname/feed`
- Stop oban from retrying if validation errors occur when processing incoming data
- Make sure object refetching as used by already received polls follows MRF rules
### Removed
- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
## 2.5.1
### Added
- Allow customizing instance languages
### Fixed
- Security: uploading HTTP endpoint can no longer create directories in the upload dir (internal APIs, like backup, can still do it)
- The ~ character in URLs in Markdown posts is now handled properly
- Exiftool upload filter will now ignore SVG files
- Fix `block_from_stranger` setting
- Fix rel="me"
- Docker images will now run properly
- Fix improper content being cached in report content
- Notification filter on object content will not operate on the ones that inherently have no content
- ZWNJ and double dots in links are parsed properly for plain-text posts
- OTP releases will work on systems with a newer libcrypt
- Errors from the Exiftool.ReadDescription filter will no longer be written into the image description
## 2.5.0 - 2022-12-23
### Removed
- MastoFE
- Quack, the logging backend that pushes to Slack channels
### Changed
- **Breaking:** Elixir >=1.10 is now required (was >= 1.9)
- **Breaking:** Elixir >=1.11 is now required (was >= 1.9)
- Allow users to remove their emails if instance does not need email to register
- Uploadfilter `Pleroma.Upload.Filter.Exiftool` has been renamed to `Pleroma.Upload.Filter.Exiftool.StripLocation`
- **Breaking**: `/api/v1/pleroma/backups` endpoints now requires `read:backups` scope instead of `read:accounts`
@ -24,8 +134,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- `activeMonth` and `activeHalfyear` fields in NodeInfo usage.users object
- Experimental support for Finch. Put `config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}` in your secrets file to use it. Reverse Proxy will still use Hackney.
- `ForceMentionsInPostContent` MRF policy
- AdminAPI: allow moderators to manage reports, users, invites, and custom emojis
- AdminAPI: restrict moderators to access sensitive data: change user credentials, get password reset token, read private statuses and chats, etc
- PleromaAPI: Add remote follow API endpoint at `POST /api/v1/pleroma/remote_interaction`
- MastoAPI: Add `GET /api/v1/accounts/lookup`
- MastoAPI: Profile Directory support
@ -37,6 +145,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Configuration: Add `birthday_required` and `birthday_min_age` settings to provide a way to require users to enter their birth date.
- PleromaAPI: Add `GET /api/v1/pleroma/birthdays` API endpoint
- Make backend-rendered pages translatable. This includes emails. Pages returned as a HTTP response are translated using the language specified in the `userLanguage` cookie, or the `Accept-Language` header. Emails are translated using the `language` field when registering. This language can be changed by `PATCH /api/v1/accounts/update_credentials` with the `language` field.
- Add fine grained options to provide privileges to moderators and admins (e.g. delete messages, manage reports...)
- Uploadfilter `Pleroma.Upload.Filter.Exiftool.ReadDescription` returns description values to the FE so they can pre fill the image description field
- Added move account API
- Enable remote users to interact with posts
@ -62,10 +171,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- RSS and Atom feeds for users work again
- TwitterCard meta tags conformance
### Removed
## 2.4.5 - 2022-11-27
- Quack, the logging backend that pushes to Slack channels
## 2.4.5 - 2022-08-27
## Fixed
- Image `class` attributes not being scrubbed, allowing to exploit frontend special classes [!3792](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3792)


@ -1,4 +1,9 @@
FROM elixir:1.11.4-alpine as build
ARG ELIXIR_IMG=hexpm/elixir
ARG ELIXIR_VER=1.11.4
ARG ERLANG_VER=24.2.1
ARG ALPINE_VER=3.17.0
FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build
COPY . .
@ -12,7 +17,7 @@ RUN apk add git gcc g++ musl-dev make cmake file-dev &&\
mkdir release &&\
mix release --path release
FROM alpine:${ALPINE_VER}
ARG BUILD_DATE
ARG VCS_REF
@ -44,7 +49,7 @@ USER pleroma
COPY --from=build --chown=pleroma:0 /release ${HOME}
COPY ./config/docker.exs /etc/pleroma/config.exs
COPY --chown=pleroma --chmod=640 ./config/docker.exs /etc/pleroma/config.exs
COPY ./docker-entrypoint.sh ${HOME}
EXPOSE 4000


@ -30,7 +30,8 @@ If your platform is not supported, or you just want to be able to edit the sourc
- [OpenBSD (fi)](https://docs-develop.pleroma.social/backend/installation/openbsd_fi/)
### OS/Distro packages
Currently Pleroma is packaged for [YunoHost](https://yunohost.org) and [NixOS](https://nixos.org). If you want to package Pleroma for any OS/Distros, we can guide you through the process on our [community channels](#community-channels). If you want to change default options in your Pleroma package, please **discuss it with us first**.
Currently Pleroma is packaged for [YunoHost](https://yunohost.org), [NixOS](https://nixos.org), [Gentoo through GURU](https://gentoo.org/) and [Archlinux through AUR](https://aur.archlinux.org/packages/pleroma). You may find more at <https://repology.org/project/pleroma/versions>.
If you want to package Pleroma for any OS/Distros, we can guide you through the process on our [community channels](#community-channels). If you want to change default options in your Pleroma package, please **discuss it with us first**.
### Docker
While we don't provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://glitch.sh/sn0w/pleroma-docker>.


@ -3,8 +3,20 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Benchmark do
import Mix.Pleroma
@shortdoc "Benchmarks"
@moduledoc """
Benchmark tasks available:
adapters
render_timeline
search
tag
MIX_ENV=benchmark mix pleroma.benchmark adapters
"""
use Mix.Task
import Mix.Pleroma
def run(["search"]) do
start_pleroma()
@ -63,7 +75,7 @@ def run(["render_timeline", nickname | _] = args) do
Benchee.run(
%{
"Standart rendering" => fn activities ->
"Standard rendering" => fn activities ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: activities,
for: user,


@ -0,0 +1 @@
Fix XML External Entity (XXE) loading vulnerability allowing to fetch arbitrary files from the server's filesystem

changelog.d/benchee.skip (new, empty file)

@ -0,0 +1 @@
CommonAPI: Prevent users from accessing media of other users by creating a status with reused attachment ID


@ -0,0 +1 @@
Fix the processing of email digest jobs.


@ -0,0 +1 @@
Emoji pack loader sanitizes pack names


@ -0,0 +1 @@
- Reduced permissions of config files and directories, distros requiring greater permissions like group-read need to pre-create the directories


@ -253,7 +253,23 @@
show_reactions: true,
password_reset_token_validity: 60 * 60 * 24,
profile_directory: true,
privileged_staff: false,
admin_privileges: [
:users_read,
:users_manage_invites,
:users_manage_activation_state,
:users_manage_tags,
:users_manage_credentials,
:users_delete,
:messages_read,
:messages_delete,
:instances_delete,
:reports_manage_reports,
:moderation_log_read,
:announcements_manage_announcements,
:emoji_manage_emoji,
:statistics_read
],
moderator_privileges: [:messages_delete, :reports_manage_reports],
max_endorsed_users: 20,
birthday_required: false,
birthday_min_age: 0,
@ -392,6 +408,12 @@
federated_timeline_removal: [],
replace: []
config :pleroma, :mrf_emoji,
remove_url: [],
remove_shortcode: [],
federated_timeline_removal_url: [],
federated_timeline_removal_shortcode: []
config :pleroma, :mrf_hashtag,
sensitive: ["nsfw"],
reject: [],
@ -412,6 +434,8 @@
config :pleroma, :mrf_follow_bot, follower_nickname: nil
config :pleroma, :mrf_inline_quote, template: "<bdi>RT:</bdi> {url}"
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
@ -603,9 +627,6 @@
base: System.get_env("LDAP_BASE") || "dc=example,dc=com",
uid: System.get_env("LDAP_UID") || "cn"
config :esshd,
enabled: false
oauth_consumer_strategies =
System.get_env("OAUTH_CONSUMER_STRATEGIES")
|> to_string()
@ -841,7 +862,11 @@
config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: false
config :pleroma, :mrf,
policies: [Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy, Pleroma.Web.ActivityPub.MRF.TagPolicy],
policies: [
Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy,
Pleroma.Web.ActivityPub.MRF.TagPolicy,
Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy
],
transparency: true,
transparency_exclusions: []
@ -860,7 +885,9 @@
config :pleroma, Pleroma.User.Backup,
purge_after_days: 30,
limit_days: 7,
dir: nil,
process_wait_time: 30_000,
process_chunk_size: 100
config :pleroma, ConcurrentLimiter, [
{Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]},


@ -998,10 +998,48 @@
description: "Enable profile directory." description: "Enable profile directory."
}, },
%{ %{
key: :privileged_staff, key: :admin_privileges,
type: :boolean, type: {:list, :atom},
suggestions: [
:users_read,
:users_manage_invites,
:users_manage_activation_state,
:users_manage_tags,
:users_manage_credentials,
:users_delete,
:messages_read,
:messages_delete,
:instances_delete,
:reports_manage_reports,
:moderation_log_read,
:announcements_manage_announcements,
:emoji_manage_emoji,
:statistics_read
],
description:
"Let moderators access sensitive data (e.g. updating user credentials, get password reset token, delete users, index and read private statuses and chats)"
"What extra privileges to allow admins (e.g. updating user credentials, get password reset token, delete users, index and read private statuses and chats)"
},
%{
key: :moderator_privileges,
type: {:list, :atom},
suggestions: [
:users_read,
:users_manage_invites,
:users_manage_activation_state,
:users_manage_tags,
:users_manage_credentials,
:users_delete,
:messages_read,
:messages_delete,
:instances_delete,
:reports_manage_reports,
:moderation_log_read,
:announcements_manage_announcements,
:emoji_manage_emoji,
:statistics_read
],
description:
"What extra privileges to allow moderators (e.g. updating user credentials, get password reset token, delete users, index and read private statuses and chats)"
},
%{
key: :birthday_required,
@ -1014,6 +1052,15 @@
description:
"Minimum required age (in days) for users to create account. Only used if birthday is required.",
suggestions: [6570]
},
%{
key: :languages,
type: {:list, :string},
description:
"Languages to be exposed in /api/v1/instance. Should be in the format of BCP47 language codes.",
suggestions: [
"en"
]
}
]
},
@ -2581,45 +2628,6 @@
}
]
},
%{
group: :esshd,
label: "ESSHD",
type: :group,
description:
"Before enabling this you must add :esshd to mix.exs as one of the extra_applications " <>
"and generate host keys in your priv dir with ssh-keygen -m PEM -N \"\" -b 2048 -t rsa -f ssh_host_rsa_key",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables SSH"
},
%{
key: :priv_dir,
type: :string,
description: "Dir with SSH keys",
suggestions: ["/some/path/ssh_keys"]
},
%{
key: :handler,
type: :string,
description: "Handler module",
suggestions: ["Pleroma.BBS.Handler"]
},
%{
key: :port,
type: :integer,
description: "Port to connect",
suggestions: [10_022]
},
%{
key: :password_authenticator,
type: :string,
description: "Authenticator module",
suggestions: ["Pleroma.BBS.Authenticator"]
}
]
},
%{
group: :mime,
label: "Mime Types",
@ -3356,6 +3364,21 @@
type: :integer,
description: "Limit user to export not more often than once per N days",
suggestions: [7]
},
%{
key: :process_wait_time,
type: :integer,
label: "Process Wait Time",
description:
"The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed.",
suggestions: [30_000]
},
%{
key: :process_chunk_size,
type: :integer,
label: "Process Chunk Size",
description: "The number of activities to fetch in the backup job for each chunk.",
suggestions: [100]
}
]
},


@ -3,12 +3,6 @@ Note: Additional clients may be working but theses are officially supporting Ple
Feel free to contact us to be added to this list!
## Desktop
### Roma for Desktop
- Homepage: <https://www.pleroma.com/#desktopApp>
- Source Code: <https://github.com/roma-apps/roma-desktop>
- Platforms: Windows, Mac, Linux
- Features: MastoAPI, Streaming Ready
### Social
- Source Code: <https://gitlab.gnome.org/World/Social>
- Contact: [@brainblasted@social.libre.fi](https://social.libre.fi/users/brainblasted)
@ -19,7 +13,14 @@ Feel free to contact us to be added to this list!
### Whalebird
- Homepage: <https://whalebird.social/>
- Source Code: <https://github.com/h3poteto/whalebird-desktop>
- Contact: [@h3poteto@pleroma.io](https://pleroma.io/users/h3poteto)
- Contact: [@whalebird@pleroma.io](https://pleroma.io/users/whalebird)
- Platforms: Windows, Mac, Linux
- Features: MastoAPI, Streaming Ready
### Fedistar
- Homepage: <https://fedistar.net>
- Source Code: <https://github.com/h3poteto/fedistar>
- Contact: [@fedistar@pleroma.io](https://pleroma.io/users/fedistar)
- Platforms: Windows, Mac, Linux
- Features: MastoAPI, Streaming Ready


@ -66,6 +66,36 @@ To add configuration to your config file, you can copy it from the base config.
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
* `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
* `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
* `admin_privileges`: A list of privileges an admin has (e.g. delete messages, manage reports...)
* Possible values are:
* `:users_read`
* Allows admins to fetch users through the admin API.
* `:users_manage_invites`
* Allows admins to manage invites. This includes sending, resending, revoking and approving invites.
* `:users_manage_activation_state`
* Allows admins to activate and deactivate accounts. This also allows them to see deactivated users through the Mastodon API.
* `:users_manage_tags`
* Allows admins to set and remove tags for users. This can be useful in combination with MRF policies, such as `Pleroma.Web.ActivityPub.MRF.TagPolicy`.
* `:users_manage_credentials`
* Allows admins to trigger a password reset and set new credentials for a user.
* `:users_delete`
* Allows admins to delete accounts. Note that deleting an account is actually deactivating it and removing all data like posts, profile information, etc.
* `:messages_read`
* Allows admins to read messages through the admin API, including non-public posts and chats.
* `:messages_delete`
* Allows admins to delete messages from other users.
* `:instances_delete`
* Allows admins to remove a whole remote instance from your instance. This will delete all users and messages from that remote instance.
* `:reports_manage_reports`
* Allows admins to see and manage reports.
* `:moderation_log_read`
* Allows admins to read the entries in the moderation log.
* `:emoji_manage_emoji`
* Allows admins to manage custom emoji on the instance.
* `:statistics_read`
* Allows admins to see some simple statistics about the instance.
* `moderator_privileges`: A list of privileges a moderator has (e.g. delete messages, manage reports...); see the example below this list.
* Possible values are the same as for `admin_privileges`
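For instance, a minimal sketch (the privilege atoms come from the list above; the particular selection is only an example) that grants moderators nothing beyond deleting messages and handling reports:

```elixir
config :pleroma, :instance,
  # moderators may delete other users' messages and handle reports, nothing else
  moderator_privileges: [:messages_delete, :reports_manage_reports]
```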
## :database
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
@ -130,6 +160,8 @@ To add configuration to your config file, you can copy it from the base config.
* `Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy`: Drops follow requests from followbots. Users can still allow bots to follow them by first following the bot.
* `Pleroma.Web.ActivityPub.MRF.KeywordPolicy`: Rejects or removes from the federated timeline or replaces keywords. (See [`:mrf_keyword`](#mrf_keyword)).
* `Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent`: Forces every mentioned user to be reflected in the post content.
* `Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy`: Forces quote post URLs to be reflected in the message content inline.
* `Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy`: Force a Link tag for posts quoting another post. (may break outgoing federation of quote posts with older Pleroma versions)
* `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
@ -231,6 +263,14 @@ Notes:
* `follower_nickname`: The name of the bot account to use for following newly discovered users. Using `followbot` or similar is strongly suggested.
#### :mrf_emoji
* `remove_url`: A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
* `remove_shortcode`: A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
* `federated_timeline_removal_url`: A list of patterns which result in messages with emoji whose URLs match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
* `federated_timeline_removal_shortcode`: A list of patterns which result in messages with emoji whose shortcodes match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html). A configuration sketch follows this list.
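As a configuration sketch (the pattern values below are made-up examples, not defaults):

```elixir
config :pleroma, :mrf_emoji,
  # strip emoji served from this (hypothetical) domain from incoming messages
  remove_url: [~r/emoji\.badsite\.example/],
  # strip emoji with this exact shortcode
  remove_shortcode: ["obnoxious_emoji"],
  federated_timeline_removal_url: [],
  federated_timeline_removal_shortcode: []
```

Note that, as with other MRF settings, the corresponding policy still has to be enabled in the `:mrf`, `policies` list for these options to take effect.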
#### :mrf_inline_quote
* `template`: The template to append to the post. `{url}` will be replaced with the actual link to the quoted post. Default: `<bdi>RT:</bdi> {url}`
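For example, a sketch that overrides the default template (the replacement text is arbitrary):

```elixir
config :pleroma, :mrf_inline_quote,
  # {url} is replaced with the link to the quoted post
  template: "<bdi>Quoting:</bdi> {url}"
```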
### :activitypub
* `unfollow_blocked`: Whether blocks result in people getting unfollowed
@ -641,6 +681,12 @@ This filter reads the ImageDescription and iptc:Caption-Abstract fields with Exi
No specific configuration.
#### Pleroma.Upload.Filter.OnlyMedia
This filter rejects uploads that are not identified with Content-Type matching audio/\*, image/\*, or video/\*
No specific configuration.
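To use it, the filter has to be added to the upload filter chain; a sketch, assuming the usual `filters` option of `Pleroma.Upload`:

```elixir
config :pleroma, Pleroma.Upload,
  # reject any upload that is not audio, image or video
  filters: [Pleroma.Upload.Filter.OnlyMedia]
```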
#### Pleroma.Upload.Filter.Mogrify
* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.
@ -843,21 +889,8 @@ This will probably take a long time.
### BBS / SSH access
To enable simple command line interface accessible over ssh, add a setting like this to your configuration file:
This feature has been removed from Pleroma core.
However, a client has been made and is available at https://git.pleroma.social/Duponin/sshocial.
```exs
app_dir = File.cwd!
priv_dir = Path.join([app_dir, "priv/ssh_keys"])
config :esshd,
enabled: true,
priv_dir: priv_dir,
handler: "Pleroma.BBS.Handler",
port: 10_022,
password_authenticator: "Pleroma.BBS.Authenticator"
```
Feel free to adjust the priv_dir and port number. Then you will have to create the key for the keys (in the example `priv/ssh_keys`) and create the host keys with `ssh-keygen -m PEM -N "" -b 2048 -t rsa -f ssh_host_rsa_key`. After restarting, you should be able to connect to your Pleroma instance with `ssh username@server -p $PORT`
### :gopher
* `enabled`: Enables the gopher interface


@ -62,6 +62,20 @@ An additional “Expect-CT” header will be sent with the configured `ct_max_ag
If you click on a link, your browser's request to the other site will include where it is coming from. The “Referrer policy” header tells the browser how and if it should send this information. (see [Referrer policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy))
### Uploaded media and media proxy
It is STRONGLY RECOMMENDED to serve both the locally-uploaded media and the media proxy from another domain than the domain that Pleroma runs on, if applicable.
```elixir
config :pleroma, :media_proxy,
base_url: "https://some.other.domain"
config :pleroma, Pleroma.Upload,
base_url: "https://some.other.domain/media"
```
See `installation/pleroma-mediaproxy.nginx` for examples on how to configure your media proxy.
## systemd
A systemd unit example is provided at `installation/pleroma.service`.


@ -1585,6 +1585,7 @@ Returns the content of the document
"build_url": "https://git.pleroma.social/pleroma/fedi-fe/-/jobs/artifacts/${ref}/download?job=build", "build_url": "https://git.pleroma.social/pleroma/fedi-fe/-/jobs/artifacts/${ref}/download?job=build",
"git": "https://git.pleroma.social/pleroma/fedi-fe", "git": "https://git.pleroma.social/pleroma/fedi-fe",
"installed": true, "installed": true,
"installed_refs": ["master"],
"name": "fedi-fe", "name": "fedi-fe",
"ref": "master" "ref": "master"
}, },
@ -1592,6 +1593,7 @@ Returns the content of the document
"build_url": "https://git.pleroma.social/lambadalambda/kenoma/-/jobs/artifacts/${ref}/download?job=build", "build_url": "https://git.pleroma.social/lambadalambda/kenoma/-/jobs/artifacts/${ref}/download?job=build",
"git": "https://git.pleroma.social/lambadalambda/kenoma", "git": "https://git.pleroma.social/lambadalambda/kenoma",
"installed": false, "installed": false,
"installed_refs": [],
"name": "kenoma", "name": "kenoma",
"ref": "master" "ref": "master"
} }


@ -357,6 +357,122 @@ The message payload consist of:
- `follower_count`: follower count
- `following_count`: following count
### Authenticating via `sec-websocket-protocol` header
Pleroma allows authenticating via the `sec-websocket-protocol` header. For example, if your access token is `your-access-token`, you can authenticate using the following:
```
sec-websocket-protocol: your-access-token
```
### Authenticating after connection via `pleroma:authenticate` event
Pleroma allows authenticating after the connection is established, via the `pleroma:authenticate` event. For example, if your access token is `your-access-token`, you can send the following after the connection is established:
```
{"type": "pleroma:authenticate", "token": "your-access-token"}
```
### Response to client-sent events
Pleroma will respond to client-sent events that it recognizes. Supported event types are:
- `subscribe`
- `unsubscribe`
- `pleroma:authenticate`
The reply will be in the following format:
```
{
"event": "pleroma:respond",
"payload": "{\"type\": \"<type of the client-sent event>\", \"result\": \"<result of the action>\", \"error\": \"<error code>\"}"
}
```
Result of the action can be either `success`, `ignored` or `error`. If it is `error`, the `error` property will contain the error code. Otherwise, the `error` property will not be present. Below are some examples:
```
{
"event": "pleroma:respond",
"payload": "{\"type\": \"pleroma:authenticate\", \"result\": \"success\"}"
}
{
"event": "pleroma:respond",
"payload": "{\"type\": \"subscribe\", \"result\": \"ignored\"}"
}
{
"event": "pleroma:respond",
"payload": "{\"type\": \"unsubscribe\", \"result\": \"error\", \"error\": \"bad_topic\"}"
}
```
If the sent event is not of a type that Pleroma supports, it will not reply.
### The `stream` attribute of a server-sent event
Technically, this is in Mastodon, but its documentation does nothing to specify its format.
This attribute appears on every event type except `pleroma:respond` and `delete`. It helps clients determine where they should display the new statuses.
The value of the attribute is an array containing one or two elements. The first element is the type of the stream. The second is the identifier related to that specific stream, if applicable.
For the following stream types, there is a second element in the array:
- `list`: The second element is the id of the list, as a string.
- `hashtag`: The second element is the name of the hashtag.
- `public:remote:media` and `public:remote`: The second element is the domain of the corresponding instance.
For all other stream types, there is no second element.
Some examples of valid `stream` values:
- `["list", "1"]`: List of id 1.
- `["hashtag", "mew"]`: The hashtag #mew.
- `["user:notifications"]`: Notifications for the current user.
- `["user"]`: Home timeline.
- `["public:remote", "mew.moe"]`: Public posts from the instance mew.moe .
### The unified streaming endpoint
If you do not specify a stream to connect to when requesting `/api/v1/streaming`, you will enter a connection that subscribes to no streams. After the connection is established, you can authenticate and then subscribe to different streams.
### List of supported streams
Below is a list of supported streams by Pleroma. To make a single-stream WebSocket connection, append the string specified in "Query style" to the streaming endpoint url.
To subscribe to a stream after the connection is established, merge the JSON object specified in "Subscribe style" with `{"type": "subscribe"}`. To unsubscribe, merge it with `{"type": "unsubscribe"}`.
For example, to receive updates on the list 1, you can connect to `/api/v1/streaming/?stream=list&list=1`, or send
```
{"type": "subscribe", "stream": "list", "list": "1"}
```
upon establishing the websocket connection.
To unsubscribe from list 1, send
```
{"type": "unsubscribe", "stream": "list", "list": "1"}
```
Note that if you specify a stream that requires a logged-in user in the query string (for example, `user` or `list`), you have to specify the access token when you are trying to establish the connection, i.e. in the query string or via the `sec-websocket-protocol` header.
- `list`
- Query style: `?stream=list&list=<id>`
- Subscribe style: `{"stream": "list", "list": "<id>"}`
- `public`, `public:local`, `public:media`, `public:local:media`, `user`, `user:pleroma_chat`, `user:notifications`, `direct`
- Query style: `?stream=<stream name>`
- Subscribe style: `{"stream": "<stream name>"}`
- `hashtag`
- Query style: `?stream=hashtag&tag=<name>`
- Subscribe style: `{"stream": "hashtag", "tag": "<name>"}`
- `public:remote`, `public:remote:media`
- Query style: `?stream=<stream name>&instance=<instance domain>`
- Subscribe style: `{"stream": "<stream name>", "instance": "<instance domain>"}`
## User muting and thread muting
Both user muting and thread muting can be done for only a certain time by adding an `expires_in` parameter to the API calls and giving the expiration time in seconds.


@ -577,6 +577,9 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
404 if the pack does not exist
## `GET /api/v1/pleroma/accounts/:id/scrobbles`
Audio scrobbling in Pleroma is **deprecated**.
### Requests a list of current and recent Listen activities for an account
* Method `GET`
* Authentication: not required
@ -598,6 +601,9 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
```
## `POST /api/v1/pleroma/scrobble`
Audio scrobbling in Pleroma is **deprecated**.
### Creates a new Listen activity for an account
* Method `POST`
* Authentication: required


@ -183,6 +183,9 @@ server {
...
}
```
* (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
* Enable and start nginx:


@ -173,6 +173,11 @@ sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/ple
```
* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths)
* (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
* Enable and start nginx:
```shell


@ -4,7 +4,7 @@
## Installation
This guide will assume you are on Debian 11 (“bullseye”) or later. This guide should also work with Ubuntu 18.04 (“Bionic Beaver”) and later. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-add-delete-and-grant-sudo-privileges-to-users-on-a-debian-vps). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su <username> -s $SHELL -c 'command'` instead.
This guide will assume you are on Debian 12 (“bookworm”) or later. This guide should also work with Ubuntu 22.04 (“jammy”) and later. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-add-delete-and-grant-sudo-privileges-to-users-on-a-debian-vps). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su <username> -s $SHELL -c 'command'` instead.
{! backend/installation/generic_dependencies.include !} {! backend/installation/generic_dependencies.include !}
@ -136,6 +136,11 @@ sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/ple
``` ```
* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths) * Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths)
* (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
* Enable and start nginx: * Enable and start nginx:
```shell ```shell

View File

@ -1,11 +1,14 @@
# How to install Pleroma
Note: This article is potentially outdated, because we may not currently have contributors who speak this language well enough to keep it up to date. For the up-to-date version, which may differ significantly or contain important caveats about the installation process, see the English version.
## About this Japanese translation
This article is a Japanese translation of [Installing on Debian based distributions](Installing on Debian based distributions). If anything seems wrong, please refer to the original English text.
## Installation
This guide assumes you are using Debian Bookworm. It will probably also work on Ubuntu 22.04. It also assumes that you have administrative rights, either as root or via sudo. If you run the steps below as root, ignore the `sudo`; however, when a command specifies a user, as in `sudo -Hu pleroma`, use `su <username> -s $SHELL -c 'command'` instead.
### Required software

View File

@ -173,6 +173,10 @@ Edit the defaults of `/usr/local/etc/nginx/sites-available/pleroma.nginx`:
* Change `ssl_certificate_key` to `/var/db/acme/certs/example.tld/example.tld.key`. * Change `ssl_certificate_key` to `/var/db/acme/certs/example.tld/example.tld.key`.
* Change all references of `example.tld` to your instance's domain name. * Change all references of `example.tld` to your instance's domain name.
#### (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
## Creating a startup script for Pleroma ## Creating a startup script for Pleroma
Pleroma will need to compile when it initially starts, which typically takes a longer Pleroma will need to compile when it initially starts, which typically takes a longer

View File

@ -1,6 +1,8 @@
# Installing on Gentoo GNU/Linux # Manual install on Gentoo GNU/Linux
{! backend/installation/otp_vs_from_source_source.include !} {! backend/installation/otp_vs_from_source.include !}
This guide covers a manual from-source installation. To use the gentoo package, please check the [packaged installation guide for gentoo](./gentoo_otp_en.md).
## Installation ## Installation
@ -227,6 +229,10 @@ Replace all instances of `example.tld` with your instance's public URL. If for w
Pay special attention to the line that begins with `ssl_ecdh_curve`. It is strongly advised to comment that line out so that OpenSSL will use its full capabilities, and it is also possible that you are running OpenSSL 1.0.2, which makes this necessary.
* (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
* Enable and start nginx: * Enable and start nginx:
```shell ```shell

View File

@ -0,0 +1,207 @@
# Packaged install on Gentoo Linux
{! backend/installation/otp_vs_from_source.include !}
This guide covers installation via Gentoo-provided packaging. A [manual installation guide for gentoo](./gentoo_en.md) is also available.
## Installation
This guide will assume that you have administrative rights, either as root or a user with [sudo permissions](https://wiki.gentoo.org/wiki/Sudo). Lines that begin with `#` indicate that they should be run as the superuser. Lines using `$` should be run as the indicated user, e.g. `pleroma$` should be run as the `pleroma` user.
{! backend/installation/generic_dependencies.include !}
### Installing a cron daemon
Gentoo quite pointedly does not come with a cron daemon installed, and as such it is recommended you install one to automate certbot renewals and to allow other system administration tasks to be run automatically. Gentoo has [a whole wide world of cron options](https://wiki.gentoo.org/wiki/Cron) but if you just want A Cron That Works, `emerge --ask virtual/cron` will install the default cron implementation (probably cronie), which will work just fine. For the purposes of this guide, we will be doing just that.
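A minimal sketch of that step, assuming cronie ends up being the provider pulled in by `virtual/cron` (adjust the service name if a different implementation is installed):
```shell
# emerge --ask virtual/cron
# rc-update add cronie default
# rc-service cronie start
```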
### Required ebuilds
* `www-apps/pleroma`
#### Optional ebuilds used in this guide
* `www-servers/nginx` (preferred, example configs for other reverse proxies can be found in the repo)
* `app-crypt/certbot` (or any other ACME client for Let's Encrypt certificates)
* `app-crypt/certbot-nginx` (nginx certbot plugin that allows use of the all-powerful `--nginx` flag on certbot)
* `media-gfx/imagemagick`
* `media-video/ffmpeg`
* `media-libs/exiftool`
### Prepare the system
* If you haven't yet done so, add the [Gentoo User Repository (GURU)](https://wiki.gentoo.org/wiki/Project:GURU), where the `www-apps/pleroma` ebuild currently lives:
```shell
# eselect repository enable guru
```
* Ensure that you have the latest copy of the Gentoo and GURU ebuilds if you have not synced them yet:
```shell
# emaint sync -a
```
* Emerge all the required and suggested software in one go:
```shell
# emerge --ask www-apps/pleroma www-servers/nginx app-crypt/certbot app-crypt/certbot-nginx
```
If you would not like to install the optional packages, remove them from this line.
If you're running this from a low-powered virtual machine, it should work, though it will take some time. There were no issues on a VPS with a single core and 1GB of RAM; if you are using an even more limited device and run into issues, you can try creating a swapfile (see the sketch below) or use a more powerful machine running Gentoo to [cross build](https://wiki.gentoo.org/wiki/Cross_build_environment). If you have a wait ahead of you, now would be a good time to take a break, stretch a bit, refresh your beverage of choice and/or get a snack, and reply to Arch users' posts with "I use Gentoo btw" as we do.
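If memory does turn out to be the bottleneck, a temporary swapfile is usually enough; a minimal sketch, run as root (the 2G size and `/swapfile` path are arbitrary choices, and the file can be deactivated with `swapoff` once the build is done):
```shell
# fallocate -l 2G /swapfile
# chmod 600 /swapfile
# mkswap /swapfile
# swapon /swapfile
```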
### Setup PostgreSQL
The [Gentoo Wiki article](https://wiki.gentoo.org/wiki/PostgreSQL) as well as the [PostgreSQL QuickStart](https://wiki.gentoo.org/wiki/PostgreSQL/QuickStart) might be worth a quick glance, as the way Gentoo handles postgres is slightly unusual, with the built-in capability to run separate database slots for testing and live use, or whatever other purpose. While it is still straightforward to install, it does mean that the version numbers used in this guide might change in future updates, so keep an eye on the output you get from `emerge` to ensure you are using the correct ones.
* Initialize the database cluster
The output from emerging postgresql should give you a command for initializing the postgres database. The default slot should be indicated in this command; ensure that it matches the command below.
```shell
# emerge --config dev-db/postgresql:11
```
### Install media / graphics packages (optional)
See [Optional software packages needed for specific functionality](optional/media_graphics_packages.md) for details.
```shell
# emerge --ask media-video/ffmpeg media-gfx/imagemagick media-libs/exiftool
```
### Setup PleromaBE
* Generate the configuration:
```shell
# pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql
```
* Create the PostgreSQL database
```shell
# sudo -u postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
```
* Now run the database migration:
```shell
# pleroma_ctl migrate
```
* Optional: If you have installed RUM indexes (`dev-db/rum`) you also need to run:
```shell
# sudo -Hu pleroma pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/
```
* Now you can start Pleroma already and add it in the default runlevel
```shell
# rc-service pleroma start
# rc-update add pleroma default
```
It probably won't work over the public internet quite yet, however, as we still need to set up a web server to proxy to the pleroma application, as well as configure SSL.
### Finalize installation
Assuming you want to open your newly installed federated social network to, well, the federation, you should run nginx or some other webserver/proxy in front of Pleroma. It is also a good idea to set up Pleroma to run as a system service.
#### Nginx
* Install nginx, if not already done:
```shell
# emerge --ask www-servers/nginx
```
* Create directories for available and enabled sites:
```shell
# mkdir -p /etc/nginx/sites-{available,enabled}
```
* Append the following line at the end of the `http` block in `/etc/nginx/nginx.conf`:
```Nginx
include sites-enabled/*;
```
* Set up your SSL cert, using your method of choice or certbot. If using certbot, install it if you haven't already:
```shell
# emerge --ask app-crypt/certbot app-crypt/certbot-nginx
```
and then set it up:
```shell
# mkdir -p /var/lib/letsencrypt/
# certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
```
If that doesn't work the first time, add `--dry-run` to further attempts to avoid being rate-limited as you identify the issue, and do not remove it until the dry run succeeds. If that doesn't work, make sure that nginx is not already running. If it still doesn't work, try setting up nginx first (change ssl “on” to “off” and try again). Often the answer to issues with certbot is to use the `--nginx` flag once you have nginx up and running.
If you are using any additional subdomains, such as for a media proxy, you can re-run the same command with the subdomain in question (see the example below). When it comes time to renew later, you will not need to run the command separately for each domain; a single renew will handle them all.
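For example, a certificate for a (placeholder) media subdomain can be requested with the same standalone invocation used above:
```shell
# certbot certonly --email <your@emailaddress> -d <media.yourdomain> --standalone
```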
---
* Copy the example nginx configuration and activate it:
```shell
# cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/
# ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
```
* Take some time to ensure that your nginx config is correct
Replace all instances of `example.tld` with your instance's public URL. If for whatever reason you made changes to the port that your pleroma app runs on, be sure that is reflected in your configuration.
Pay special attention to the line that begins with `ssl_ecdh_curve`. It is strongly advised to comment that line out so that OpenSSL will use its full capabilities, and it is also possible that you are running OpenSSL 1.0.2, which makes this necessary.
* Enable and start nginx:
```shell
# rc-update add nginx default
# /etc/init.d/nginx start
```
If you are using certbot, it is HIGHLY recommended that you set up a cron job to renew your certificate, and that you install the suggested `certbot-nginx` plugin. If you don't do these things, you only have yourself to blame when your instance suddenly breaks because you forgot about it.
First, ensure that the command you will be installing into your crontab works.
```shell
# /usr/bin/certbot renew --nginx
```
Assuming not much time has passed since you got certbot working a few steps ago, you should get a message for all domains you installed certificates for saying `Cert not yet due for renewal`.
Now, run crontab as a superuser with `crontab -e` or `sudo crontab -e` as appropriate, and add the following line to your cron:
```cron
0 0 1 * * /usr/bin/certbot renew --nginx
```
This will run certbot on the first of the month at midnight. If you'd rather run it more frequently, that's not a bad idea; feel free to do so.
#### Other webserver/proxies
If you would like to use other webservers or proxies, there are example configurations for some popular alternatives in `/opt/pleroma/installation/`. You can, of course, check out [the Gentoo wiki](https://wiki.gentoo.org) for more information on installing and configuring said alternatives.
#### Create your first user
If your instance is up and running, you can create your first user with administrative rights with the following task:
```shell
pleroma$ pleroma_ctl user new <username> <your@emailaddress> --admin
```
#### Further reading
{! backend/installation/further_reading.include !}
## Questions
If you have questions about the installation, or it didn't work as it should, ask in [#pleroma:libera.chat](https://matrix.to/#/#pleroma:libera.chat) via Matrix or **#pleroma** on **libera.chat** via IRC.

View File

@ -5,7 +5,7 @@
In this guide we cover how you can migrate from a from source installation to one using OTP releases. In this guide we cover how you can migrate from a from source installation to one using OTP releases.
## Pre-requisites ## Pre-requisites
You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`. You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.
The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds. The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds.
@ -86,26 +86,26 @@ export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it # Clone the release build into a temporary directory and unpack it
# Replace `stable` with `unstable` if you want to run the unstable branch # Replace `stable` with `unstable` if you want to run the unstable branch
su pleroma -s $SHELL -lc " sudo -Hu pleroma "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/stable/download?job=$FLAVOUR' -o /tmp/pleroma.zip curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/stable/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/ unzip /tmp/pleroma.zip -d /tmp/
" "
# Move the release to the home directory and delete temporary files # Move the release to the home directory and delete temporary files
su pleroma -s $SHELL -lc " sudo -Hu pleroma "
mv /tmp/release/* ~pleroma/ mv /tmp/release/* ~pleroma/
rmdir /tmp/release rmdir /tmp/release
rm /tmp/pleroma.zip rm /tmp/pleroma.zip
" "
# Start the instance to verify that everything is working as expected # Start the instance to verify that everything is working as expected
su pleroma -s $SHELL -lc "./bin/pleroma daemon" sudo -Hu pleroma "./bin/pleroma daemon"
# Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly # Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance sleep 20 && curl http://localhost:4000/api/v1/instance
# Stop the instance # Stop the instance
su pleroma -s $SHELL -lc "./bin/pleroma stop" sudo -Hu pleroma "./bin/pleroma stop"
``` ```
## Setting up a system service ## Setting up a system service

View File

@ -123,6 +123,10 @@ Edit the defaults:
* Change `ssl_certificate_key` to `/etc/nginx/tls/key`. * Change `ssl_certificate_key` to `/etc/nginx/tls/key`.
* Change `example.tld` to your instance's domain name. * Change `example.tld` to your instance's domain name.
### (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
## Configuring acme.sh ## Configuring acme.sh
We'll be using acme.sh in Stateless Mode for TLS certificate renewal. We'll be using acme.sh in Stateless Mode for TLS certificate renewal.

View File

@ -195,6 +195,10 @@ rcctl enable relayd
rcctl start relayd rcctl start relayd
``` ```
##### (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
#### pf #### pf
Enabling and configuring pf is highly recommended. Enabling and configuring pf is highly recommended.
In /etc/pf.conf, insert the following configuration: In /etc/pf.conf, insert the following configuration:

View File

@ -1,5 +1,7 @@
# Installing Pleroma on OpenBSD
Note: This article is potentially outdated, because we may not currently have contributors who speak this language well enough to keep it up to date. For the up-to-date version, which may differ significantly or contain important caveats about the installation process, see the English version.
You will need:
* Your own domain
* An OpenBSD 6.3 server

View File

@ -1,9 +1,10 @@
# Optional software packages needed for specific functionality # Optional software packages needed for specific functionality
For specific Pleroma functionality (which is disabled by default) some or all of the below packages are required: For specific Pleroma functionality (which is disabled by default) some or all of the below packages are required:
* `ImageMagick`
* `ffmpeg`
* `exiftool`
Please refer to documentation in `docs/installation` on how to install them on specific OS. Please refer to documentation in `docs/installation` on how to install them on specific OS.
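As one concrete illustration, on a Debian- or Ubuntu-based system the three packages could be installed roughly like this (the package names are distro-specific assumptions, not taken from this document; `exiftool` ships there as `libimage-exiftool-perl`):
```shell
# ImageMagick, ffmpeg and exiftool on Debian/Ubuntu
sudo apt install imagemagick ffmpeg libimage-exiftool-perl
```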
@ -14,20 +15,23 @@ Note: the packages are not required with the current default settings of Pleroma
`ImageMagick` is a set of tools to create, edit, compose, or convert bitmap images. `ImageMagick` is a set of tools to create, edit, compose, or convert bitmap images.
It is required for the following Pleroma features: It is required for the following Pleroma features:
* `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Pleroma.Upload/filters` in `config/config.exs`)
* Media preview proxy for still images (related config: `media_preview_proxy/enabled` in `config/config.exs`)
## `ffmpeg` ## `ffmpeg`
`ffmpeg` is software to record, convert and stream audio and video. `ffmpeg` is software to record, convert and stream audio and video.
It is required for the following Pleroma features: It is required for the following Pleroma features:
* Media preview proxy for videos (related config: `media_preview_proxy/enabled` in `config/config.exs`)
* Media preview proxy for videos (related config: `media_preview_proxy/enabled` in `config/config.exs`)
## `exiftool` ## `exiftool`
`exiftool` is media files metadata reader/writer. `exiftool` is media files metadata reader/writer.
It is required for the following Pleroma features: It is required for the following Pleroma features:
* `Pleroma.Upload.Filters.Exiftool.StripLocation` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
* `Pleroma.Upload.Filters.Exiftool.ReadDescription` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)

View File

@ -2,15 +2,16 @@
{! backend/installation/otp_vs_from_source.include !} {! backend/installation/otp_vs_from_source.include !}
This guide covers a installation using an OTP release. To install Pleroma from source, please check out the corresponding guide for your distro. This guide covers an installation using OTP releases as built by the Pleroma project; it is meant as a fallback to distribution packages/recipes, which are the preferred installation method.
To install Pleroma from source, please check out the corresponding guide for your distro.
## Pre-requisites ## Pre-requisites
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPU, you have root access to. If you are not sure if it's compatible see [Detecting flavour section](#detecting-flavour) below * A machine you have root access to running Debian GNU/Linux or compatible (eg. Ubuntu), or Alpine on `x86_64`, `aarch64` or `armv7l` CPU. If you are not sure what you are running see [Detecting flavour section](#detecting-flavour) below
* A (sub)domain pointed to the machine * A (sub)domain pointed to the machine
You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`. You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo -i`/`su`.
While in theory OTP releases are possbile to install on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu and Alpine. Similarly to other binaries, OTP releases tend to be only compatible with the distro they are built on, as such this guide focuses only on Debian/Ubuntu and Alpine.
### Detecting flavour ### Detecting flavour
@ -19,7 +20,7 @@ Paste the following into the shell:
arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix" arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix"
``` ```
If your platform is supported the output will contain the flavour string, you will need it later. If not, this just means that we don't build releases for your platform, you can still try installing from source. This should give you your flavour string. If not, this just means that we don't build releases for your platform; you can still try installing from source.
### Installing the required packages ### Installing the required packages
@ -114,13 +115,13 @@ adduser --system --shell /bin/false --home /opt/pleroma pleroma
export FLAVOUR="amd64-musl" export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it # Clone the release build into a temporary directory and unpack it
su pleroma -s $SHELL -lc " sudo -Hu pleroma "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/stable/download?job=$FLAVOUR' -o /tmp/pleroma.zip curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/stable/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/ unzip /tmp/pleroma.zip -d /tmp/
" "
# Move the release to the home directory and delete temporary files # Move the release to the home directory and delete temporary files
su pleroma -s $SHELL -lc " sudo -Hu pleroma "
mv /tmp/release/* /opt/pleroma mv /tmp/release/* /opt/pleroma
rmdir /tmp/release rmdir /tmp/release
rm /tmp/pleroma.zip rm /tmp/pleroma.zip
@ -141,25 +142,25 @@ mkdir -p /etc/pleroma
chown -R pleroma /etc/pleroma chown -R pleroma /etc/pleroma
# Run the config generator # Run the config generator
su pleroma -s $SHELL -lc "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql" sudo -Hu pleroma "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql"
# Create the postgres database # Create the postgres database
su postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql" sudo -u postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
# Create the database schema # Create the database schema
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate" sudo -Hu pleroma "./bin/pleroma_ctl migrate"
# If you have installed RUM indexes uncommend and run # If you have installed RUM indexes uncommend and run
# su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/" # sudo -Hu pleroma "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
# Start the instance to verify that everything is working as expected # Start the instance to verify that everything is working as expected
su pleroma -s $SHELL -lc "./bin/pleroma daemon" sudo -Hu pleroma "./bin/pleroma daemon"
# Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly # Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance sleep 20 && curl http://localhost:4000/api/v1/instance
# Stop the instance # Stop the instance
su pleroma -s $SHELL -lc "./bin/pleroma stop" sudo -Hu pleroma "./bin/pleroma stop"
``` ```
### Setting up nginx and getting Let's Encrypt SSL certificates
@ -197,6 +198,10 @@ $EDITOR path-to-nginx-config
# Verify that the config is valid # Verify that the config is valid
nginx -t nginx -t
``` ```
#### (Strongly recommended) serve media on another domain
Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
#### Start nginx #### Start nginx
=== "Alpine" === "Alpine"

View File

@ -1,3 +1,8 @@
## OTP releases vs from-source installations ## Packaged (OTP) installation vs Manual (from-source) installations
There are two ways to install Pleroma. You can use OTP releases or do a from-source installation. OTP releases are as close as you can get to binary releases with Erlang/Elixir. The release is self-contained, and provides everything needed to boot it, it is easily administered via the provided shell script to open up a remote console, start/stop/restart the release, start in the background, send remote commands, and more. With from source installations you install Pleroma from source, meaning you have to install certain dependencies like Erlang+Elixir and compile Pleroma yourself. There are multiple ways to install Pleroma.
<dl>
<dt>Distro-provided packages</dt><dd>This is the recommended method, where you can get the strongest compatibility guarantees and the best dependency-management</dd>
<dt>Pleroma-provided OTP binaries</dt><dd>Intended as fallback for Alpine/Debian-compatible systems lacking a proper Pleroma package, they are heavier than proper distro packages as they also contain Erlang/Elixir and can break after system updates</dd>
<dt>Manual from-source installation</dt><dd>Needs build-dependencies to be installed and manual updates+rebuilds. Allows for easier source-customisations.</dd>
</dl>

View File

@ -1,3 +1,3 @@
{! backend/installation/otp_vs_from_source.include !} {! backend/installation/otp_vs_from_source.include !}
This guide covers a from-source installation. To install using OTP releases, please check out [the OTP guide](./otp_en.md). This guide covers a manual from-source installation. To install using OTP releases, please check for the presence of a distro package; failing that, you can use [Pleroma-provided OTP binaries](./otp_en.md).

View File

@ -8,6 +8,7 @@ pidfile="/var/run/pleroma.pid"
directory=/opt/pleroma directory=/opt/pleroma
healthcheck_delay=60 healthcheck_delay=60
healthcheck_timer=30 healthcheck_timer=30
no_new_privs="yes"
: ${pleroma_port:-4000} : ${pleroma_port:-4000}

View File

@ -0,0 +1,97 @@
# This file is for those who want to serve uploaded media and media proxy over
# another domain. This is STRONGLY RECOMMENDED.
# This is meant to be used ALONG WITH `pleroma.nginx`.
# If this is a new instance, replace the `location ~ ^/(media|proxy)` section in
# `pleroma.nginx` with the following to completely disable access to media from the main domain:
# location ~ ^/(media|proxy) {
# return 404;
# }
#
# If you are configuring an existing instance to use another domain
# for media, you will want to keep redirecting all existing local media to the new domain
# so already-uploaded media will not break.
# Replace the `location ~ ^/(media|proxy)` section in `pleroma.nginx` with the following:
#
# location /media {
# return 301 https://some.other.domain$request_uri;
# }
#
# location /proxy {
# return 404;
# }
server {
server_name some.other.domain;
listen 80;
listen [::]:80;
# Uncomment this if you need to use the 'webroot' method with certbot. Make sure
# that the directory exists and that it is accessible by the webserver. If you followed
# the guide, you already ran 'mkdir -p /var/lib/letsencrypt' to create the folder.
# You may need to load this file with the ssl server block commented out, run certbot
# to get the certificate, and then uncomment it.
#
# location ~ /\.well-known/acme-challenge {
# root /var/lib/letsencrypt/;
# }
location / {
return 301 https://$server_name$request_uri;
}
}
server {
server_name some.other.domain;
listen 443 ssl http2;
listen [::]:443 ssl http2;
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
ssl_session_tickets off;
ssl_trusted_certificate /etc/letsencrypt/live/some.other.domain/chain.pem;
ssl_certificate /etc/letsencrypt/live/some.other.domain/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/some.other.domain/privkey.pem;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
ssl_prefer_server_ciphers off;
# In case of an old server with an OpenSSL version of 1.0.2 or below,
# leave only prime256v1 or comment out the following line.
ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
ssl_stapling on;
ssl_stapling_verify on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;
# the nginx default is 1m, not enough for large media uploads
client_max_body_size 16m;
ignore_invalid_headers off;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
location / { return 404; }
location ~ ^/(media|proxy) {
proxy_cache pleroma_media_cache;
slice 1m;
proxy_cache_key $host$uri$is_args$args$slice_range;
proxy_set_header Range $slice_range;
proxy_cache_valid 200 206 301 304 1h;
proxy_cache_lock on;
proxy_ignore_client_abort on;
proxy_buffering on;
chunked_transfer_encoding on;
proxy_pass http://phoenix;
}
}
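Once adapted, enabling this configuration follows the same sites-available/sites-enabled pattern as `pleroma.nginx` in the guides above; the file name and paths here are illustrative placeholders, and the reload command depends on your init system:
```shell
# Install and enable the media-domain vhost alongside pleroma.nginx
cp pleroma-media.nginx /etc/nginx/sites-available/
ln -s /etc/nginx/sites-available/pleroma-media.nginx /etc/nginx/sites-enabled/pleroma-media.nginx
nginx -t && systemctl reload nginx   # or: rc-service nginx reload
```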

View File

@ -266,12 +266,20 @@ def run(["gen" | rest]) do
config_dir = Path.dirname(config_path) config_dir = Path.dirname(config_path)
psql_dir = Path.dirname(psql_path) psql_dir = Path.dirname(psql_path)
# Note: Distros requiring group read (0o750) on those directories should
# pre-create the directories.
[config_dir, psql_dir, static_dir, uploads_dir] [config_dir, psql_dir, static_dir, uploads_dir]
|> Enum.reject(&File.exists?/1) |> Enum.reject(&File.exists?/1)
|> Enum.map(&File.mkdir_p!/1) |> Enum.each(fn dir ->
File.mkdir_p!(dir)
File.chmod!(dir, 0o700)
end)
shell_info("Writing config to #{config_path}.") shell_info("Writing config to #{config_path}.")
# Sadly no fchmod(2) equivalent in Elixir…
File.touch!(config_path)
File.chmod!(config_path, 0o640)
File.write(config_path, result_config) File.write(config_path, result_config)
shell_info("Writing the postgres script to #{psql_path}.") shell_info("Writing the postgres script to #{psql_path}.")
File.write(psql_path, result_psql) File.write(psql_path, result_psql)
@ -290,8 +298,7 @@ def run(["gen" | rest]) do
else else
shell_error( shell_error(
"The task would have overwritten the following files:\n" <> "The task would have overwritten the following files:\n" <>
(Enum.map(will_overwrite, &"- #{&1}\n") |> Enum.join("")) <> Enum.map_join(will_overwrite, &"- #{&1}\n") <> "Rerun with `--force` to overwrite them."
"Rerun with `--force` to overwrite them."
) )
end end
end end

View File

@ -6,7 +6,70 @@ defmodule Mix.Tasks.Pleroma.OpenapiSpec do
def run([path]) do def run([path]) do
# Load Pleroma application to get version info # Load Pleroma application to get version info
Application.load(:pleroma) Application.load(:pleroma)
spec = Pleroma.Web.ApiSpec.spec(server_specific: false) |> Jason.encode!()
File.write(path, spec) spec_json = Pleroma.Web.ApiSpec.spec(server_specific: false) |> Jason.encode!()
# to get rid of the structs
spec_regened = spec_json |> Jason.decode!()
check_specs!(spec_regened)
File.write(path, spec_json)
end
defp check_specs!(spec) do
with :ok <- check_specs(spec) do
:ok
else
{_, errors} ->
IO.puts(IO.ANSI.format([:red, :bright, "Spec check failed, errors:"]))
Enum.map(errors, &IO.puts/1)
raise "Spec check failed"
end
end
def check_specs(spec) do
errors =
spec["paths"]
|> Enum.flat_map(fn {path, %{} = endpoints} ->
Enum.map(
endpoints,
fn {method, endpoint} ->
with :ok <- check_endpoint(spec, endpoint) do
:ok
else
error ->
"#{endpoint["operationId"]} (#{method} #{path}): #{error}"
end
end
)
|> Enum.reject(fn res -> res == :ok end)
end)
if errors == [] do
:ok
else
{:error, errors}
end
end
defp check_endpoint(spec, endpoint) do
valid_tags = available_tags(spec)
with {_, [_ | _] = tags} <- {:tags, endpoint["tags"]},
{_, []} <- {:unavailable, Enum.reject(tags, &(&1 in valid_tags))} do
:ok
else
{:tags, _} ->
"No tags specified"
{:unavailable, tags} ->
"Tags #{inspect(tags)} not available. Please add it in \"x-tagGroups\" in Pleroma.Web.ApiSpec"
end
end
defp available_tags(spec) do
spec["x-tagGroups"]
|> Enum.flat_map(fn %{"tags" => tags} -> tags end)
end end
end end

View File

@ -209,7 +209,8 @@ defp cachex_children do
build_cachex("chat_message_id_idempotency_key", build_cachex("chat_message_id_idempotency_key",
expiration: chat_message_id_idempotency_key_expiration(), expiration: chat_message_id_idempotency_key_expiration(),
limit: 500_000 limit: 500_000
) ),
build_cachex("rel_me", limit: 2500)
] ]
end end

View File

@ -1,20 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BBS.Authenticator do
use Sshd.PasswordAuthenticator
alias Pleroma.User
alias Pleroma.Web.Plugs.AuthenticationPlug
def authenticate(username, password) do
username = to_string(username)
password = to_string(password)
with %User{} = user <- User.get_by_nickname(username) do
AuthenticationPlug.checkpw(password, user.password_hash)
else
_e -> false
end
end
end

View File

@ -1,246 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BBS.Handler do
use Sshd.ShellHandler
alias Pleroma.Activity
alias Pleroma.HTML
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.CommonAPI
def on_shell(username, _pubkey, _ip, _port) do
:ok = IO.puts("Welcome to #{Pleroma.Config.get([:instance, :name])}!")
user = Pleroma.User.get_cached_by_nickname(to_string(username))
Logger.debug("#{inspect(user)}")
loop(run_state(user: user))
end
def on_connect(username, ip, port, method) do
Logger.debug(fn ->
"""
Incoming SSH shell #{inspect(self())} requested for #{username} from #{inspect(ip)}:#{inspect(port)} using #{inspect(method)}
"""
end)
end
def on_disconnect(username, ip, port) do
Logger.debug(fn ->
"Disconnecting SSH shell for #{username} from #{inspect(ip)}:#{inspect(port)}"
end)
end
defp loop(state) do
self_pid = self()
counter = state.counter
prefix = state.prefix
user = state.user
input = spawn(fn -> io_get(self_pid, prefix, counter, user.nickname) end)
wait_input(state, input)
end
def puts_activity(activity) do
status = Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{activity: activity})
IO.puts("-- #{status.id} by #{status.account.display_name} (#{status.account.acct})")
status.content
|> String.split("<br/>")
|> Enum.map(&HTML.strip_tags/1)
|> Enum.map(&HtmlEntities.decode/1)
|> Enum.map(&IO.puts/1)
end
def puts_notification(activity, user) do
notification =
Pleroma.Web.MastodonAPI.NotificationView.render("show.json", %{
notification: activity,
for: user
})
IO.puts(
"== (#{notification.type}) #{notification.status.id} by #{notification.account.display_name} (#{notification.account.acct})"
)
notification.status.content
|> String.split("<br/>")
|> Enum.map(&HTML.strip_tags/1)
|> Enum.map(&HtmlEntities.decode/1)
|> (fn x ->
case x do
[content] ->
"> " <> content
[head | _tail] ->
# "> " <> hd <> "..."
head
|> String.slice(1, 80)
|> (fn x -> "> " <> x <> "..." end).()
end
end).()
|> IO.puts()
IO.puts("")
end
def handle_command(state, "help") do
IO.puts("Available commands:")
IO.puts("help - This help")
IO.puts("home - Show the home timeline")
IO.puts("p <text> - Post the given text")
IO.puts("r <id> <text> - Reply to the post with the given id")
IO.puts("t <id> - Show a thread from the given id")
IO.puts("n - Show notifications")
IO.puts("n read - Mark all notifactions as read")
IO.puts("f <id> - Favourites the post with the given id")
IO.puts("R <id> - Repeat the post with the given id")
IO.puts("quit - Quit")
state
end
def handle_command(%{user: user} = state, "r " <> text) do
text = String.trim(text)
[activity_id, rest] = String.split(text, " ", parts: 2)
with %Activity{} <- Activity.get_by_id(activity_id),
{:ok, _activity} <-
CommonAPI.post(user, %{status: rest, in_reply_to_status_id: activity_id}) do
IO.puts("Replied!")
else
_e -> IO.puts("Could not reply...")
end
state
end
def handle_command(%{user: user} = state, "t " <> activity_id) do
with %Activity{} = activity <- Activity.get_by_id(activity_id) do
activities =
ActivityPub.fetch_activities_for_context(activity.data["context"], %{
blocking_user: user,
user: user,
exclude_id: activity.id
})
case activities do
[] ->
activity_id
|> Activity.get_by_id()
|> puts_activity()
_ ->
activities
|> Enum.reverse()
|> Enum.each(&puts_activity/1)
end
else
_e -> IO.puts("Could not show this thread...")
end
state
end
def handle_command(%{user: user} = state, "n read") do
Pleroma.Notification.clear(user)
IO.puts("All notifications were marked as read")
state
end
def handle_command(%{user: user} = state, "n") do
user
|> Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(%{})
|> Enum.each(&puts_notification(&1, user))
state
end
def handle_command(%{user: user} = state, "p " <> text) do
text = String.trim(text)
with {:ok, activity} <- CommonAPI.post(user, %{status: text}) do
IO.puts("Posted! ID: #{activity.id}")
else
_e -> IO.puts("Could not post...")
end
state
end
def handle_command(%{user: user} = state, "f " <> id) do
id = String.trim(id)
with %Activity{} = activity <- Activity.get_by_id(id),
{:ok, _activity} <- CommonAPI.favorite(user, activity) do
IO.puts("Favourited!")
else
_e -> IO.puts("Could not Favourite...")
end
state
end
def handle_command(state, "home") do
user = state.user
params =
%{}
|> Map.put(:type, ["Create"])
|> Map.put(:blocking_user, user)
|> Map.put(:muting_user, user)
|> Map.put(:user, user)
activities =
[user.ap_id | Pleroma.User.following(user)]
|> ActivityPub.fetch_activities(params)
Enum.each(activities, fn activity ->
puts_activity(activity)
end)
state
end
def handle_command(state, command) do
IO.puts("Unknown command '#{command}'")
state
end
defp wait_input(state, input) do
receive do
{:input, ^input, "quit\n"} ->
IO.puts("Exiting...")
{:input, ^input, code} when is_binary(code) ->
code = String.trim(code)
state = handle_command(state, code)
loop(%{state | counter: state.counter + 1})
{:input, ^input, {:error, :interrupted}} ->
IO.puts("Caught Ctrl+C...")
loop(%{state | counter: state.counter + 1})
{:input, ^input, msg} ->
:ok = Logger.warn("received unknown message: #{inspect(msg)}")
loop(%{state | counter: state.counter + 1})
end
end
defp run_state(opts) do
%{prefix: "pleroma", counter: 1, user: opts[:user]}
end
defp io_get(pid, prefix, counter, username) do
prompt = prompt(prefix, counter, username)
send(pid, {:input, self(), IO.gets(:stdio, prompt)})
end
defp prompt(prefix, counter, username) do
prompt = "#{username}@#{prefix}:#{counter}>"
prompt <> " "
end
end

View File

@ -20,6 +20,20 @@ def load(config, opts) do
with_runtime_config = with_runtime_config =
if File.exists?(config_path) do if File.exists?(config_path) do
# <https://git.pleroma.social/pleroma/pleroma/-/issues/3135>
%File.Stat{mode: mode} = File.lstat!(config_path)
if Bitwise.band(mode, 0o007) > 0 do
raise "Configuration at #{config_path} has world-permissions, execute the following: chmod o= #{config_path}"
end
if Bitwise.band(mode, 0o020) > 0 do
raise "Configuration at #{config_path} has group-wise write permissions, execute the following: chmod g-w #{config_path}"
end
# Note: Elixir doesn't provide a getuid(2)
# so cannot forbid group-read only when config is owned by us
runtime_config = Config.Reader.read!(config_path) runtime_config = Config.Reader.read!(config_path)
with_defaults with_defaults
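As a sketch of what these checks expect in practice (using the `/etc/pleroma/config.exs` path from the OTP guides above; the owning user and exact mode are assumptions to adapt to your setup):
```shell
# Owner read/write, group read-only, no world access:
# passes both the group-write and the world-permission check above
chown pleroma:pleroma /etc/pleroma/config.exs
chmod 640 /etc/pleroma/config.exs
```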

View File

@ -42,6 +42,18 @@ defmodule Pleroma.Constants do
] ]
) )
const(status_object_types,
do: [
"Note",
"Question",
"Audio",
"Video",
"Event",
"Article",
"Page"
]
)
const(updatable_object_types, const(updatable_object_types,
do: [ do: [
"Note", "Note",
@ -69,4 +81,21 @@ defmodule Pleroma.Constants do
const(mime_regex, const(mime_regex,
do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/ do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/
) )
const(upload_object_types, do: ["Document", "Image"])
const(activity_json_canonical_mime_type,
do: "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
)
const(activity_json_mime_types,
do: [
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
"application/activity+json"
]
)
const(public_streams,
do: ["public", "public:local", "public:media", "public:local:media"]
)
end end

View File

@ -27,3 +27,11 @@
failed: 4, failed: 4,
manual: 5 manual: 5
) )
defenum(Pleroma.User.Backup.State,
pending: 1,
running: 2,
complete: 3,
failed: 4,
invalid: 5
)

View File

@ -0,0 +1,23 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.BareUri do
use Ecto.Type
def type, do: :string
def cast(uri) when is_binary(uri) do
case URI.parse(uri) do
%URI{scheme: nil} -> :error
%URI{} -> {:ok, uri}
_ -> :error
end
end
def cast(_), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
end

View File

@ -51,6 +51,8 @@ def reload do
@doc "Returns the path of the emoji `name`." @doc "Returns the path of the emoji `name`."
@spec get(String.t()) :: String.t() | nil @spec get(String.t()) :: String.t() | nil
def get(name) do def get(name) do
name = maybe_strip_name(name)
case :ets.lookup(@ets, name) do case :ets.lookup(@ets, name) do
[{_, path}] -> path [{_, path}] -> path
_ -> nil _ -> nil
@ -139,6 +141,57 @@ def is_unicode_emoji?(unquote(emoji)), do: true
def is_unicode_emoji?(_), do: false def is_unicode_emoji?(_), do: false
@emoji_regex ~r/:[A-Za-z0-9_-]+(@.+)?:/
def is_custom_emoji?(s) when is_binary(s), do: Regex.match?(@emoji_regex, s)
def is_custom_emoji?(_), do: false
def maybe_strip_name(name) when is_binary(name), do: String.trim(name, ":")
def maybe_strip_name(name), do: name
def maybe_quote(name) when is_binary(name) do
if is_unicode_emoji?(name) do
name
else
if String.starts_with?(name, ":") do
name
else
":#{name}:"
end
end
end
def maybe_quote(name), do: name
def emoji_url(%{"type" => "EmojiReact", "content" => _, "tag" => []}), do: nil
def emoji_url(%{"type" => "EmojiReact", "content" => emoji, "tag" => tags}) do
emoji = maybe_strip_name(emoji)
tag =
tags
|> Enum.find(fn tag ->
tag["type"] == "Emoji" && !is_nil(tag["name"]) && tag["name"] == emoji
end)
if is_nil(tag) do
nil
else
tag
|> Map.get("icon")
|> Map.get("url")
end
end
def emoji_url(_), do: nil
def emoji_name_with_instance(name, url) do
url = url |> URI.parse() |> Map.get(:host)
"#{name}@#{url}"
end
emoji_qualification_map = emoji_qualification_map =
emojis emojis
|> Enum.filter(&String.contains?(&1, "\uFE0F")) |> Enum.filter(&String.contains?(&1, "\uFE0F"))

View File

@ -285,6 +285,7 @@ def update_metadata(name, data) do
@spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()} @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
def load_pack(name) do def load_pack(name) do
name = Path.basename(name)
pack_file = Path.join([emoji_path(), name, "pack.json"]) pack_file = Path.join([emoji_path(), name, "pack.json"])
with {:ok, _} <- File.stat(pack_file), with {:ok, _} <- File.stat(pack_file),

View File

@ -124,7 +124,7 @@ def mentions_escape(text, options \\ []) do
end end
def markdown_to_html(text) do def markdown_to_html(text) do
Earmark.as_html!(text, %Earmark.Options{compact_output: true}) Earmark.as_html!(text, %Earmark.Options{compact_output: true, smartypants: false})
end end
def html_escape({text, mentions, hashtags}, type) do def html_escape({text, mentions, hashtags}, type) do

View File

@ -7,6 +7,7 @@ defmodule Pleroma.Instances.Instance do
alias Pleroma.Instances alias Pleroma.Instances
alias Pleroma.Instances.Instance alias Pleroma.Instances.Instance
alias Pleroma.Maps
alias Pleroma.Repo alias Pleroma.Repo
alias Pleroma.User alias Pleroma.User
alias Pleroma.Workers.BackgroundWorker alias Pleroma.Workers.BackgroundWorker
@ -24,6 +25,14 @@ defmodule Pleroma.Instances.Instance do
field(:favicon, :string) field(:favicon, :string)
field(:favicon_updated_at, :naive_datetime) field(:favicon_updated_at, :naive_datetime)
embeds_one :metadata, Pleroma.Instances.Metadata, primary_key: false do
field(:software_name, :string)
field(:software_version, :string)
field(:software_repository, :string)
end
field(:metadata_updated_at, :utc_datetime)
timestamps() timestamps()
end end
@ -31,11 +40,17 @@ defmodule Pleroma.Instances.Instance do
def changeset(struct, params \\ %{}) do def changeset(struct, params \\ %{}) do
struct struct
|> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at]) |> cast(params, __schema__(:fields) -- [:metadata])
|> cast_embed(:metadata, with: &metadata_changeset/2)
|> validate_required([:host]) |> validate_required([:host])
|> unique_constraint(:host) |> unique_constraint(:host)
end end
def metadata_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:software_name, :software_version, :software_repository])
end
def filter_reachable([]), do: %{} def filter_reachable([]), do: %{}
def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do
@ -198,6 +213,89 @@ defp scrape_favicon(%URI{} = instance_uri) do
end end
end end
def get_or_update_metadata(%URI{host: host} = instance_uri) do
existing_record = Repo.get_by(Instance, %{host: host})
now = NaiveDateTime.utc_now()
if existing_record && existing_record.metadata_updated_at &&
NaiveDateTime.diff(now, existing_record.metadata_updated_at) < 86_400 do
existing_record.metadata
else
metadata = scrape_metadata(instance_uri)
if existing_record do
existing_record
|> changeset(%{metadata: metadata, metadata_updated_at: now})
|> Repo.update()
else
%Instance{}
|> changeset(%{host: host, metadata: metadata, metadata_updated_at: now})
|> Repo.insert()
end
metadata
end
end
defp get_nodeinfo_uri(well_known) do
links = Map.get(well_known, "links", [])
nodeinfo21 =
Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.1"))["href"]
nodeinfo20 =
Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))["href"]
cond do
is_binary(nodeinfo21) -> {:ok, nodeinfo21}
is_binary(nodeinfo20) -> {:ok, nodeinfo20}
true -> {:error, :no_links}
end
end
defp scrape_metadata(%URI{} = instance_uri) do
try do
with {_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{body: well_known_body}} <-
instance_uri
|> URI.merge("/.well-known/nodeinfo")
|> to_string()
|> Pleroma.HTTP.get([{"accept", "application/json"}]),
{:ok, well_known_json} <- Jason.decode(well_known_body),
{:ok, nodeinfo_uri} <- get_nodeinfo_uri(well_known_json),
{:ok, %Tesla.Env{body: nodeinfo_body}} <-
Pleroma.HTTP.get(nodeinfo_uri, [{"accept", "application/json"}]),
{:ok, nodeinfo} <- Jason.decode(nodeinfo_body) do
# Can extract more metadata from NodeInfo but need to be careful about its size,
# can't just dump the entire thing
software = Map.get(nodeinfo, "software", %{})
%{
software_name: software["name"],
software_version: software["version"]
}
|> Maps.put_if_present(:software_repository, software["repository"])
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_metadata(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
_ ->
nil
end
rescue
e ->
Logger.warn(
"Instance.scrape_metadata(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
)
nil
end
end
@doc """ @doc """
Deletes all users from an instance in a background task, thus also deleting Deletes all users from an instance in a background task, thus also deleting
all of those users' activities and notifications. all of those users' activities and notifications.

View File

@ -178,6 +178,7 @@ defp exclude_filtered(query, user) do
from([_n, a, o] in query, from([_n, a, o] in query,
where: where:
fragment("not(?->>'content' ~* ?)", o.data, ^regex) or fragment("not(?->>'content' ~* ?)", o.data, ^regex) or
fragment("?->>'content' is null", o.data) or
fragment("?->>'actor' = ?", o.data, ^user.ap_id) fragment("?->>'actor' = ?", o.data, ^user.ap_id)
) )
end end
@ -338,14 +339,6 @@ def destroy_multiple(%{id: user_id} = _user, ids) do
|> Repo.delete_all() |> Repo.delete_all()
end end
def destroy_multiple_from_types(%{id: user_id}, types) do
from(n in Notification,
where: n.user_id == ^user_id,
where: n.type in ^types
)
|> Repo.delete_all()
end
def dismiss(%Pleroma.Activity{} = activity) do def dismiss(%Pleroma.Activity{} = activity) do
Notification Notification
|> where([n], n.activity_id == ^activity.id) |> where([n], n.activity_id == ^activity.id)
@ -559,7 +552,9 @@ def get_potential_receiver_ap_ids(%{data: %{"type" => "Follow", "object" => obje
end end
def get_potential_receiver_ap_ids(%{data: %{"type" => "Flag", "actor" => actor}}) do def get_potential_receiver_ap_ids(%{data: %{"type" => "Flag", "actor" => actor}}) do
(User.all_superusers() |> Enum.map(fn user -> user.ap_id end)) -- [actor] (User.all_users_with_privilege(:reports_manage_reports)
|> Enum.map(fn user -> user.ap_id end)) --
[actor]
end end
# Update activity: notify all who repeated this # Update activity: notify all who repeated this
@ -685,7 +680,7 @@ def skip?(
cond do cond do
opts[:type] == "poll" -> false opts[:type] == "poll" -> false
user.ap_id == actor -> false user.ap_id == actor -> false
!User.following?(follower, user) -> true !User.following?(user, follower) -> true
true -> false true -> false
end end
end end

View File

@ -425,4 +425,30 @@ def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do
end end
def object_data_hashtags(_), do: [] def object_data_hashtags(_), do: []
def get_emoji_reactions(object) do
reactions = object.data["reactions"]
if is_list(reactions) or is_map(reactions) do
reactions
|> Enum.map(fn
[_emoji, users, _maybe_url] = item when is_list(users) ->
item
[emoji, users] when is_list(users) ->
[emoji, users, nil]
# This case is here to process the Map situation, which will happen
# only with the legacy two-value format.
{emoji, users} when is_list(users) ->
[emoji, users, nil]
_ ->
nil
end)
|> Enum.reject(&is_nil/1)
else
[]
end
end
end end

View File

@ -8,77 +8,30 @@ defmodule Pleroma.Object.Fetcher do
alias Pleroma.Maps alias Pleroma.Maps
alias Pleroma.Object alias Pleroma.Object
alias Pleroma.Object.Containment alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.Signature alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor alias Pleroma.Web.ActivityPub.InternalFetchActor
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.ObjectValidator alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator alias Pleroma.Web.Federator
require Logger require Logger
require Pleroma.Constants require Pleroma.Constants
defp touch_changeset(changeset) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
has_history? = fn
%{"formerRepresentations" => %{"orderedItems" => list}} when is_list(list) -> true
_ -> false
end
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
remote_history_exists? = has_history?.(new_data)
# If the remote history exists, we treat that as the only source of truth.
new_data =
if has_history?.(old_data) and not remote_history_exists? do
Map.put(new_data, "formerRepresentations", old_data["formerRepresentations"])
else
new_data
end
# If the remote does not have history information, we need to manage it ourselves
new_data =
if not remote_history_exists? do
changed? =
Pleroma.Constants.status_updatable_fields()
|> Enum.any?(fn field -> Map.get(old_data, field) != Map.get(new_data, field) end)
%{updated_object: updated_object} =
new_data
|> Object.Updater.maybe_update_history(old_data,
updated: changed?,
use_history_in_new_object?: false
)
updated_object
else
new_data
end
Map.merge(new_data, internal_fields)
end
defp maybe_reinject_internal_fields(_, new_data), do: new_data
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()} @spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do defp reinject_object(%Object{data: %{}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}") Logger.debug("Reinjecting object #{new_data["id"]}")
with data <- maybe_reinject_internal_fields(object, new_data), with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
{:ok, data, _} <- ObjectValidator.validate(data, %{}), {:ok, new_data} <- MRF.filter(new_data),
changeset <- Object.change(object, %{data: data}), {:ok, new_object, _} <-
changeset <- touch_changeset(changeset), Object.Updater.do_update_and_invalidate_cache(
{:ok, object} <- Repo.insert_or_update(changeset), object,
{:ok, object} <- Object.set_cache(object) do new_data,
{:ok, object} _touch_changeset? = true
) do
{:ok, new_object}
else else
e -> e ->
Logger.error("Error while processing object: #{inspect(e)}") Logger.error("Error while processing object: #{inspect(e)}")
@ -86,20 +39,11 @@ defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data)
end end
end end
defp reinject_object(%Object{} = object, new_data) do defp reinject_object(_, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}") with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object} {:ok, object}
else else
e -> e -> e
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
end end
end end

View File

@ -5,6 +5,9 @@
defmodule Pleroma.Object.Updater do defmodule Pleroma.Object.Updater do
require Pleroma.Constants require Pleroma.Constants
alias Pleroma.Object
alias Pleroma.Repo
def update_content_fields(orig_object_data, updated_object) do def update_content_fields(orig_object_data, updated_object) do
Pleroma.Constants.status_updatable_fields() Pleroma.Constants.status_updatable_fields()
|> Enum.reduce( |> Enum.reduce(
@ -97,12 +100,14 @@ def maybe_update_history(
end end
defp maybe_update_poll(to_be_updated, updated_object) do defp maybe_update_poll(to_be_updated, updated_object) do
choice_key = fn data -> choice_key = fn
if Map.has_key?(data, "anyOf"), do: "anyOf", else: "oneOf" %{"anyOf" => [_ | _]} -> "anyOf"
%{"oneOf" => [_ | _]} -> "oneOf"
_ -> nil
end end
with true <- to_be_updated["type"] == "Question", with true <- to_be_updated["type"] == "Question",
key <- choice_key.(updated_object), key when not is_nil(key) <- choice_key.(updated_object),
true <- key == choice_key.(to_be_updated), true <- key == choice_key.(to_be_updated),
orig_choices <- to_be_updated[key] |> Enum.map(&Map.drop(&1, ["replies"])), orig_choices <- to_be_updated[key] |> Enum.map(&Map.drop(&1, ["replies"])),
new_choices <- updated_object[key] |> Enum.map(&Map.drop(&1, ["replies"])), new_choices <- updated_object[key] |> Enum.map(&Map.drop(&1, ["replies"])),
@ -237,4 +242,49 @@ def do_with_history(object, fun) do
{:history_items, e} -> e {:history_items, e} -> e
end end
end end
defp maybe_touch_changeset(changeset, true) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_touch_changeset(changeset, _), do: changeset
def do_update_and_invalidate_cache(orig_object, updated_object, touch_changeset? \\ false) do
orig_object_ap_id = updated_object["id"]
orig_object_data = orig_object.data
%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = make_new_object_data_from_update_object(orig_object_data, updated_object)
changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
|> maybe_touch_changeset(touch_changeset?)
with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end
{:ok, new_object, updated}
end
end
end end

View File

@ -40,7 +40,11 @@ defp with_media_attachments(
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset %{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
) )
when is_list(media_ids) do when is_list(media_ids) do
media_attachments = Utils.attachments_from_ids(%{media_ids: media_ids}) media_attachments =
Utils.attachments_from_ids(
%{media_ids: media_ids},
User.get_cached_by_id(changeset.data.user_id)
)
params = params =
params params

View File

@ -38,9 +38,9 @@ def filter([filter | rest], upload) do
{:ok, :noop} -> {:ok, :noop} ->
filter(rest, upload) filter(rest, upload)
error -> {:error, e} ->
Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(error)}") Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(e)}")
error {:error, e}
end end
end end
end end

View File

@ -33,7 +33,10 @@ defp read_when_empty(current_description, _, _) when is_binary(current_descripti
defp read_when_empty(_, file, tag) do defp read_when_empty(_, file, tag) do
try do try do
{tag_content, 0} = {tag_content, 0} =
System.cmd("exiftool", ["-b", "-s3", tag, file], stderr_to_stdout: true, parallelism: true) System.cmd("exiftool", ["-b", "-s3", tag, file],
stderr_to_stdout: false,
parallelism: true
)
tag_content = String.trim(tag_content) tag_content = String.trim(tag_content)

View File

@ -14,6 +14,7 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do
# Formats not compatible with exiftool at this time # Formats not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop} def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop} def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/svg" <> _}), do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do try do

View File

@ -0,0 +1,20 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.OnlyMedia do
@behaviour Pleroma.Upload.Filter
alias Pleroma.Upload
def filter(%Upload{content_type: content_type}) do
[type, _subtype] = String.split(content_type, "/")
if type in ["image", "video", "audio"] do
{:ok, :noop}
else
{:error, "Disallowed content-type: #{content_type}"}
end
end
def filter(_), do: {:ok, :noop}
end
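A hedged sketch of enabling the new filter; it follows the existing Pleroma.Upload :filters convention, and the StripLocation entry is only there as an example of an already-shipped filter:

# config/config.exs (illustrative)
config :pleroma, Pleroma.Upload,
  filters: [
    Pleroma.Upload.Filter.Exiftool.StripLocation,
    Pleroma.Upload.Filter.OnlyMedia
  ]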

View File

@ -124,7 +124,6 @@ defmodule Pleroma.User do
field(:domain_blocks, {:array, :string}, default: []) field(:domain_blocks, {:array, :string}, default: [])
field(:is_active, :boolean, default: true) field(:is_active, :boolean, default: true)
field(:no_rich_text, :boolean, default: false) field(:no_rich_text, :boolean, default: false)
field(:ap_enabled, :boolean, default: false)
field(:is_moderator, :boolean, default: false) field(:is_moderator, :boolean, default: false)
field(:is_admin, :boolean, default: false) field(:is_admin, :boolean, default: false)
field(:show_role, :boolean, default: true) field(:show_role, :boolean, default: true)
@ -326,7 +325,7 @@ def visible_for(%User{} = user, nil) do
end end
def visible_for(%User{} = user, for_user) do def visible_for(%User{} = user, for_user) do
if superuser?(for_user) do if privileged?(for_user, :users_manage_activation_state) do
:visible :visible
else else
visible_account_status(user) visible_account_status(user)
@ -353,10 +352,45 @@ defp visible_account_status(user) do
end end
end end
@spec superuser?(User.t()) :: boolean() @spec privileged?(User.t(), atom()) :: boolean()
def superuser?(%User{local: true, is_admin: true}), do: true def privileged?(%User{is_admin: false, is_moderator: false}, _), do: false
def superuser?(%User{local: true, is_moderator: true}), do: true
def superuser?(_), do: false def privileged?(
%User{local: true, is_admin: is_admin, is_moderator: is_moderator},
privilege_tag
),
do:
privileged_for?(privilege_tag, is_admin, :admin_privileges) or
privileged_for?(privilege_tag, is_moderator, :moderator_privileges)
def privileged?(_, _), do: false
defp privileged_for?(privilege_tag, true, config_role_key),
do: privilege_tag in Config.get([:instance, config_role_key])
defp privileged_for?(_, _, _), do: false
@spec privileges(User.t()) :: [atom()]
def privileges(%User{local: false}) do
[]
end
def privileges(%User{is_moderator: false, is_admin: false}) do
[]
end
def privileges(%User{local: true, is_moderator: true, is_admin: true}) do
(Config.get([:instance, :moderator_privileges]) ++ Config.get([:instance, :admin_privileges]))
|> Enum.uniq()
end
def privileges(%User{local: true, is_moderator: true, is_admin: false}) do
Config.get([:instance, :moderator_privileges])
end
def privileges(%User{local: true, is_moderator: false, is_admin: true}) do
Config.get([:instance, :admin_privileges])
end
@spec invisible?(User.t()) :: boolean() @spec invisible?(User.t()) :: boolean()
def invisible?(%User{invisible: true}), do: true def invisible?(%User{invisible: true}), do: true
@ -453,7 +487,6 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do
:nickname, :nickname,
:public_key, :public_key,
:avatar, :avatar,
:ap_enabled,
:banner, :banner,
:is_locked, :is_locked,
:last_refreshed_at, :last_refreshed_at,
@ -1026,11 +1059,7 @@ def maybe_direct_follow(%User{} = follower, %User{local: true} = followed) do
end end
def maybe_direct_follow(%User{} = follower, %User{} = followed) do def maybe_direct_follow(%User{} = follower, %User{} = followed) do
if not ap_enabled?(followed) do {:ok, follower, followed}
follow(follower, followed)
else
{:ok, follower, followed}
end
end end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities." @doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@ -1167,24 +1196,10 @@ def update_and_set_cache(struct, params) do
|> update_and_set_cache() |> update_and_set_cache()
end end
def update_and_set_cache(%{data: %Pleroma.User{} = user} = changeset) do def update_and_set_cache(changeset) do
was_superuser_before_update = User.superuser?(user)
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
set_cache(user) set_cache(user)
end end
|> maybe_remove_report_notifications(was_superuser_before_update)
end
defp maybe_remove_report_notifications({:ok, %Pleroma.User{} = user} = result, true) do
if not User.superuser?(user),
do: user |> Notification.destroy_multiple_from_types(["pleroma:report"])
result
end
defp maybe_remove_report_notifications(result, _) do
result
end end
def get_user_friends_ap_ids(user) do def get_user_friends_ap_ids(user) do
@ -1877,7 +1892,6 @@ def purge_user_changeset(user) do
confirmation_token: nil, confirmation_token: nil,
domain_blocks: [], domain_blocks: [],
is_active: false, is_active: false,
ap_enabled: false,
is_moderator: false, is_moderator: false,
is_admin: false, is_admin: false,
mascot: nil, mascot: nil,
@ -2130,10 +2144,6 @@ def get_public_key_for_ap_id(ap_id) do
end end
end end
def ap_enabled?(%User{local: true}), do: true
def ap_enabled?(%User{ap_enabled: ap_enabled}), do: ap_enabled
def ap_enabled?(_), do: false
@doc "Gets or fetch a user by uri or nickname." @doc "Gets or fetch a user by uri or nickname."
@spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()} @spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
def get_or_fetch("http://" <> _host = uri), do: get_or_fetch_by_ap_id(uri) def get_or_fetch("http://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
@ -2265,6 +2275,11 @@ def all_superusers do
|> Repo.all() |> Repo.all()
end end
@spec all_users_with_privilege(atom()) :: [User.t()]
def all_users_with_privilege(privilege) do
User.Query.build(%{is_privileged: privilege}) |> Repo.all()
end
def muting_reblogs?(%User{} = user, %User{} = target) do def muting_reblogs?(%User{} = user, %User{} = target) do
UserRelationship.reblog_mute_exists?(user, target) UserRelationship.reblog_mute_exists?(user, target)
end end
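A sketch of the instance configuration these helpers read. The two privilege atoms below appear elsewhere in this diff (:users_manage_activation_state, :reports_manage_reports); the overall lists are illustrative, not the shipped defaults:

# config/config.exs (illustrative)
config :pleroma, :instance,
  admin_privileges: [:users_manage_activation_state, :reports_manage_reports],
  moderator_privileges: [:reports_manage_reports]

# With that config, for an active local moderator account:
# User.privileged?(moderator, :reports_manage_reports) #=> true
# User.privileges(moderator)                           #=> [:reports_manage_reports]
# User.all_users_with_privilege(:reports_manage_reports) then matches active local admins and moderators.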

View File

@ -9,12 +9,14 @@ defmodule Pleroma.User.Backup do
import Ecto.Query import Ecto.Query
import Pleroma.Web.Gettext import Pleroma.Web.Gettext
require Logger
require Pleroma.Constants require Pleroma.Constants
alias Pleroma.Activity alias Pleroma.Activity
alias Pleroma.Bookmark alias Pleroma.Bookmark
alias Pleroma.Repo alias Pleroma.Repo
alias Pleroma.User alias Pleroma.User
alias Pleroma.User.Backup.State
alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.UserView alias Pleroma.Web.ActivityPub.UserView
@ -25,6 +27,8 @@ defmodule Pleroma.User.Backup do
field(:file_name, :string) field(:file_name, :string)
field(:file_size, :integer, default: 0) field(:file_size, :integer, default: 0)
field(:processed, :boolean, default: false) field(:processed, :boolean, default: false)
field(:state, State, default: :invalid)
field(:processed_number, :integer, default: 0)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType) belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
@ -46,7 +50,8 @@ def new(user) do
%__MODULE__{ %__MODULE__{
user_id: user.id, user_id: user.id,
content_type: "application/zip", content_type: "application/zip",
file_name: name file_name: name,
state: :pending
} }
end end
@ -109,27 +114,108 @@ def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do
def get(id), do: Repo.get(__MODULE__, id) def get(id), do: Repo.get(__MODULE__, id)
defp set_state(backup, state, processed_number \\ nil) do
struct =
%{state: state}
|> Pleroma.Maps.put_if_present(:processed_number, processed_number)
backup
|> cast(struct, [:state, :processed_number])
|> Repo.update()
end
def process(%__MODULE__{} = backup) do def process(%__MODULE__{} = backup) do
with {:ok, zip_file} <- export(backup), set_state(backup, :running, 0)
current_pid = self()
task =
Task.Supervisor.async_nolink(
Pleroma.TaskSupervisor,
__MODULE__,
:do_process,
[backup, current_pid]
)
wait_backup(backup, backup.processed_number, task)
end
def do_process(backup, current_pid) do
with {:ok, zip_file} <- export(backup, current_pid),
{:ok, %{size: size}} <- File.stat(zip_file), {:ok, %{size: size}} <- File.stat(zip_file),
{:ok, _upload} <- upload(backup, zip_file) do {:ok, _upload} <- upload(backup, zip_file) do
backup backup
|> cast(%{file_size: size, processed: true}, [:file_size, :processed]) |> cast(
%{
file_size: size,
processed: true,
state: :complete
},
[:file_size, :processed, :state]
)
|> Repo.update() |> Repo.update()
end end
end end
defp wait_backup(backup, current_processed, task) do
wait_time = Pleroma.Config.get([__MODULE__, :process_wait_time])
receive do
{:progress, new_processed} ->
total_processed = current_processed + new_processed
set_state(backup, :running, total_processed)
wait_backup(backup, total_processed, task)
{:DOWN, _ref, _proc, _pid, reason} ->
backup = get(backup.id)
if reason != :normal do
Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :exit,
details: reason
}}
else
{:ok, backup}
end
after
wait_time ->
Logger.error(
"Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
)
Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :timeout
}}
end
end
@files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json'] @files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json']
def export(%__MODULE__{} = backup) do def export(%__MODULE__{} = backup, caller_pid) do
backup = Repo.preload(backup, :user) backup = Repo.preload(backup, :user)
name = String.trim_trailing(backup.file_name, ".zip") dir = backup_tempdir(backup)
dir = dir(name)
with :ok <- File.mkdir(dir), with :ok <- File.mkdir(dir),
:ok <- actor(dir, backup.user), :ok <- actor(dir, backup.user, caller_pid),
:ok <- statuses(dir, backup.user), :ok <- statuses(dir, backup.user, caller_pid),
:ok <- likes(dir, backup.user), :ok <- likes(dir, backup.user, caller_pid),
:ok <- bookmarks(dir, backup.user), :ok <- bookmarks(dir, backup.user, caller_pid),
{:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir), {:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
{:ok, _} <- File.rm_rf(dir) do {:ok, _} <- File.rm_rf(dir) do
{:ok, to_string(zip_path)} {:ok, to_string(zip_path)}
@ -157,11 +243,12 @@ def upload(%__MODULE__{} = backup, zip_path) do
end end
end end
defp actor(dir, user) do defp actor(dir, user, caller_pid) do
with {:ok, json} <- with {:ok, json} <-
UserView.render("user.json", %{user: user}) UserView.render("user.json", %{user: user})
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"}) |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|> Jason.encode() do |> Jason.encode() do
send(caller_pid, {:progress, 1})
File.write(Path.join(dir, "actor.json"), json) File.write(Path.join(dir, "actor.json"), json)
end end
end end
@ -180,47 +267,80 @@ defp write_header(file, name) do
) )
end end
defp write(query, dir, name, fun) do defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
defp backup_tempdir(backup) do
name = String.trim_trailing(backup.file_name, ".zip")
dir(name)
end
defp cleanup(backup) do
dir = backup_tempdir(backup)
File.rm_rf(dir)
end
defp write(query, dir, name, fun, caller_pid) do
path = Path.join(dir, "#{name}.json") path = Path.join(dir, "#{name}.json")
chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
with {:ok, file} <- File.open(path, [:write, :utf8]), with {:ok, file} <- File.open(path, [:write, :utf8]),
:ok <- write_header(file, name) do :ok <- write_header(file, name) do
total = total =
query query
|> Pleroma.Repo.chunk_stream(100) |> Pleroma.Repo.chunk_stream(chunk_size, _returns_as = :one, timeout: :infinity)
|> Enum.reduce(0, fn i, acc -> |> Enum.reduce(0, fn i, acc ->
with {:ok, data} <- fun.(i), with {:ok, data} <-
(try do
fun.(i)
rescue
e -> {:error, e}
end),
{:ok, str} <- Jason.encode(data), {:ok, str} <- Jason.encode(data),
:ok <- IO.write(file, str <> ",\n") do :ok <- IO.write(file, str <> ",\n") do
if should_report?(acc + 1, chunk_size) do
send(caller_pid, {:progress, chunk_size})
end
acc + 1 acc + 1
else else
_ -> acc {:error, e} ->
Logger.warn(
"Error processing backup item: #{inspect(e)}\n The item is: #{inspect(i)}"
)
acc
_ ->
acc
end end
end) end)
send(caller_pid, {:progress, rem(total, chunk_size)})
with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
File.close(file) File.close(file)
end end
end end
end end
defp bookmarks(dir, %{id: user_id} = _user) do defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
Bookmark Bookmark
|> where(user_id: ^user_id) |> where(user_id: ^user_id)
|> join(:inner, [b], activity in assoc(b, :activity)) |> join(:inner, [b], activity in assoc(b, :activity))
|> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)}) |> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
|> write(dir, "bookmarks", fn a -> {:ok, a.object} end) |> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
end end
defp likes(dir, user) do defp likes(dir, user, caller_pid) do
user.ap_id user.ap_id
|> Activity.Queries.by_actor() |> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like") |> Activity.Queries.by_type("Like")
|> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)}) |> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
|> write(dir, "likes", fn a -> {:ok, a.object} end) |> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
end end
defp statuses(dir, user) do defp statuses(dir, user, caller_pid) do
opts = opts =
%{} %{}
|> Map.put(:type, ["Create", "Announce"]) |> Map.put(:type, ["Create", "Announce"])
@ -233,10 +353,15 @@ defp statuses(dir, user) do
] ]
|> Enum.concat() |> Enum.concat()
|> ActivityPub.fetch_activities_query(opts) |> ActivityPub.fetch_activities_query(opts)
|> write(dir, "outbox", fn a -> |> write(
with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do dir,
{:ok, Map.delete(activity, "@context")} "outbox",
end fn a ->
end) with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
{:ok, Map.delete(activity, "@context")}
end
end,
caller_pid
)
end end
end end
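The new processing flow reads two tuning knobs via Pleroma.Config (see the Config.get([__MODULE__, ...]) calls above); a hedged sketch with example values:

# config/config.exs (illustrative values)
config :pleroma, Pleroma.User.Backup,
  # ms to wait for a :progress or :DOWN message before terminating the export task
  process_wait_time: 30 * 60 * 1_000,
  # rows streamed per chunk; also the granularity of progress reports
  process_chunk_size: 100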

View File

@ -29,6 +29,7 @@ defmodule Pleroma.User.Query do
import Ecto.Query import Ecto.Query
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
alias Pleroma.Config
alias Pleroma.FollowingRelationship alias Pleroma.FollowingRelationship
alias Pleroma.User alias Pleroma.User
@ -49,6 +50,7 @@ defmodule Pleroma.User.Query do
is_suggested: boolean(), is_suggested: boolean(),
is_discoverable: boolean(), is_discoverable: boolean(),
super_users: boolean(), super_users: boolean(),
is_privileged: atom(),
invisible: boolean(), invisible: boolean(),
internal: boolean(), internal: boolean(),
followers: User.t(), followers: User.t(),
@ -136,6 +138,43 @@ defp compose_query({:super_users, _}, query) do
) )
end end
defp compose_query({:is_privileged, privilege}, query) do
moderator_privileged = privilege in Config.get([:instance, :moderator_privileges])
admin_privileged = privilege in Config.get([:instance, :admin_privileges])
query = compose_query({:active, true}, query)
query = compose_query({:local, true}, query)
case {admin_privileged, moderator_privileged} do
{false, false} ->
where(
query,
false
)
{true, true} ->
where(
query,
[u],
u.is_admin or u.is_moderator
)
{true, false} ->
where(
query,
[u],
u.is_admin
)
{false, true} ->
where(
query,
[u],
u.is_moderator
)
end
end
defp compose_query({:local, _}, query), do: location_query(query, true) defp compose_query({:local, _}, query), do: location_query(query, true)
defp compose_query({:external, _}, query), do: location_query(query, false) defp compose_query({:external, _}, query), do: location_query(query, false)

View File

@ -96,7 +96,7 @@ defp increase_replies_count_if_reply(%{
defp increase_replies_count_if_reply(_create_data), do: :noop defp increase_replies_count_if_reply(_create_data), do: :noop
@object_types ~w[ChatMessage Question Answer Audio Video Event Article Note Page] @object_types ~w[ChatMessage Question Answer Audio Video Image Event Article Note Page]
@impl true @impl true
def persist(%{"type" => type} = object, meta) when type in @object_types do def persist(%{"type" => type} = object, meta) when type in @object_types do
with {:ok, object} <- Object.create(object) do with {:ok, object} <- Object.create(object) do
@ -404,11 +404,11 @@ defp do_flag(
_ <- notify_and_stream(activity), _ <- notify_and_stream(activity),
:ok <- :ok <-
maybe_federate(stripped_activity) do maybe_federate(stripped_activity) do
User.all_superusers() User.all_users_with_privilege(:reports_manage_reports)
|> Enum.filter(fn user -> user.ap_id != actor end) |> Enum.filter(fn user -> user.ap_id != actor end)
|> Enum.filter(fn user -> not is_nil(user.email) end) |> Enum.filter(fn user -> not is_nil(user.email) end)
|> Enum.each(fn superuser -> |> Enum.each(fn privileged_user ->
superuser privileged_user
|> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content) |> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
|> Pleroma.Emails.Mailer.deliver_async() |> Pleroma.Emails.Mailer.deliver_async()
end) end)
@ -458,6 +458,7 @@ def fetch_activities_for_context_query(context, opts) do
|> maybe_preload_objects(opts) |> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts) |> maybe_preload_bookmarks(opts)
|> maybe_set_thread_muted_field(opts) |> maybe_set_thread_muted_field(opts)
|> restrict_unauthenticated(opts[:user])
|> restrict_blocked(opts) |> restrict_blocked(opts)
|> restrict_blockers_visibility(opts) |> restrict_blockers_visibility(opts)
|> restrict_recipients(recipients, opts[:user]) |> restrict_recipients(recipients, opts[:user])
@ -1218,6 +1219,27 @@ defp restrict_filtered(query, %{blocking_user: %User{} = user}) do
defp restrict_filtered(query, _), do: query defp restrict_filtered(query, _), do: query
defp restrict_unauthenticated(query, nil) do
local = Config.restrict_unauthenticated_access?(:activities, :local)
remote = Config.restrict_unauthenticated_access?(:activities, :remote)
cond do
local and remote ->
from(activity in query, where: false)
local ->
from(activity in query, where: activity.local == false)
remote ->
from(activity in query, where: activity.local == true)
true ->
query
end
end
defp restrict_unauthenticated(query, _), do: query
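The clauses above consult Config.restrict_unauthenticated_access?/2; a minimal sketch of the relevant key, assuming the same :restrict_unauthenticated shape already used for timelines and profiles (values are examples):

# config/config.exs (illustrative): hide local and/or remote activities from
# unauthenticated context fetches
config :pleroma, :restrict_unauthenticated,
  activities: %{local: false, remote: true}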
defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
defp exclude_poll_votes(query, _) do defp exclude_poll_votes(query, _) do
@ -1456,13 +1478,22 @@ def fetch_activities_bounded(
@spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()} @spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
def upload(file, opts \\ []) do def upload(file, opts \\ []) do
with {:ok, data} <- Upload.store(file, opts) do with {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
obj_data = Maps.put_if_present(data, "actor", opts[:actor]) obj_data = Maps.put_if_present(data, "actor", opts[:actor])
Repo.insert(%Object{data: obj_data}) Repo.insert(%Object{data: obj_data})
end end
end end
defp sanitize_upload_file(%Plug.Upload{filename: filename} = upload) when is_binary(filename) do
%Plug.Upload{
upload
| filename: Path.basename(filename)
}
end
defp sanitize_upload_file(upload), do: upload
@spec get_actor_url(any()) :: binary() | nil @spec get_actor_url(any()) :: binary() | nil
defp get_actor_url(url) when is_binary(url), do: url defp get_actor_url(url) when is_binary(url), do: url
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
@ -1541,7 +1572,6 @@ defp object_to_user_data(data, additional) do
%{ %{
ap_id: data["id"], ap_id: data["id"],
uri: get_actor_url(data["url"]), uri: get_actor_url(data["url"]),
ap_enabled: true,
banner: normalize_image(data["image"]), banner: normalize_image(data["image"]),
fields: fields, fields: fields,
emoji: emojis, emoji: emojis,
@ -1662,7 +1692,7 @@ def user_data_from_user_object(data, additional \\ []) do
end end
end end
def fetch_and_prepare_user_from_ap_id(ap_id, additional \\ []) do defp fetch_and_prepare_user_from_ap_id(ap_id, additional) do
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id), with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
{:ok, data} <- user_data_from_user_object(data, additional) do {:ok, data} <- user_data_from_user_object(data, additional) do
{:ok, maybe_update_follow_information(data)} {:ok, maybe_update_follow_information(data)}
@ -1715,6 +1745,11 @@ def pin_data_from_featured_collection(%{
end) end)
end end
def pin_data_from_featured_collection(obj) do
Logger.error("Could not parse featured collection #{inspect(obj)}")
%{}
end
def fetch_and_prepare_featured_from_ap_id(nil) do def fetch_and_prepare_featured_from_ap_id(nil) do
{:ok, %{}} {:ok, %{}}
end end
@ -1745,24 +1780,20 @@ def pinned_fetch_task(%{pinned_objects: pins}) do
def make_user_from_ap_id(ap_id, additional \\ []) do def make_user_from_ap_id(ap_id, additional \\ []) do
user = User.get_cached_by_ap_id(ap_id) user = User.get_cached_by_ap_id(ap_id)
if user && !User.ap_enabled?(user) do with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
Transmogrifier.upgrade_user_from_ap_id(ap_id) {:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
else
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
if user do if user do
user user
|> User.remote_user_changeset(data) |> User.remote_user_changeset(data)
|> User.update_and_set_cache() |> User.update_and_set_cache()
else else
maybe_handle_clashing_nickname(data) maybe_handle_clashing_nickname(data)
data data
|> User.remote_user_changeset() |> User.remote_user_changeset()
|> Repo.insert() |> Repo.insert()
|> User.set_cache() |> User.set_cache()
end
end end
end end
end end

View File

@ -16,6 +16,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft alias Pleroma.Web.CommonAPI.ActivityDraft
alias Pleroma.Web.Endpoint
require Pleroma.Constants require Pleroma.Constants
@ -54,13 +55,87 @@ def follow(follower, followed) do
{:ok, data, []} {:ok, data, []}
end end
defp unicode_emoji_react(_object, data, emoji) do
data
|> Map.put("content", emoji)
|> Map.put("type", "EmojiReact")
end
defp add_emoji_content(data, emoji, url) do
tag = [
%{
"id" => url,
"type" => "Emoji",
"name" => Emoji.maybe_quote(emoji),
"icon" => %{
"type" => "Image",
"url" => url
}
}
]
data
|> Map.put("content", Emoji.maybe_quote(emoji))
|> Map.put("type", "EmojiReact")
|> Map.put("tag", tag)
end
defp remote_custom_emoji_react(
%{data: %{"reactions" => existing_reactions}},
data,
emoji
) do
[emoji_code, instance] = String.split(Emoji.maybe_strip_name(emoji), "@")
matching_reaction =
Enum.find(
existing_reactions,
fn [name, _, url] ->
if url != nil do
url = URI.parse(url)
url.host == instance && name == emoji_code
end
end
)
if matching_reaction do
[name, _, url] = matching_reaction
add_emoji_content(data, name, url)
else
{:error, "Could not react"}
end
end
defp remote_custom_emoji_react(_object, _data, _emoji) do
{:error, "Could not react"}
end
defp local_custom_emoji_react(data, emoji) do
with %{file: path} = emojo <- Emoji.get(emoji) do
url = "#{Endpoint.url()}#{path}"
add_emoji_content(data, emojo.code, url)
else
_ -> {:error, "Emoji does not exist"}
end
end
defp custom_emoji_react(object, data, emoji) do
if String.contains?(emoji, "@") do
remote_custom_emoji_react(object, data, emoji)
else
local_custom_emoji_react(data, emoji)
end
end
@spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()} @spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()}
def emoji_react(actor, object, emoji) do def emoji_react(actor, object, emoji) do
with {:ok, data, meta} <- object_action(actor, object) do with {:ok, data, meta} <- object_action(actor, object) do
data = data =
data if Emoji.is_unicode_emoji?(emoji) do
|> Map.put("content", emoji) unicode_emoji_react(object, data, emoji)
|> Map.put("type", "EmojiReact") else
custom_emoji_react(object, data, emoji)
end
{:ok, data, meta} {:ok, data, meta}
end end
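For illustration, reacting with a local custom emoji ":blobcat:" would take the custom-emoji branch above and build activity data roughly like the following (instance URL and pack path are made up; actor/object/to fields produced by object_action/2 are omitted):

%{
  "type" => "EmojiReact",
  "content" => ":blobcat:",
  "tag" => [
    %{
      "id" => "https://myinstance.example/emoji/custom/blobcat.png",
      "type" => "Emoji",
      "name" => ":blobcat:",
      "icon" => %{"type" => "Image", "url" => "https://myinstance.example/emoji/custom/blobcat.png"}
    }
  ]
}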
@ -142,6 +217,7 @@ def note(%ActivityDraft{} = draft) do
"tag" => Keyword.values(draft.tags) |> Enum.uniq() "tag" => Keyword.values(draft.tags) |> Enum.uniq()
} }
|> add_in_reply_to(draft.in_reply_to) |> add_in_reply_to(draft.in_reply_to)
|> add_quote(draft.quote_post)
|> Map.merge(draft.extra) |> Map.merge(draft.extra)
{:ok, data, []} {:ok, data, []}
@ -157,6 +233,16 @@ defp add_in_reply_to(object, in_reply_to) do
end end
end end
defp add_quote(object, nil), do: object
defp add_quote(object, quote_post) do
with %Object{} = quote_object <- Object.normalize(quote_post, fetch: false) do
Map.put(object, "quoteUrl", quote_object.data["id"])
else
_ -> object
end
end
def chat_message(actor, recipient, content, opts \\ []) do def chat_message(actor, recipient, content, opts \\ []) do
basic = %{ basic = %{
"id" => Utils.generate_object_id(), "id" => Utils.generate_object_id(),

View File

@ -0,0 +1,281 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.EmojiPolicy do
require Pleroma.Constants
alias Pleroma.Object.Updater
alias Pleroma.Web.ActivityPub.MRF.Utils
@moduledoc "Reject or force-unlisted emojis with certain URLs or names"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp config_remove_url do
Pleroma.Config.get([:mrf_emoji, :remove_url], [])
end
defp config_remove_shortcode do
Pleroma.Config.get([:mrf_emoji, :remove_shortcode], [])
end
defp config_unlist_url do
Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_url], [])
end
defp config_unlist_shortcode do
Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_shortcode], [])
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def history_awareness, do: :manual
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"type" => type, "object" => %{"type" => objtype} = object} = message)
when type in ["Create", "Update"] and objtype in Pleroma.Constants.status_object_types() do
with {:ok, object} <-
Updater.do_with_history(object, fn object ->
{:ok, process_remove(object, :url, config_remove_url())}
end),
{:ok, object} <-
Updater.do_with_history(object, fn object ->
{:ok, process_remove(object, :shortcode, config_remove_shortcode())}
end),
activity <- Map.put(message, "object", object),
activity <- maybe_delist(activity) do
{:ok, activity}
end
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"type" => type} = object) when type in Pleroma.Constants.actor_types() do
with object <- process_remove(object, :url, config_remove_url()),
object <- process_remove(object, :shortcode, config_remove_shortcode()) do
{:ok, object}
end
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"type" => "EmojiReact"} = object) do
with {:ok, _} <-
matched_emoji_checker(config_remove_url(), config_remove_shortcode()).(object) do
{:ok, object}
else
_ ->
{:reject, "[EmojiPolicy] Rejected for having disallowed emoji"}
end
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(message) do
{:ok, message}
end
defp match_string?(string, pattern) when is_binary(pattern) do
string == pattern
end
defp match_string?(string, %Regex{} = pattern) do
String.match?(string, pattern)
end
defp match_any?(string, patterns) do
Enum.any?(patterns, &match_string?(string, &1))
end
defp url_from_tag(%{"icon" => %{"url" => url}}), do: url
defp url_from_tag(_), do: nil
defp url_from_emoji({_name, url}), do: url
defp shortcode_from_tag(%{"name" => name}) when is_binary(name), do: String.trim(name, ":")
defp shortcode_from_tag(_), do: nil
defp shortcode_from_emoji({name, _url}), do: name
defp process_remove(object, :url, patterns) do
process_remove_impl(object, &url_from_tag/1, &url_from_emoji/1, patterns)
end
defp process_remove(object, :shortcode, patterns) do
process_remove_impl(object, &shortcode_from_tag/1, &shortcode_from_emoji/1, patterns)
end
defp process_remove_impl(object, extract_from_tag, extract_from_emoji, patterns) do
object =
if object["tag"] do
Map.put(
object,
"tag",
Enum.filter(
object["tag"],
fn
%{"type" => "Emoji"} = tag ->
str = extract_from_tag.(tag)
if is_binary(str) do
not match_any?(str, patterns)
else
true
end
_ ->
true
end
)
)
else
object
end
object =
if object["emoji"] do
Map.put(
object,
"emoji",
object["emoji"]
|> Enum.reduce(%{}, fn {name, url} = emoji, acc ->
if not match_any?(extract_from_emoji.(emoji), patterns) do
Map.put(acc, name, url)
else
acc
end
end)
)
else
object
end
object
end
defp matched_emoji_checker(urls, shortcodes) do
fn object ->
if any_emoji_match?(object, &url_from_tag/1, &url_from_emoji/1, urls) or
any_emoji_match?(
object,
&shortcode_from_tag/1,
&shortcode_from_emoji/1,
shortcodes
) do
{:matched, nil}
else
{:ok, %{}}
end
end
end
defp maybe_delist(%{"object" => object, "to" => to, "type" => "Create"} = activity) do
check = matched_emoji_checker(config_unlist_url(), config_unlist_shortcode())
should_delist? = fn object ->
with {:ok, _} <- Pleroma.Object.Updater.do_with_history(object, check) do
false
else
_ -> true
end
end
if Pleroma.Constants.as_public() in to and should_delist?.(object) do
to = List.delete(to, Pleroma.Constants.as_public())
cc = [Pleroma.Constants.as_public() | activity["cc"] || []]
activity
|> Map.put("to", to)
|> Map.put("cc", cc)
else
activity
end
end
defp maybe_delist(activity), do: activity
defp any_emoji_match?(object, extract_from_tag, extract_from_emoji, patterns) do
Kernel.||(
Enum.any?(
object["tag"] || [],
fn
%{"type" => "Emoji"} = tag ->
str = extract_from_tag.(tag)
if is_binary(str) do
match_any?(str, patterns)
else
false
end
_ ->
false
end
),
(object["emoji"] || [])
|> Enum.any?(fn emoji -> match_any?(extract_from_emoji.(emoji), patterns) end)
)
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def describe do
mrf_emoji =
Pleroma.Config.get(:mrf_emoji, [])
|> Enum.map(fn {key, value} ->
{key, Enum.map(value, &Utils.describe_regex_or_string/1)}
end)
|> Enum.into(%{})
{:ok, %{mrf_emoji: mrf_emoji}}
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def config_description do
%{
key: :mrf_emoji,
related_policy: "Pleroma.Web.ActivityPub.MRF.EmojiPolicy",
label: "MRF Emoji",
description:
"Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html).",
children: [
%{
key: :remove_url,
type: {:list, :string},
description: """
A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
},
%{
key: :remove_shortcode,
type: {:list, :string},
description: """
A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["foo", ~r/foo/iu]
},
%{
key: :federated_timeline_removal_url,
type: {:list, :string},
description: """
A list of patterns which result in messages with emojis whose URLs match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
},
%{
key: :federated_timeline_removal_shortcode,
type: {:list, :string},
description: """
A list of patterns which result in messages with emojis whose shortcodes match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["foo", ~r/foo/iu]
}
]
}
end
end
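A hedged enablement sketch for the new policy; the option keys mirror config_description/0 above, and the pattern values are examples only:

# config/config.exs
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.EmojiPolicy]

config :pleroma, :mrf_emoji,
  remove_url: [~r/badinstance\.example\/emoji/iu],
  remove_shortcode: ["annoying_emoji"],
  federated_timeline_removal_url: [],
  federated_timeline_removal_shortcode: [~r/nsfw/iu]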

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server # Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/> # Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
@ -95,11 +95,13 @@ def filter(
|> Enum.reject(&is_nil/1) |> Enum.reject(&is_nil/1)
|> sort_replied_user(replied_to_user) |> sort_replied_user(replied_to_user)
explicitly_mentioned_uris = extract_mention_uris_from_content(content) explicitly_mentioned_uris =
extract_mention_uris_from_content(content)
|> MapSet.new()
added_mentions = added_mentions =
Enum.reduce(mention_users, "", fn %User{ap_id: uri} = user, acc -> Enum.reduce(mention_users, "", fn %User{ap_id: ap_id, uri: uri} = user, acc ->
unless uri in explicitly_mentioned_uris do if MapSet.disjoint?(MapSet.new([ap_id, uri]), explicitly_mentioned_uris) do
acc <> Formatter.mention_from_user(user, %{mentions_format: :compact}) <> " " acc <> Formatter.mention_from_user(user, %{mentions_format: :compact}) <> " "
else else
acc acc

View File

@ -0,0 +1,78 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy do
@moduledoc "Force a quote line into the message content."
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp build_inline_quote(template, url) do
quote_line = String.replace(template, "{url}", "<a href=\"#{url}\">#{url}</a>")
"<span class=\"quote-inline\"><br/><br/>#{quote_line}</span>"
end
defp has_inline_quote?(content, quote_url) do
cond do
# Does the quote URL exist in the content?
content =~ quote_url -> true
# Does the content already have a .quote-inline span?
content =~ "<span class=\"quote-inline\">" -> true
# No inline quote found
true -> false
end
end
defp filter_object(%{"quoteUrl" => quote_url} = object) do
content = object["content"] || ""
if has_inline_quote?(content, quote_url) do
object
else
template = Pleroma.Config.get([:mrf_inline_quote, :template])
content =
if String.ends_with?(content, "</p>"),
do:
String.trim_trailing(content, "</p>") <>
build_inline_quote(template, quote_url) <> "</p>",
else: content <> build_inline_quote(template, quote_url)
Map.put(object, "content", content)
end
end
@impl true
def filter(%{"object" => %{"quoteUrl" => _} = object} = activity) do
{:ok, Map.put(activity, "object", filter_object(object))}
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe, do: {:ok, %{}}
@impl Pleroma.Web.ActivityPub.MRF.Policy
def history_awareness, do: :auto
@impl true
def config_description do
%{
key: :mrf_inline_quote,
related_policy: "Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy",
label: "MRF Inline Quote Policy",
type: :group,
description: "Force quote url to appear in post content.",
children: [
%{
key: :template,
type: :string,
description:
"The template to append to the post. `{url}` will be replaced with the actual link to the quoted post.",
suggestions: ["<bdi>RT:</bdi> {url}"]
}
]
}
end
end
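Enabling this policy would look roughly like the sketch below; the template value is the suggestion from config_description/0:

# config/config.exs
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy]

config :pleroma, :mrf_inline_quote,
  template: "<bdi>RT:</bdi> {url}"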

View File

@ -5,6 +5,8 @@
defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
require Pleroma.Constants require Pleroma.Constants
alias Pleroma.Web.ActivityPub.MRF.Utils
@moduledoc "Reject or Word-Replace messages with a keyword or regex" @moduledoc "Reject or Word-Replace messages with a keyword or regex"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy @behaviour Pleroma.Web.ActivityPub.MRF.Policy
@ -128,7 +130,6 @@ def filter(message), do: {:ok, message}
@impl true @impl true
def describe do def describe do
# This horror is needed to convert regex sigils to strings
mrf_keyword = mrf_keyword =
Pleroma.Config.get(:mrf_keyword, []) Pleroma.Config.get(:mrf_keyword, [])
|> Enum.map(fn {key, value} -> |> Enum.map(fn {key, value} ->
@ -136,21 +137,12 @@ def describe do
Enum.map(value, fn Enum.map(value, fn
{pattern, replacement} -> {pattern, replacement} ->
%{ %{
"pattern" => "pattern" => Utils.describe_regex_or_string(pattern),
if not is_binary(pattern) do
inspect(pattern)
else
pattern
end,
"replacement" => replacement "replacement" => replacement
} }
pattern -> pattern ->
if not is_binary(pattern) do Utils.describe_regex_or_string(pattern)
inspect(pattern)
else
pattern
end
end)} end)}
end) end)
|> Enum.into(%{}) |> Enum.into(%{})

View File

@ -0,0 +1,49 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy do
@moduledoc "Force a Link tag for posts quoting another post. (may break outgoing federation of quote posts with older Pleroma versions)"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
require Pleroma.Constants
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"object" => %{"quoteUrl" => _} = object} = activity) do
{:ok, Map.put(activity, "object", filter_object(object))}
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(object), do: {:ok, object}
@impl Pleroma.Web.ActivityPub.MRF.Policy
def describe, do: {:ok, %{}}
@impl Pleroma.Web.ActivityPub.MRF.Policy
def history_awareness, do: :auto
defp filter_object(%{"quoteUrl" => quote_url} = object) do
tags = object["tag"] || []
if Enum.any?(tags, fn tag ->
CommonFixes.is_object_link_tag(tag) and tag["href"] == quote_url
end) do
object
else
object
|> Map.put(
"tag",
tags ++
[
%{
"type" => "Link",
"mediaType" => Pleroma.Constants.activity_json_canonical_mime_type(),
"href" => quote_url
}
]
)
end
end
end

View File

@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.Utils do
@spec describe_regex_or_string(String.t() | Regex.t()) :: String.t()
def describe_regex_or_string(pattern) do
# This horror is needed to convert regex sigils to strings
if not is_binary(pattern) do
inspect(pattern)
else
pattern
end
end
end
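Example behaviour, as an iex sketch:

iex> Pleroma.Web.ActivityPub.MRF.Utils.describe_regex_or_string(~r/foo/i)
"~r/foo/i"
iex> Pleroma.Web.ActivityPub.MRF.Utils.describe_regex_or_string("foo")
"foo"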

View File

@ -21,7 +21,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.AnswerValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AnswerValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.BlockValidator alias Pleroma.Web.ActivityPub.ObjectValidators.BlockValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator alias Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator
@ -102,7 +102,7 @@ def validate(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = create_activity, %{"type" => "Create", "object" => %{"type" => objtype} = object} = create_activity,
meta meta
) )
when objtype in ~w[Question Answer Audio Video Event Article Note Page] do when objtype in ~w[Question Answer Audio Video Image Event Article Note Page] do
with {:ok, object_data} <- cast_and_apply_and_stringify_with_history(object), with {:ok, object_data} <- cast_and_apply_and_stringify_with_history(object),
meta = Keyword.put(meta, :object_data, object_data), meta = Keyword.put(meta, :object_data, object_data),
{:ok, create_activity} <- {:ok, create_activity} <-
@ -115,13 +115,14 @@ def validate(
end end
def validate(%{"type" => type} = object, meta) def validate(%{"type" => type} = object, meta)
when type in ~w[Event Question Audio Video Article Note Page] do when type in ~w[Event Question Audio Video Image Article Note Page] do
validator = validator =
case type do case type do
"Event" -> EventValidator "Event" -> EventValidator
"Question" -> QuestionValidator "Question" -> QuestionValidator
"Audio" -> AudioVideoValidator "Audio" -> AudioImageVideoValidator
"Video" -> AudioVideoValidator "Video" -> AudioImageVideoValidator
"Image" -> AudioImageVideoValidator
"Article" -> ArticleNotePageValidator "Article" -> ArticleNotePageValidator
"Note" -> ArticleNotePageValidator "Note" -> ArticleNotePageValidator
"Page" -> ArticleNotePageValidator "Page" -> ArticleNotePageValidator
@ -233,8 +234,8 @@ def cast_and_apply(%{"type" => "Answer"} = object) do
AnswerValidator.cast_and_apply(object) AnswerValidator.cast_and_apply(object)
end end
def cast_and_apply(%{"type" => type} = object) when type in ~w[Audio Video] do def cast_and_apply(%{"type" => type} = object) when type in ~w[Audio Image Video] do
AudioVideoValidator.cast_and_apply(object) AudioImageVideoValidator.cast_and_apply(object)
end end
def cast_and_apply(%{"type" => "Event"} = object) do def cast_and_apply(%{"type" => "Event"} = object) do

View File

@ -73,6 +73,7 @@ defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(
end end
defp maybe_refetch_user(%User{ap_id: ap_id}) do defp maybe_refetch_user(%User{ap_id: ap_id}) do
Pleroma.Web.ActivityPub.Transmogrifier.upgrade_user_from_ap_id(ap_id) # Maybe it could use User.get_or_fetch_by_ap_id to avoid refreshing too often
User.fetch_by_ap_id(ap_id)
end end
end end

View File

@ -84,6 +84,7 @@ defp fix(data) do
|> fix_tag() |> fix_tag()
|> fix_replies() |> fix_replies()
|> fix_attachments() |> fix_attachments()
|> CommonFixes.fix_quote_url()
|> Transmogrifier.fix_emoji() |> Transmogrifier.fix_emoji()
|> Transmogrifier.fix_content_map() |> Transmogrifier.fix_content_map()
end end

View File

@ -2,7 +2,7 @@
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/> # Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator do
use Ecto.Schema use Ecto.Schema
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
@ -55,9 +55,14 @@ defp find_attachment(url) do
url url
|> Enum.concat(mpeg_url["tag"] || []) |> Enum.concat(mpeg_url["tag"] || [])
|> Enum.find(fn |> Enum.find(fn
%{"mediaType" => mime_type} -> String.starts_with?(mime_type, ["video/", "audio/"]) %{"mediaType" => mime_type} ->
%{"mimeType" => mime_type} -> String.starts_with?(mime_type, ["video/", "audio/"]) String.starts_with?(mime_type, ["video/", "audio/", "image/"])
_ -> false
%{"mimeType" => mime_type} ->
String.starts_with?(mime_type, ["video/", "audio/", "image/"])
_ ->
false
end) end)
end end
@ -94,6 +99,7 @@ defp fix(data) do
data data
|> CommonFixes.fix_actor() |> CommonFixes.fix_actor()
|> CommonFixes.fix_object_defaults() |> CommonFixes.fix_object_defaults()
|> CommonFixes.fix_quote_url()
|> Transmogrifier.fix_emoji() |> Transmogrifier.fix_emoji()
|> fix_url() |> fix_url()
|> fix_content() |> fix_content()
@ -110,7 +116,7 @@ def changeset(struct, data) do
defp validate_data(data_cng) do defp validate_data(data_cng) do
data_cng data_cng
|> validate_inclusion(:type, ["Audio", "Video"]) |> validate_inclusion(:type, ~w[Audio Image Video])
|> validate_required([:id, :actor, :attributedTo, :type, :context]) |> validate_required([:id, :actor, :attributedTo, :type, :context])
|> CommonValidations.validate_any_presence([:cc, :to]) |> CommonValidations.validate_any_presence([:cc, :to])
|> CommonValidations.validate_fields_match([:actor, :attributedTo]) |> CommonValidations.validate_fields_match([:actor, :attributedTo])

View File

@ -27,7 +27,7 @@ defmacro activity_fields do
end end
end end
# All objects except Answer and CHatMessage # All objects except Answer and ChatMessage
defmacro object_fields do defmacro object_fields do
quote bind_quoted: binding() do quote bind_quoted: binding() do
field(:content, :string) field(:content, :string)
@ -58,7 +58,8 @@ defmacro status_object_fields do
field(:like_count, :integer, default: 0) field(:like_count, :integer, default: 0)
field(:announcement_count, :integer, default: 0) field(:announcement_count, :integer, default: 0)
field(:inReplyTo, ObjectValidators.ObjectID) field(:inReplyTo, ObjectValidators.ObjectID)
field(:url, ObjectValidators.Uri) field(:quoteUrl, ObjectValidators.ObjectID)
field(:url, ObjectValidators.BareUri)
field(:likes, {:array, ObjectValidators.ObjectID}, default: []) field(:likes, {:array, ObjectValidators.ObjectID}, default: [])
field(:announcements, {:array, ObjectValidators.ObjectID}, default: []) field(:announcements, {:array, ObjectValidators.ObjectID}, default: [])

View File

@ -10,6 +10,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Utils
require Pleroma.Constants
def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do
{:ok, data} = ObjectValidators.Recipients.cast(message[field] || field_fallback) {:ok, data} = ObjectValidators.Recipients.cast(message[field] || field_fallback)
@ -76,4 +78,48 @@ def fix_object_action_recipients(data, %Object{data: %{"actor" => actor}}) do
Map.put(data, "to", to) Map.put(data, "to", to)
end end
def fix_quote_url(%{"quoteUrl" => _quote_url} = data), do: data
# Fedibird
# https://github.com/fedibird/mastodon/commit/dbd7ae6cf58a92ec67c512296b4daaea0d01e6ac
def fix_quote_url(%{"quoteUri" => quote_url} = data) do
Map.put(data, "quoteUrl", quote_url)
end
# Old Fedibird (bug)
# https://github.com/fedibird/mastodon/issues/9
def fix_quote_url(%{"quoteURL" => quote_url} = data) do
Map.put(data, "quoteUrl", quote_url)
end
# Misskey fallback
def fix_quote_url(%{"_misskey_quote" => quote_url} = data) do
Map.put(data, "quoteUrl", quote_url)
end
def fix_quote_url(%{"tag" => [_ | _] = tags} = data) do
tag = Enum.find(tags, &is_object_link_tag/1)
if not is_nil(tag) do
data
|> Map.put("quoteUrl", tag["href"])
else
data
end
end
def fix_quote_url(data), do: data
# https://codeberg.org/fediverse/fep/src/branch/main/fep/e232/fep-e232.md
def is_object_link_tag(%{
"type" => "Link",
"mediaType" => media_type,
"href" => href
})
when media_type in Pleroma.Constants.activity_json_mime_types() and is_binary(href) do
true
end
def is_object_link_tag(_), do: false
end end
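For illustration, the vendor-specific quote fields above all normalize to "quoteUrl" (object IDs below are made up):

alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes

CommonFixes.fix_quote_url(%{"quoteUri" => "https://example.social/objects/1"})
# => %{"quoteUri" => "https://example.social/objects/1",
#      "quoteUrl" => "https://example.social/objects/1"}

CommonFixes.fix_quote_url(%{
  "tag" => [
    %{
      "type" => "Link",
      "mediaType" => "application/activity+json",
      "href" => "https://example.social/objects/1"
    }
  ]
})
# => the FEP-e232 Link tag is detected and "quoteUrl" is set to
#    "https://example.social/objects/1"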

View File

@ -136,11 +136,11 @@ def same_domain?(cng, fields \\ [:actor, :object]) do
# This figures out if a user is able to create, delete or modify something # This figures out if a user is able to create, delete or modify something
# based on the domain and superuser status # based on the domain and superuser status
@spec validate_modification_rights(Ecto.Changeset.t()) :: Ecto.Changeset.t() @spec validate_modification_rights(Ecto.Changeset.t(), atom()) :: Ecto.Changeset.t()
def validate_modification_rights(cng) do def validate_modification_rights(cng, privilege) do
actor = User.get_cached_by_ap_id(get_field(cng, :actor)) actor = User.get_cached_by_ap_id(get_field(cng, :actor))
if User.superuser?(actor) || same_domain?(cng) do if User.privileged?(actor, privilege) || same_domain?(cng) do
cng cng
else else
cng cng

View File

@ -61,7 +61,7 @@ defp validate_data(cng) do
|> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_required([:id, :type, :actor, :to, :cc, :object])
|> validate_inclusion(:type, ["Delete"]) |> validate_inclusion(:type, ["Delete"])
|> validate_delete_actor(:actor) |> validate_delete_actor(:actor)
|> validate_modification_rights() |> validate_modification_rights(:messages_delete)
|> validate_object_or_user_presence(allowed_types: @deletable_types) |> validate_object_or_user_presence(allowed_types: @deletable_types)
|> add_deleted_activity_id() |> add_deleted_activity_id()
end end

View File

@@ -5,8 +5,10 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
   use Ecto.Schema

+  alias Pleroma.Emoji
   alias Pleroma.Object
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
+  alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator

   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -19,6 +21,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
     import Elixir.Pleroma.Web.ActivityPub.ObjectValidators.CommonFields
     message_fields()
     activity_fields()
+    embeds_many(:tag, TagValidator)
   end
 end
@@ -43,7 +46,8 @@ def cast_data(data) do
   def changeset(struct, data) do
     struct
-    |> cast(data, __schema__(:fields))
+    |> cast(data, __schema__(:fields) -- [:tag])
+    |> cast_embed(:tag)
   end

   defp fix(data) do
@@ -53,12 +57,16 @@ defp fix(data) do
       |> CommonFixes.fix_actor()
       |> CommonFixes.fix_activity_addressing()

-    with %Object{} = object <- Object.normalize(data["object"]) do
-      data
-      |> CommonFixes.fix_activity_context(object)
-      |> CommonFixes.fix_object_action_recipients(object)
-    else
-      _ -> data
+    data = Map.put_new(data, "tag", [])
+
+    case Object.normalize(data["object"]) do
+      %Object{} = object ->
+        data
+        |> CommonFixes.fix_activity_context(object)
+        |> CommonFixes.fix_object_action_recipients(object)
+
+      _ ->
+        data
     end
   end
@@ -82,11 +90,31 @@ defp fix_emoji_qualification(data), do: data
   defp validate_emoji(cng) do
     content = get_field(cng, :content)

-    if Pleroma.Emoji.is_unicode_emoji?(content) do
+    if Emoji.is_unicode_emoji?(content) || Emoji.is_custom_emoji?(content) do
       cng
     else
       cng
-      |> add_error(:content, "must be a single character emoji")
+      |> add_error(:content, "is not a valid emoji")
+    end
+  end
+
+  defp maybe_validate_tag_presence(cng) do
+    content = get_field(cng, :content)
+
+    if Emoji.is_unicode_emoji?(content) do
+      cng
+    else
+      tag = get_field(cng, :tag)
+      emoji_name = Emoji.maybe_strip_name(content)
+
+      case tag do
+        [%{name: ^emoji_name, type: "Emoji", icon: %{url: _}}] ->
+          cng
+
+        _ ->
+          cng
+          |> add_error(:tag, "does not contain an Emoji tag")
+      end
     end
   end
@@ -97,5 +125,6 @@ defp validate_data(data_cng) do
     |> validate_actor_presence()
     |> validate_object_presence()
     |> validate_emoji()
+    |> maybe_validate_tag_presence()
   end
 end
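For orientation, a hypothetical EmojiReact payload (editor's example, not from the commit) shaped to line up with the checks above — a custom-emoji reaction must carry exactly one matching Emoji tag with an icon URL, while a plain Unicode reaction needs no tag:

# Hypothetical data; per maybe_validate_tag_presence/1, the single Emoji tag's
# name must equal the reaction content with its surrounding colons stripped
# (Emoji.maybe_strip_name/1).
%{
  "type" => "EmojiReact",
  "actor" => "https://example.com/users/alice",
  "object" => "https://example.com/objects/1",
  "content" => ":blobcat:",
  "tag" => [
    %{
      "type" => "Emoji",
      "name" => "blobcat",
      "icon" => %{"type" => "Image", "url" => "https://example.com/emoji/blobcat.png"}
    }
  ]
}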

View File

@@ -62,6 +62,7 @@ defp fix(data) do
     data
     |> CommonFixes.fix_actor()
     |> CommonFixes.fix_object_defaults()
+    |> CommonFixes.fix_quote_url()
     |> Transmogrifier.fix_emoji()
     |> fix_closed()
   end

View File

@@ -9,15 +9,20 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do
   import Ecto.Changeset

+  require Pleroma.Constants
+
   @primary_key false
   embedded_schema do
     # Common
     field(:type, :string)
     field(:name, :string)

-    # Mention, Hashtag
+    # Mention, Hashtag, Link
     field(:href, ObjectValidators.Uri)

+    # Link
+    field(:mediaType, :string)
+
     # Emoji
     embeds_one :icon, IconObjectValidator, primary_key: false do
       field(:type, :string)
@@ -68,6 +73,19 @@ def changeset(struct, %{"type" => "Emoji"} = data) do
     |> validate_required([:type, :name, :icon])
   end

+  def changeset(struct, %{"type" => "Link"} = data) do
+    struct
+    |> cast(data, [:type, :name, :mediaType, :href])
+    |> validate_inclusion(:mediaType, Pleroma.Constants.activity_json_mime_types())
+    |> validate_required([:type, :href, :mediaType])
+  end
+
+  def changeset(struct, %{"type" => _} = data) do
+    struct
+    |> cast(data, [])
+    |> Map.put(:action, :ignore)
+  end
+
   def icon_changeset(struct, data) do
     struct
     |> cast(data, [:type, :url])
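For orientation, a hypothetical FEP-e232 link tag (editor's example, not from the commit) that the new "Link" clause accepts; unrecognized tag types now fall through to the catch-all clause, whose changeset action is set to :ignore so they are skipped rather than failing validation:

# "application/activity+json" is one of Pleroma.Constants.activity_json_mime_types();
# :type, :href and :mediaType are required, :name is optional.
%{
  "type" => "Link",
  "name" => "RE: https://example.com/objects/1",
  "mediaType" => "application/activity+json",
  "href" => "https://example.com/objects/1"
}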

View File

@@ -199,7 +199,6 @@ def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
     inboxes =
       recipients
-      |> Enum.filter(&User.ap_enabled?/1)
       |> Enum.map(fn actor -> actor.inbox end)
       |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
       |> Instances.filter_reachable()
@@ -241,7 +240,6 @@ def publish(%User{} = actor, %Activity{} = activity) do
     json = Jason.encode!(data)

     recipients(actor, activity)
-    |> Enum.filter(fn user -> User.ap_enabled?(user) end)
     |> Enum.map(fn %User{} = user ->
       determine_inbox(activity, user)
     end)

View File

@@ -437,37 +437,13 @@ defp handle_update_object(
       end

       if orig_object_data["type"] in Pleroma.Constants.updatable_object_types() do
-        %{
-          updated_data: updated_object_data,
-          updated: updated,
-          used_history_in_new_object?: used_history_in_new_object?
-        } = Object.Updater.make_new_object_data_from_update_object(orig_object_data, updated_object)
-
-        changeset =
-          orig_object
-          |> Repo.preload(:hashtags)
-          |> Object.change(%{data: updated_object_data})
-
-        with {:ok, new_object} <- Repo.update(changeset),
-             {:ok, _} <- Object.invalid_object_cache(new_object),
-             {:ok, _} <- Object.set_cache(new_object),
-             # The metadata/utils.ex uses the object id for the cache.
-             {:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
-          if used_history_in_new_object? do
-            with create_activity when not is_nil(create_activity) <-
-                   Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
-                 {:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
-              nil
-            else
-              _ -> nil
-            end
-          end
-
-          if updated do
-            object
-            |> Activity.normalize()
-            |> ActivityPub.notify_and_stream()
-          end
-        end
+        {:ok, _, updated} =
+          Object.Updater.do_update_and_invalidate_cache(orig_object, updated_object)
+
+        if updated do
+          object
+          |> Activity.normalize()
+          |> ActivityPub.notify_and_stream()
+        end
       end
     end
@@ -529,7 +505,7 @@ def handle_object_creation(%{"type" => "Answer"} = object_map, _activity, meta)
   end

   def handle_object_creation(%{"type" => objtype} = object, _activity, meta)
-      when objtype in ~w[Audio Video Event Article Note Page] do
+      when objtype in ~w[Audio Video Image Event Article Note Page] do
     with {:ok, object, meta} <- Pipeline.common_pipeline(object, meta) do
       {:ok, object, meta}
     end

View File

@@ -20,7 +20,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
   alias Pleroma.Web.Federator
-  alias Pleroma.Workers.TransmogrifierWorker

   import Ecto.Query
@@ -167,6 +166,27 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)

   def fix_in_reply_to(object, _options), do: object

+  def fix_quote_url_and_maybe_fetch(object, options \\ []) do
+    quote_url =
+      case Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes.fix_quote_url(object) do
+        %{"quoteUrl" => quote_url} -> quote_url
+        _ -> nil
+      end
+
+    with {:quoting?, true} <- {:quoting?, not is_nil(quote_url)},
+         {:ok, quoted_object} <- get_obj_helper(quote_url, options),
+         %Activity{} <- Activity.get_create_by_object_ap_id(quoted_object.data["id"]) do
+      Map.put(object, "quoteUrl", quoted_object.data["id"])
+    else
+      {:quoting?, _} ->
+        object
+
+      e ->
+        Logger.warn("Couldn't fetch #{inspect(quote_url)}, error: #{inspect(e)}")
+        object
+    end
+  end
+
   defp prepare_in_reply_to(in_reply_to) do
     cond do
       is_bitstring(in_reply_to) ->
@@ -447,7 +467,7 @@ def handle_incoming(
         %{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
         options
       )
-      when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page} do
+      when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do
     fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)

     object =
@@ -455,6 +475,7 @@ def handle_incoming(
       |> strip_internal_fields()
       |> fix_type(fetch_options)
      |> fix_in_reply_to(fetch_options)
+      |> fix_quote_url_and_maybe_fetch(fetch_options)

     data = Map.put(data, "object", object)
     options = Keyword.put(options, :local, false)
@@ -629,6 +650,16 @@ def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_r

   def set_reply_to_uri(obj), do: obj

+  @doc """
+  Fedibird compatibility
+  https://github.com/fedibird/mastodon/commit/dbd7ae6cf58a92ec67c512296b4daaea0d01e6ac
+  """
+  def set_quote_url(%{"quoteUrl" => quote_url} = object) when is_binary(quote_url) do
+    Map.put(object, "quoteUri", quote_url)
+  end
+
+  def set_quote_url(obj), do: obj
+
   @doc """
   Serialized Mastodon-compatible `replies` collection containing _self-replies_.
   Based on Mastodon's ActivityPub::NoteSerializer#replies.
@@ -683,6 +714,7 @@ def prepare_object(object) do
     |> prepare_attachments
     |> set_conversation
     |> set_reply_to_uri
+    |> set_quote_url
     |> set_replies
     |> strip_internal_fields
     |> strip_internal_tags
@@ -946,47 +978,6 @@ defp strip_internal_tags(%{"tag" => tags} = object) do

   defp strip_internal_tags(object), do: object

-  def perform(:user_upgrade, user) do
-    # we pass a fake user so that the followers collection is stripped away
-    old_follower_address = User.ap_followers(%User{nickname: user.nickname})
-
-    from(
-      a in Activity,
-      where: ^old_follower_address in a.recipients,
-      update: [
-        set: [
-          recipients:
-            fragment(
-              "array_replace(?,?,?)",
-              a.recipients,
-              ^old_follower_address,
-              ^user.follower_address
-            )
-        ]
-      ]
-    )
-    |> Repo.update_all([])
-  end
-
-  def upgrade_user_from_ap_id(ap_id) do
-    with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
-         {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
-         {:ok, user} <- update_user(user, data) do
-      {:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end)
-      TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
-      {:ok, user}
-    else
-      %User{} = user -> {:ok, user}
-      e -> e
-    end
-  end
-
-  defp update_user(user, data) do
-    user
-    |> User.remote_user_changeset(data)
-    |> User.update_and_set_cache()
-  end
-
   def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
     Map.put(data, "url", url["href"])
   end
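For orientation, an illustrative sketch (editor's example, not from the commit) of the quote round trip: incoming third-party quote fields are normalized to the internal "quoteUrl" (and the quoted object fetched when possible), while outgoing objects get a Fedibird-compatible "quoteUri" mirrored back:

# Outgoing side: prepare_object/1 pipes through set_quote_url/1
Pleroma.Web.ActivityPub.Transmogrifier.set_quote_url(%{
  "type" => "Note",
  "quoteUrl" => "https://example.com/objects/1"
})
#=> %{"type" => "Note",
#     "quoteUrl" => "https://example.com/objects/1",
#     "quoteUri" => "https://example.com/objects/1"}

# Incoming side: fix_quote_url_and_maybe_fetch/2 resolves whatever quote field
# arrived (quoteUri, quoteURL, _misskey_quote, or a FEP-e232 Link tag) and, when
# the quoted object can be fetched, rewrites "quoteUrl" to that object's id.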

View File

@@ -31,7 +31,8 @@ defmodule Pleroma.Web.ActivityPub.Utils do
     "Page",
     "Question",
     "Answer",
-    "Audio"
+    "Audio",
+    "Image"
   ]

   @strip_status_report_states ~w(closed resolved)
   @supported_report_states ~w(open closed resolved)
@@ -325,21 +326,29 @@ def update_element_in_object(property, element, object, count \\ nil) do
           {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
   def add_emoji_reaction_to_object(
-        %Activity{data: %{"content" => emoji, "actor" => actor}},
+        %Activity{data: %{"content" => emoji, "actor" => actor}} = activity,
         object
       ) do
     reactions = get_cached_emoji_reactions(object)
+    emoji = Pleroma.Emoji.maybe_strip_name(emoji)
+    url = maybe_emoji_url(emoji, activity)

     new_reactions =
-      case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+      case Enum.find_index(reactions, fn [candidate, _, candidate_url] ->
+             if is_nil(candidate_url) do
+               emoji == candidate
+             else
+               url == candidate_url
+             end
+           end) do
         nil ->
-          reactions ++ [[emoji, [actor]]]
+          reactions ++ [[emoji, [actor], url]]

         index ->
           List.update_at(
             reactions,
             index,
-            fn [emoji, users] -> [emoji, Enum.uniq([actor | users])] end
+            fn [emoji, users, url] -> [emoji, Enum.uniq([actor | users]), url] end
           )
       end
@@ -348,18 +357,40 @@ def add_emoji_reaction_to_object(
     update_element_in_object("reaction", new_reactions, object, count)
   end

+  defp maybe_emoji_url(
+         name,
+         %Activity{
+           data: %{
+             "tag" => [
+               %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}
+             ]
+           }
+         }
+       ),
+       do: url
+
+  defp maybe_emoji_url(_, _), do: nil
+
   def emoji_count(reactions_list) do
-    Enum.reduce(reactions_list, 0, fn [_, users], acc -> acc + length(users) end)
+    Enum.reduce(reactions_list, 0, fn [_, users, _], acc -> acc + length(users) end)
   end

   def remove_emoji_reaction_from_object(
-        %Activity{data: %{"content" => emoji, "actor" => actor}},
+        %Activity{data: %{"content" => emoji, "actor" => actor}} = activity,
         object
       ) do
+    emoji = Pleroma.Emoji.maybe_strip_name(emoji)
     reactions = get_cached_emoji_reactions(object)
+    url = maybe_emoji_url(emoji, activity)

     new_reactions =
-      case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+      case Enum.find_index(reactions, fn [candidate, _, candidate_url] ->
+             if is_nil(candidate_url) do
+               emoji == candidate
+             else
+               url == candidate_url
+             end
+           end) do
         nil ->
           reactions
@@ -367,9 +398,9 @@ def remove_emoji_reaction_from_object(
           List.update_at(
             reactions,
             index,
-            fn [emoji, users] -> [emoji, List.delete(users, actor)] end
+            fn [emoji, users, url] -> [emoji, List.delete(users, actor), url] end
           )
-          |> Enum.reject(fn [_, users] -> Enum.empty?(users) end)
+          |> Enum.reject(fn [_, users, _] -> Enum.empty?(users) end)
       end

     count = emoji_count(new_reactions)
@@ -377,11 +408,7 @@ def remove_emoji_reaction_from_object(
   end

   def get_cached_emoji_reactions(object) do
-    if is_list(object.data["reactions"]) do
-      object.data["reactions"]
-    else
-      []
-    end
+    Object.get_emoji_reactions(object)
   end

   @spec add_like_to_object(Activity.t(), Object.t()) ::
@@ -489,17 +516,37 @@ def fetch_latest_undo(%User{ap_id: ap_id}) do
   def get_latest_reaction(internal_activity_id, %{ap_id: ap_id}, emoji) do
     %{data: %{"object" => object_ap_id}} = Activity.get_by_id(internal_activity_id)

+    emoji = Pleroma.Emoji.maybe_quote(emoji)
+
     "EmojiReact"
     |> Activity.Queries.by_type()
     |> where(actor: ^ap_id)
-    |> where([activity], fragment("?->>'content' = ?", activity.data, ^emoji))
+    |> custom_emoji_discriminator(emoji)
     |> Activity.Queries.by_object_id(object_ap_id)
     |> order_by([activity], fragment("? desc nulls last", activity.id))
     |> limit(1)
     |> Repo.one()
   end

+  defp custom_emoji_discriminator(query, emoji) do
+    if String.contains?(emoji, "@") do
+      stripped = Pleroma.Emoji.maybe_strip_name(emoji)
+      [name, domain] = String.split(stripped, "@")
+      domain_pattern = "%/" <> domain <> "/%"
+      emoji_pattern = Pleroma.Emoji.maybe_quote(name)
+
+      query
+      |> where([activity], fragment("?->>'content' = ?
+        AND EXISTS (
+          SELECT FROM jsonb_array_elements(?->'tag') elem
+          WHERE elem->>'id' ILIKE ?
+        )", activity.data, ^emoji_pattern, activity.data, ^domain_pattern))
+    else
+      query
+      |> where([activity], fragment("?->>'content' = ?", activity.data, ^emoji))
+    end
+  end
+
   #### Announce-related helpers

   @doc """

View File

@@ -18,13 +18,24 @@ defmodule Pleroma.Web.AdminAPI.FrontendController do
   def index(conn, _params) do
     installed = installed()

+    # FIrst get frontends from config,
+    # then add frontends that are installed but not in the config
     frontends =
-      [:frontends, :available]
-      |> Config.get([])
+      Config.get([:frontends, :available], [])
       |> Enum.map(fn {name, desc} ->
-        Map.put(desc, "installed", name in installed)
+        desc
+        |> Map.put("installed", name in installed)
+        |> Map.put("installed_refs", installed_refs(name))
       end)

+    frontends =
+      frontends ++
+        (installed
+         |> Enum.filter(fn n -> not Enum.any?(frontends, fn f -> f["name"] == n end) end)
+         |> Enum.map(fn name ->
+           %{"name" => name, "installed" => true, "installed_refs" => installed_refs(name)}
+         end))
+
     render(conn, "index.json", frontends: frontends)
   end
@@ -43,4 +54,12 @@ defp installed do
       []
     end
   end
+
+  def installed_refs(name) do
+    if name in installed() do
+      File.ls!(Path.join(Pleroma.Frontend.dir(), name))
+    else
+      []
+    end
+  end
 end
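For orientation (editor's example, not from the commit): an entry returned by the admin frontend index now looks roughly like this; the values are hypothetical, and "installed_refs" is simply File.ls!/1 of the frontend's directory under Pleroma.Frontend.dir(), per installed_refs/1 above:

%{
  "name" => "pleroma-fe",
  "git" => "https://git.pleroma.social/pleroma/pleroma-fe",
  "installed" => true,
  "installed_refs" => ["stable", "develop"]
}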

View File

@@ -31,7 +31,7 @@ def extract_report_info(
   defp make_fake_activity(act, user) do
     %Activity{
-      id: "pleroma:fake",
+      id: "pleroma:fake:#{act["id"]}",
       data: %{
         "actor" => user.ap_id,
         "type" => "Create",

View File

@@ -15,7 +15,8 @@ def render("show.json", %{frontend: frontend}) do
       git: frontend["git"],
       build_url: frontend["build_url"],
       ref: frontend["ref"],
-      installed: frontend["installed"]
+      installed: frontend["installed"],
+      installed_refs: frontend["installed_refs"]
     }
   end
 end

View File

@@ -10,6 +10,14 @@ defmodule Pleroma.Web.ApiSpec do
   @behaviour OpenApi

+  defp streaming_paths do
+    %{
+      "/api/v1/streaming" => %OpenApiSpex.PathItem{
+        get: Pleroma.Web.ApiSpec.StreamingOperation.streaming_operation()
+      }
+    }
+  end
+
   @impl OpenApi
   def spec(opts \\ []) do
     %OpenApi{
@@ -45,7 +53,7 @@ def spec(opts \\ []) do
         }
       },
       # populate the paths from a phoenix router
-      paths: OpenApiSpex.Paths.from_router(Router),
+      paths: Map.merge(streaming_paths(), OpenApiSpex.Paths.from_router(Router)),
       components: %OpenApiSpex.Components{
         parameters: %{
           "accountIdOrNickname" =>
@@ -95,7 +103,8 @@ def spec(opts \\ []) do
             "Relays",
             "Report managment",
             "Status administration",
-            "User administration"
+            "User administration",
+            "Announcement management"
           ]
         },
         %{"name" => "Applications", "tags" => ["Applications", "Push subscriptions"]},
@@ -110,10 +119,12 @@ def spec(opts \\ []) do
             "Follow requests",
             "Mascot",
             "Markers",
-            "Notifications"
+            "Notifications",
+            "Filters",
+            "Settings"
           ]
         },
-        %{"name" => "Instance", "tags" => ["Custom emojis"]},
+        %{"name" => "Instance", "tags" => ["Custom emojis", "Instance misc"]},
         %{"name" => "Messaging", "tags" => ["Chats", "Conversations"]},
         %{
           "name" => "Statuses",
@@ -125,10 +136,21 @@ def spec(opts \\ []) do
             "Retrieve status information",
             "Scheduled statuses",
             "Search",
-            "Status actions"
+            "Status actions",
+            "Media attachments"
           ]
         },
-        %{"name" => "Miscellaneous", "tags" => ["Emoji packs", "Reports", "Suggestions"]}
+        %{
+          "name" => "Miscellaneous",
+          "tags" => [
+            "Emoji packs",
+            "Reports",
+            "Suggestions",
+            "Announcements",
+            "Remote interaction",
+            "Others"
+          ]
+        }
       ]
     }
   }
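Aside (editor's note, not from the commit): the streaming path is injected by hand, presumably because it is not a regular Phoenix route, and Map.merge/2 keeps the right-hand value on a key collision, so router-derived paths would win if the key ever overlapped. A minimal illustration of that precedence:

Map.merge(%{"/api/v1/streaming" => :manual}, %{"/api/v1/streaming" => :router})
#=> %{"/api/v1/streaming" => :router}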

View File

@@ -452,7 +452,7 @@ def blocks_operation do
       operationId: "AccountController.blocks",
       description: "View your blocks. See also accounts/:id/{block,unblock}",
       security: [%{"oAuth" => ["read:blocks"]}],
-      parameters: pagination_params(),
+      parameters: [with_relationships_param() | pagination_params()],
       responses: %{
         200 => Operation.response("Accounts", "application/json", array_of_accounts())
       }
@@ -461,7 +461,7 @@ def blocks_operation do
   def lookup_operation do
     %Operation{
-      tags: ["Account lookup"],
+      tags: ["Retrieve account information"],
       summary: "Find a user by nickname",
       operationId: "AccountController.lookup",
       parameters: [

View File

@@ -17,7 +17,7 @@ def open_api_operation(action) do
   def index_operation do
     %Operation{
-      tags: ["Announcement managment"],
+      tags: ["Announcement management"],
       summary: "Retrieve a list of announcements",
       operationId: "AdminAPI.AnnouncementController.index",
       security: [%{"oAuth" => ["admin:read"]}],
@@ -46,7 +46,7 @@ def index_operation do
   def show_operation do
     %Operation{
-      tags: ["Announcement managment"],
+      tags: ["Announcement management"],
       summary: "Display one announcement",
       operationId: "AdminAPI.AnnouncementController.show",
       security: [%{"oAuth" => ["admin:read"]}],
@@ -69,7 +69,7 @@ def show_operation do
   def delete_operation do
     %Operation{
-      tags: ["Announcement managment"],
+      tags: ["Announcement management"],
       summary: "Delete one announcement",
       operationId: "AdminAPI.AnnouncementController.delete",
       security: [%{"oAuth" => ["admin:write"]}],
@@ -92,7 +92,7 @@ def delete_operation do
   def create_operation do
     %Operation{
-      tags: ["Announcement managment"],
+      tags: ["Announcement management"],
       summary: "Create one announcement",
       operationId: "AdminAPI.AnnouncementController.create",
       security: [%{"oAuth" => ["admin:write"]}],
@@ -107,7 +107,7 @@ def create_operation do
   def change_operation do
     %Operation{
-      tags: ["Announcement managment"],
+      tags: ["Announcement management"],
       summary: "Change one announcement",
       operationId: "AdminAPI.AnnouncementController.change",
       security: [%{"oAuth" => ["admin:write"]}],

View File

@@ -51,8 +51,9 @@ defp list_of_frontends do
         name: %Schema{type: :string},
         git: %Schema{type: :string, format: :uri, nullable: true},
         build_url: %Schema{type: :string, format: :uri, nullable: true},
-        ref: %Schema{type: :string},
-        installed: %Schema{type: :boolean}
+        ref: %Schema{type: :string, nullable: true},
+        installed: %Schema{type: :boolean},
+        installed_refs: %Schema{type: :array, items: %Schema{type: :string}}
       }
     }
   }

View File

@@ -70,7 +70,7 @@ def index_operation do
   def show_operation do
     %Operation{
-      tags: ["Status adminitration)"],
+      tags: ["Status administration"],
       summary: "Get status",
       operationId: "AdminAPI.StatusController.show",
       parameters: [id_param() | admin_api_params()],
@@ -84,7 +84,7 @@ def show_operation do
   def update_operation do
     %Operation{
-      tags: ["Status adminitration)"],
+      tags: ["Status administration"],
       summary: "Change the scope of a status",
       operationId: "AdminAPI.StatusController.update",
       parameters: [id_param() | admin_api_params()],
@@ -99,7 +99,7 @@ def update_operation do
   def delete_operation do
     %Operation{
-      tags: ["Status adminitration)"],
+      tags: ["Status administration"],
       summary: "Delete status",
       operationId: "AdminAPI.StatusController.delete",
       parameters: [id_param() | admin_api_params()],
@@ -143,7 +143,7 @@ def admin_account do
         }
       },
       tags: %Schema{type: :string},
-      is_confirmed: %Schema{type: :string}
+      is_confirmed: %Schema{type: :boolean}
     }
   }
 end

View File

@@ -15,7 +15,7 @@ def open_api_operation(action) do
   def index_operation do
     %Operation{
-      tags: ["Announcement"],
+      tags: ["Announcements"],
       summary: "Retrieve a list of announcements",
       operationId: "MastodonAPI.AnnouncementController.index",
       security: [%{"oAuth" => []}],
@@ -28,7 +28,7 @@ def index_operation do
   def mark_read_operation do
     %Operation{
-      tags: ["Announcement"],
+      tags: ["Announcements"],
       summary: "Mark one announcement as read",
       operationId: "MastodonAPI.AnnouncementController.mark_read",
       security: [%{"oAuth" => ["write:accounts"]}],

View File

@@ -17,7 +17,7 @@ def open_api_operation(action) do
   def index_operation do
     %Operation{
-      tags: ["Directory"],
+      tags: ["Others"],
       summary: "Profile directory",
       operationId: "DirectoryController.index",
       parameters:

View File

@@ -13,7 +13,7 @@ def open_api_operation(action) do
   def show_operation do
     %Operation{
-      tags: ["Instance"],
+      tags: ["Instance misc"],
       summary: "Retrieve instance information",
       description: "Information about the server",
       operationId: "InstanceController.show",
@@ -25,7 +25,7 @@ def show_operation do
   def peers_operation do
     %Operation{
-      tags: ["Instance"],
+      tags: ["Instance misc"],
       summary: "Retrieve list of known instances",
       operationId: "InstanceController.peers",
       responses: %{

View File

@@ -64,7 +64,13 @@ defp backup do
       content_type: %Schema{type: :string},
       file_name: %Schema{type: :string},
       file_size: %Schema{type: :integer},
-      processed: %Schema{type: :boolean}
+      processed: %Schema{type: :boolean, description: "whether this backup has succeeded"},
+      state: %Schema{
+        type: :string,
+        description: "the state of the backup",
+        enum: ["pending", "running", "complete", "failed"]
+      },
+      processed_number: %Schema{type: :integer, description: "the number of records processed"}
     },
     example: %{
       "content_type" => "application/zip",
@@ -72,7 +78,9 @@
         "https://cofe.fe:4000/media/backups/archive-foobar-20200908T164207-Yr7vuT5Wycv-sN3kSN2iJ0k-9pMo60j9qmvRCdDqIew.zip",
       "file_size" => 4105,
       "inserted_at" => "2020-09-08T16:42:07.000Z",
-      "processed" => true
+      "processed" => true,
+      "state" => "complete",
+      "processed_number" => 20
     }
   }
 end
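For orientation (editor's example, not from the commit): a backup that is still being generated would be reported with the new fields roughly like this; the values are hypothetical:

%{
  "content_type" => "application/zip",
  "file_size" => 0,
  "inserted_at" => "2020-09-08T16:42:07.000Z",
  "processed" => false,
  "state" => "running",
  "processed_number" => 4205
}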

Some files were not shown because too many files have changed in this diff.