diff --git a/.credo.exs b/.credo.exs
index b85898af3..a467f2951 100644
--- a/.credo.exs
+++ b/.credo.exs
@@ -83,6 +83,7 @@
# lanodan: I think PreferImplicitTry should be consistency, and the behaviour seems
# inconsistent, see: https://github.com/rrrene/credo/issues/224
{Credo.Check.Readability.PreferImplicitTry, false},
+ {Credo.Check.Readability.PipeIntoAnonymousFunctions, exit_status: 0},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
@@ -90,6 +91,7 @@
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
+ {Credo.Check.Readability.WithSingleClause, exit_status: 0},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
diff --git a/.dialyzer_ignore.exs b/.dialyzer_ignore.exs
new file mode 100644
index 000000000..865e7d782
--- /dev/null
+++ b/.dialyzer_ignore.exs
@@ -0,0 +1,6 @@
+[
+{"lib/cachex.ex", "Unknown type: Spec.cache/0."},
+{"lib/pleroma/web/plugs/rate_limiter.ex", "The pattern can never match the type {:commit, _} | {:ignore, _}."},
+{"lib/pleroma/web/plugs/rate_limiter.ex", "Function get_scale/2 will never be called."},
+{"lib/pleroma/web/plugs/rate_limiter.ex", "Function initialize_buckets!/1 will never be called."}
+]
diff --git a/.gitignore b/.gitignore
index da73b6f36..3b672184e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,6 +48,7 @@ docs/generated_config.md
# Code test coverage
/cover
/Elixir.*.coverdata
+/coverage.xml
.idea
pleroma.iml
@@ -56,5 +57,6 @@ pleroma.iml
.tool-versions
# Editor temp files
-/*~
-/*#
+*~
+*#
+*.swp
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e5664da68..21d7b2242 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,12 +1,22 @@
-image: git.pleroma.social:5050/pleroma/pleroma/ci-base
+image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24
variables: &global_variables
+ # Only used for the release
+ ELIXIR_VER: 1.13.4
POSTGRES_DB: pleroma_test
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
DB_HOST: postgres
+ DB_PORT: "5432"
MIX_ENV: test
+workflow:
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+ - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
+ when: never
+ - if: $CI_COMMIT_BRANCH
+
cache: &global_cache_policy
key:
files:
@@ -17,11 +27,14 @@ cache: &global_cache_policy
stages:
- build
+ - lint
- test
+ - check-changelog
- benchmark
- deploy
- release
- docker
+ - docker-combine
before_script:
- echo $MIX_ENV
@@ -31,22 +44,61 @@ before_script:
after_script:
- rm -rf _build/*/lib/pleroma
-build:
+check-changelog:
+ stage: check-changelog
+ image: alpine
+ rules:
+ - if: $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == 'pleroma/pleroma' && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == 'weblate-extract'
+ when: never
+ - if: $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == 'pleroma/pleroma' && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME == 'weblate'
+ when: never
+ - if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop"
+ before_script: ''
+ after_script: ''
+ cache: {}
+ script:
+ - apk add git
+ - sh ./tools/check-changelog
+
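The `check-changelog` job defers to `tools/check-changelog`, which is not included in this diff. A minimal sketch of such a check, assuming it only verifies that the MR adds at least one `changelog.d` entry (the real script may do more or differ entirely):

```sh
#!/bin/sh
# Sketch only — not the repository's actual tools/check-changelog.
# Fails the pipeline unless the MR adds a file under changelog.d/.
# CI_MERGE_REQUEST_TARGET_BRANCH_NAME is provided by GitLab for MR pipelines.
git fetch origin "$CI_MERGE_REQUEST_TARGET_BRANCH_NAME"
added=$(git diff --name-only --diff-filter=A "origin/$CI_MERGE_REQUEST_TARGET_BRANCH_NAME...HEAD" -- changelog.d/)
if [ -z "$added" ]; then
  echo "No changelog.d entry found; add one (or a .skip entry) as described in the MR template." >&2
  exit 1
fi
echo "Changelog entries found:"
echo "$added"
```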
+.build_changes_policy:
+ rules:
+ - changes:
+ - ".gitlab-ci.yml"
+ - "**/*.ex"
+ - "**/*.exs"
+ - "mix.lock"
+
+.using-ci-base:
+ tags:
+ - amd64
+
+build-1.13.4:
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
stage: build
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
+ script:
+ - mix compile --force
+
+build-1.15.7-otp-25:
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
+ stage: build
+ image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
+ allow_failure: true
script:
- mix compile --force
spec-build:
- stage: test
- only:
- changes:
- - "lib/pleroma/web/api_spec/**/*.ex"
- - "lib/pleroma/web/api_spec.ex"
+ extends:
+ - .using-ci-base
+ stage: build
+ rules:
+ - changes:
+ - ".gitlab-ci.yml"
+ - "lib/pleroma/web/api_spec/**/*.ex"
+ - "lib/pleroma/web/api_spec.ex"
artifacts:
paths:
- spec.json
@@ -54,12 +106,14 @@ spec-build:
- mix pleroma.openapi_spec spec.json
benchmark:
+ extends:
+ - .using-ci-base
stage: benchmark
when: manual
variables:
MIX_ENV: benchmark
services:
- - name: postgres:9.6
+ - name: postgres:11.22-alpine
alias: postgres
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
script:
@@ -67,130 +121,99 @@ benchmark:
- mix ecto.migrate
- mix pleroma.load_testing
-unit-testing:
+unit-testing-1.12.3:
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
stage: test
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
cache: &testing_cache_policy
<<: *global_cache_policy
policy: pull
-
- services:
- - name: postgres:13
+ services: &testing_services
+ - name: postgres:13-alpine
alias: postgres
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- script:
+ script: &testing_script
- mix ecto.create
- mix ecto.migrate
- - mix coveralls --preload-modules
+ - mix test --cover --preload-modules
+ coverage: '/^Line total: ([^ ]*%)$/'
+ artifacts:
+ reports:
+ coverage_report:
+ coverage_format: cobertura
+ path: coverage.xml
-unit-testing-erratic:
+unit-testing-1.15.7-otp-25:
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
+ stage: test
+ image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25
+ allow_failure: true
+ cache: *testing_cache_policy
+ services: *testing_services
+ script: *testing_script
+
+unit-testing-1.12-erratic:
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
stage: test
retry: 2
allow_failure: true
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
- cache: &testing_cache_policy
- <<: *global_cache_policy
- policy: pull
-
- services:
- - name: postgres:13
- alias: postgres
- command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
+ cache: *testing_cache_policy
+ services: *testing_services
script:
- mix ecto.create
- mix ecto.migrate
- mix test --only=erratic
-# Removed to fix CI issue. In this early state it wasn't adding much value anyway.
-# TODO Fix and reinstate federated testing
-# federated-testing:
-# stage: test
-# cache: *testing_cache_policy
-# services:
-# - name: minibikini/postgres-with-rum:12
-# alias: postgres
-# command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
-# script:
-# - mix deps.get
-# - mix ecto.create
-# - mix ecto.migrate
-# - epmd -daemon
-# - mix test --trace --only federated
-
-unit-testing-rum:
- stage: test
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
+formatting-1.13:
+ extends: .build_changes_policy
+ image: &formatting_elixir elixir:1.13-alpine
+ stage: lint
cache: *testing_cache_policy
- services:
- - name: minibikini/postgres-with-rum:12
- alias: postgres
- command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
- variables:
- <<: *global_variables
- RUM_ENABLED: "true"
- script:
- - mix ecto.create
- - mix ecto.migrate
- - "mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
- - mix test --preload-modules
-
-lint:
- image: elixir:1.12
- stage: test
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
- cache: *testing_cache_policy
- before_script:
+  before_script: &current_bfr_script
+ - apk update
+ - apk add build-base cmake file-dev git openssl
- mix local.hex --force
- mix local.rebar --force
- mix deps.get
script:
- mix format --check-formatted
+cycles-1.13:
+ extends: .build_changes_policy
+ image: *formatting_elixir
+ stage: lint
+ cache: {}
+ before_script: *current_bfr_script
+ script:
+ - mix compile
+ - mix xref graph --format cycles --label compile | awk '{print $0} END{exit ($0 != "No cycles found")}'
+
analysis:
- stage: test
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
+ stage: lint
cache: *testing_cache_policy
script:
- mix credo --strict --only=warnings,todo,fixme,consistency,readability
-cycles:
- stage: test
- image: elixir:1.11
- only:
- changes:
- - "**/*.ex"
- - "**/*.exs"
- - "mix.lock"
- cache: {}
- before_script:
- - mix local.hex --force
- - mix local.rebar --force
- - mix deps.get
- - apt-get update
- - apt-get install cmake libmagic-dev -y
+dialyzer:
+ extends:
+ - .build_changes_policy
+ - .using-ci-base
+ stage: lint
+ allow_failure: true
+ when: manual
+ cache: *testing_cache_policy
+ tags:
+ - feld
script:
- - mix compile
- - mix xref graph --format cycles --label compile | awk '{print $0} END{exit ($0 != "No cycles found")}'
+ - mix dialyzer
docs-deploy:
stage: deploy
@@ -202,7 +225,7 @@ docs-deploy:
before_script:
- apk add curl
script:
- - curl -X POST -F"token=$DOCS_PIPELINE_TRIGGER" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" https://git.pleroma.social/api/v4/projects/673/trigger/pipeline
+ - curl --fail-with-body -X POST -F"token=$CI_JOB_TOKEN" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" https://git.pleroma.social/api/v4/projects/673/trigger/pipeline
review_app:
image: alpine:3.9
stage: deploy
@@ -243,7 +266,7 @@ spec-deploy:
before_script:
- apk add curl
script:
- - curl -X POST -F"token=$API_DOCS_PIPELINE_TRIGGER" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" -F"variables[JOB_REF]=$CI_JOB_ID" https://git.pleroma.social/api/v4/projects/1130/trigger/pipeline
+ - curl --fail-with-body -X POST -F"token=$CI_JOB_TOKEN" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" -F"variables[JOB_REF]=$CI_JOB_ID" https://git.pleroma.social/api/v4/projects/1130/trigger/pipeline
stop_review_app:
@@ -266,12 +289,14 @@ stop_review_app:
amd64:
stage: release
- image: elixir:1.10.4
+ image: elixir:$ELIXIR_VER
only: &release-only
- stable@pleroma/pleroma
- develop@pleroma/pleroma
- /^maint/.*$/@pleroma/pleroma
- /^release/.*$/@pleroma/pleroma
+ tags:
+ - amd64
artifacts: &release-artifacts
name: "pleroma-$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA-$CI_JOB_NAME"
paths:
@@ -288,9 +313,10 @@ amd64:
- deps
variables: &release-variables
MIX_ENV: prod
+ VIX_COMPILATION_MODE: PLATFORM_PROVIDED_LIBVIPS
before_script: &before-release
- - apt-get update && apt-get install -y cmake libmagic-dev
- - echo "import Mix.Config" > config/prod.secret.exs
+ - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev
+ - echo "import Config" > config/prod.secret.exs
- mix local.hex --force
- mix local.rebar --force
script: &release
@@ -304,12 +330,14 @@ amd64-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
- image: elixir:1.10.4-alpine
+ image: elixir:$ELIXIR_VER-alpine
+ tags:
+ - amd64
cache: *release-cache
variables: *release-variables
before_script: &before-release-musl
- - apk add git gcc g++ musl-dev make cmake file-dev
- - echo "import Mix.Config" > config/prod.secret.exs
+ - apk add git build-base cmake file-dev openssl vips-dev
+ - echo "import Config" > config/prod.secret.exs
- mix local.hex --force
- mix local.rebar --force
script: *release
@@ -320,7 +348,7 @@ arm:
only: *release-only
tags:
- arm32-specified
- image: arm32v7/elixir:1.10.4
+ image: arm32v7/elixir:$ELIXIR_VER
cache: *release-cache
variables: *release-variables
before_script: *before-release
@@ -332,7 +360,7 @@ arm-musl:
only: *release-only
tags:
- arm32-specified
- image: arm32v7/elixir:1.10.4-alpine
+ image: arm32v7/elixir:$ELIXIR_VER-alpine
cache: *release-cache
variables: *release-variables
before_script: *before-release-musl
@@ -344,7 +372,7 @@ arm64:
only: *release-only
tags:
- arm
- image: arm64v8/elixir:1.10.4
+ image: arm64v8/elixir:$ELIXIR_VER
cache: *release-cache
variables: *release-variables
before_script: *before-release
@@ -356,110 +384,173 @@ arm64-musl:
only: *release-only
tags:
- arm
- image: arm64v8/elixir:1.10.4-alpine
+ image: arm64v8/elixir:$ELIXIR_VER-alpine
cache: *release-cache
variables: *release-variables
before_script: *before-release-musl
script: *release
-docker:
+.kaniko:
stage: docker
- image: docker:latest
+ image:
+ name: gcr.io/kaniko-project/executor:debug
+ entrypoint: [""]
cache: {}
dependencies: []
- variables: &docker-variables
- DOCKER_DRIVER: overlay2
- DOCKER_HOST: unix:///var/run/docker.sock
- IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
- IMAGE_TAG_SLUG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
- IMAGE_TAG_LATEST: $CI_REGISTRY_IMAGE:latest
- IMAGE_TAG_LATEST_STABLE: $CI_REGISTRY_IMAGE:latest-stable
- DOCKER_BUILDX_URL: https://github.com/docker/buildx/releases/download/v0.6.3/buildx-v0.6.3.linux-amd64
- DOCKER_BUILDX_HASH: 980e6b9655f971991fbbb5fd6cd19f1672386195
- before_script: &before-docker
- - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- - docker pull $IMAGE_TAG_SLUG || true
+ before_script: &before-kaniko
- export CI_JOB_TIMESTAMP=$(date --utc -Iseconds)
- export CI_VCS_REF=$CI_COMMIT_SHORT_SHA
- allow_failure: true
- script:
- - mkdir -p /root/.docker/cli-plugins
- - wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- - echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- - chmod +x ~/.docker/cli-plugins/docker-buildx
- - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- - docker buildx create --name mbuilder --driver docker-container --use
- - docker buildx inspect --bootstrap
- - docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG -t $IMAGE_TAG_LATEST .
- tags:
- - dind
+ - export IMAGE_TAG=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:$CI_COMMIT_SHORT_SHA
+ - export IMAGE_TAG_SLUG=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:$CI_COMMIT_REF_SLUG
+ - export IMAGE_TAG_LATEST=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:latest
+ - export IMAGE_TAG_LATEST_STABLE=$CI_REGISTRY_IMAGE/$BUILD_ARCH_IMG_SUFFIX:latest-stable
+ - mkdir -p /kaniko/.docker
+ - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
+
+.kaniko-latest:
+ extends: .kaniko
only:
- develop@pleroma/pleroma
-
-docker-stable:
- stage: docker
- image: docker:latest
- cache: {}
- dependencies: []
- variables: *docker-variables
- before_script: *before-docker
- allow_failure: true
script:
- - mkdir -p /root/.docker/cli-plugins
- - wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- - echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- - chmod +x ~/.docker/cli-plugins/docker-buildx
- - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- - docker buildx create --name mbuilder --driver docker-container --use
- - docker buildx inspect --bootstrap
- - docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG -t $IMAGE_TAG_LATEST_STABLE .
- tags:
- - dind
+ - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG --destination $IMAGE_TAG_LATEST
+
+.kaniko-stable:
+ extends: .kaniko
only:
- stable@pleroma/pleroma
+ script:
+ - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG --destination $IMAGE_TAG_LATEST_STABLE
-docker-release:
- stage: docker
- image: docker:latest
- cache: {}
- dependencies: []
- variables: *docker-variables
- before_script: *before-docker
- allow_failure: true
- script:
- script:
- - mkdir -p /root/.docker/cli-plugins
- - wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- - echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- - chmod +x ~/.docker/cli-plugins/docker-buildx
- - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- - docker buildx create --name mbuilder --driver docker-container --use
- - docker buildx inspect --bootstrap
- - docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG .
- tags:
- - dind
+.kaniko-release:
+ extends: .kaniko
only:
- /^release/.*$/@pleroma/pleroma
+ script:
+ - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG
-docker-adhoc:
- stage: docker
- image: docker:latest
- cache: {}
- dependencies: []
- variables: *docker-variables
- before_script: *before-docker
- allow_failure: true
- script:
- script:
- - mkdir -p /root/.docker/cli-plugins
- - wget "${DOCKER_BUILDX_URL}" -O ~/.docker/cli-plugins/docker-buildx
- - echo "${DOCKER_BUILDX_HASH} /root/.docker/cli-plugins/docker-buildx" | sha1sum -c
- - chmod +x ~/.docker/cli-plugins/docker-buildx
- - docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- - docker buildx create --name mbuilder --driver docker-container --use
- - docker buildx inspect --bootstrap
- - docker buildx build --platform linux/amd64,linux/arm/v7,linux/arm64/v8 --push --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG .
- tags:
- - dind
+.kaniko-adhoc:
+ extends: .kaniko
only:
- /^build-docker/.*$/@pleroma/pleroma
+ script:
+ - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --custom-platform=$BUILD_ARCH --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP --build-arg ELIXIR_IMG=$ELIXIR_IMG --destination $IMAGE_TAG --destination $IMAGE_TAG_SLUG
+
+.kaniko:linux/amd64:
+ variables:
+ BUILD_ARCH: linux/amd64
+ BUILD_ARCH_IMG_SUFFIX: linux-amd64
+ ELIXIR_IMG: hexpm/elixir
+ tags:
+ - amd64
+
+.kaniko:linux/arm64:
+ variables:
+ BUILD_ARCH: linux/arm64/v8
+ BUILD_ARCH_IMG_SUFFIX: linux-arm64-v8
+ ELIXIR_IMG: hexpm/elixir
+ tags:
+ - arm
+
+.kaniko:linux/arm:
+ variables:
+ BUILD_ARCH: linux/arm/v7
+ BUILD_ARCH_IMG_SUFFIX: linux-arm-v7
+ ELIXIR_IMG: git.pleroma.social:5050/pleroma/ci-image/elixir-linux-arm-v7
+ tags:
+ - arm32-specified
+
+kaniko-latest:linux/amd64:
+ extends:
+ - .kaniko-latest
+ - .kaniko:linux/amd64
+
+kaniko-latest:linux/arm64:
+ extends:
+ - .kaniko-latest
+ - .kaniko:linux/arm64
+
+kaniko-latest:linux/arm:
+ extends:
+ - .kaniko-latest
+ - .kaniko:linux/arm
+
+kaniko-stable:linux/amd64:
+ extends:
+ - .kaniko-stable
+ - .kaniko:linux/amd64
+
+kaniko-stable:linux/arm64:
+ extends:
+ - .kaniko-stable
+ - .kaniko:linux/arm64
+
+kaniko-stable:linux/arm:
+ extends:
+ - .kaniko-stable
+ - .kaniko:linux/arm
+
+kaniko-release:linux/amd64:
+ extends:
+ - .kaniko-release
+ - .kaniko:linux/amd64
+
+kaniko-release:linux/arm64:
+ extends:
+ - .kaniko-release
+ - .kaniko:linux/arm64
+
+kaniko-release:linux/arm:
+ extends:
+ - .kaniko-release
+ - .kaniko:linux/arm
+
+.docker-combine:
+ stage: docker-combine
+ image: docker:cli
+ cache: {}
+ before_script:
+ - 'BUILD_ARCHES="linux-amd64 linux-arm64-v8 linux-arm-v7"'
+ - export IMAGE_TAG=$CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
+ - export IMAGE_TAG_SLUG=$CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
+ - export IMAGE_TAG_LATEST=$CI_REGISTRY_IMAGE:latest
+ - export IMAGE_TAG_LATEST_STABLE=$CI_REGISTRY_IMAGE:latest-stable
+ - 'IMAGES=; for arch in $BUILD_ARCHES; do IMAGES="$IMAGES $CI_REGISTRY_IMAGE/$arch:$CI_COMMIT_SHORT_SHA"; done'
+ - 'IMAGES_SLUG=; for arch in $BUILD_ARCHES; do IMAGES_SLUG="$IMAGES_SLUG $CI_REGISTRY_IMAGE/$arch:$CI_COMMIT_REF_SLUG"; done'
+ - 'IMAGES_LATEST=; for arch in $BUILD_ARCHES; do IMAGES_LATEST="$IMAGES_LATEST $CI_REGISTRY_IMAGE/$arch:latest"; done'
+    - 'IMAGES_LATEST_STABLE=; for arch in $BUILD_ARCHES; do IMAGES_LATEST_STABLE="$IMAGES_LATEST_STABLE $CI_REGISTRY_IMAGE/$arch:latest-stable"; done'
+ - mkdir -p ~/.docker
+ - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > ~/.docker/config.json
+
+docker-combine:latest:
+ extends: .docker-combine
+ only:
+ - develop@pleroma/pleroma
+ script:
+ - 'docker manifest create $IMAGE_TAG $IMAGES'
+ - 'docker manifest push $IMAGE_TAG'
+ - 'docker manifest create $IMAGE_TAG_SLUG $IMAGES_SLUG'
+ - 'docker manifest push $IMAGE_TAG_SLUG'
+ - 'docker manifest create $IMAGE_TAG_LATEST $IMAGES_LATEST'
+ - 'docker manifest push $IMAGE_TAG_LATEST'
+
+docker-combine:stable:
+ extends: .docker-combine
+ only:
+ - stable@pleroma/pleroma
+ script:
+ - 'docker manifest create $IMAGE_TAG $IMAGES'
+ - 'docker manifest push $IMAGE_TAG'
+ - 'docker manifest create $IMAGE_TAG_SLUG $IMAGES_SLUG'
+ - 'docker manifest push $IMAGE_TAG_SLUG'
+ - 'docker manifest create $IMAGE_TAG_LATEST_STABLE $IMAGES_LATEST_STABLE'
+ - 'docker manifest push $IMAGE_TAG_LATEST_STABLE'
+
+docker-combine:release:
+ extends: .docker-combine
+ only:
+ - /^release/.*$/@pleroma/pleroma
+ script:
+ - 'docker manifest create $IMAGE_TAG $IMAGES'
+ - 'docker manifest push $IMAGE_TAG'
+ - 'docker manifest create $IMAGE_TAG_SLUG $IMAGES_SLUG'
+ - 'docker manifest push $IMAGE_TAG_SLUG'
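Once the per-architecture kaniko jobs have pushed their images and a `docker-combine` job has pushed the manifest list, the result can be checked with a plain manifest inspection. A sketch (registry path inferred from the `$CI_REGISTRY_IMAGE` used above; requires `docker login` against the registry first):

```sh
# Each kaniko-built arch image should show up under "manifests"
# with its platform: linux/amd64, linux/arm64/v8 and linux/arm/v7.
docker manifest inspect git.pleroma.social:5050/pleroma/pleroma:latest
```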
diff --git a/.gitlab/merge_request_templates/Default.md b/.gitlab/merge_request_templates/Default.md
new file mode 100644
index 000000000..641d9cfd8
--- /dev/null
+++ b/.gitlab/merge_request_templates/Default.md
@@ -0,0 +1,10 @@
+### Checklist
+- [ ] Adding a changelog: In the `changelog.d` directory, create a file named `<code>.<type>`.
+
+  `<code>` can be anything, but we recommend using a more or less unique identifier to avoid collisions, such as the branch name.
+
+  `<type>` can be `add`, `change`, `remove`, `fix`, `security` or `skip`. `skip` is only used if there is no user-visible change in the MR (for example, only editing comments in the code). Otherwise, choose a type that corresponds to your change.
+
+  In the file, write the changelog entry. For example, if an MR adds group functionality, we can create a file named `group.add` and write `Add group functionality` in it.
+
+  If one changelog entry is not enough, you may add more, but that might mean the change could be split into two MRs. Only use more than one changelog entry if you really need to (for example, when one change in the code fixes two different bugs, or when refactoring).
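As a concrete illustration of the convention above (file names are examples only, following the branch-name suggestion):

```sh
# On a branch named "group", record a user-visible addition:
echo "Add group functionality" > changelog.d/group.add

# For an MR with no user-visible change, an empty .skip entry is enough:
touch changelog.d/my-branch.skip
```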
diff --git a/.gitlab/merge_request_templates/Release.md b/.gitlab/merge_request_templates/Release.md
index b2c772696..e57556e6c 100644
--- a/.gitlab/merge_request_templates/Release.md
+++ b/.gitlab/merge_request_templates/Release.md
@@ -1,6 +1,8 @@
### Release checklist
-* [ ] Bump version in `mix.exs`
-* [ ] Compile a changelog
-* [ ] Create an MR with an announcement to pleroma.social
-* [ ] Tag the release
+* [ ] Bump version in `mix.exs`
+* [ ] Compile a changelog with the `tools/collect-changelog` script
+* [ ] Create an MR with an announcement to pleroma.social
+#### post-merge
+* [ ] Tag the release on the merge commit
+* [ ] Make the tag into a Gitlab Release™
* [ ] Merge `stable` into `develop` (in case the fixes are already in develop, use `git merge -s ours --no-commit` and manually merge the changelogs)
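A sketch of the post-merge tagging steps from this checklist (version number and remote name are placeholders):

```sh
# Tag the release on the merge commit of stable and push the tag;
# the tag can then be turned into a GitLab Release from the UI or API.
git checkout stable
git pull origin stable
git tag -a v2.6.2 -m "Pleroma 2.6.2"
git push origin v2.6.2
```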
diff --git a/.rgignore b/.rgignore
new file mode 100644
index 000000000..975056b6d
--- /dev/null
+++ b/.rgignore
@@ -0,0 +1 @@
+priv/static
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b2185b1ba..063d51d4c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,22 +4,152 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
-## Unreleased
+## 2.6.2
+
+### Security
+- MRF StealEmojiPolicy: Sanitize shortcodes (thanks to Hazel K for the report)
+
+## 2.6.1
+### Changed
+- Document maximum supported version of Erlang & Elixir
+
+### Added
+- [docs] add frontends management documentation
+
+### Fixed
+- TwitterAPI: Return proper error when healthcheck is disabled
+- Fix eblurhash and elixir-captcha not using system cflags
+
+## 2.6.0
+### Security
+- Preload: Make generated JSON HTML-safe. It already was HTML-safe because it only consists of config data that is base64 encoded, but this will keep it safe if that ever changes.
+- CommonAPI: Prevent users from accessing media of other users by creating a status with reused attachment ID
+- Disable XML entity resolution completely to fix a dos vulnerability
+
+### Added
+- Support for Image activities, namely from Hubzilla
+- Add OAuth scope descriptions
+- Allow lang attribute in status text
+- OnlyMedia Upload Filter
+- Implement MRF policy to reject or delist according to emojis
+- (hardening) Add no_new_privs=yes to OpenRC service files
+- Implement quotes
+- Add unified streaming endpoint
+
+### Fixed
+- rel="me" was missing its cache
+- MediaProxy responses now return a sandbox CSP header
+- Filter context activities using Visibility.visible_for_user?
+- UploadedMedia: Add missing disposition_type to Content-Disposition
+- fix not being able to fetch flash file from remote instance
+- Fix abnormal behaviour when refetching a poll
+- Allow non-HTTP(s) URIs in "url" fields for compatibility with "FEP-fffd: Proxy Objects"
+- Fix opengraph and twitter card meta tags
+- ForceMentionsInContent: fix double mentions for Mastodon/Misskey posts
+- OEmbed HTML tags are now filtered
+- Restrict attachments to uploaded files only
+- Fix error 404 when deleting status of a banned user
+- Fix config ownership in dockerfile to pass restriction test
+- Fix user fetch completely broken if featured collection is not in a supported form
+- Correctly handle the situation when a poll has both "anyOf" and "oneOf", but one of them is empty
+- Fix handling report from a deactivated user
+- Prevent using the .json format to bypass authorized fetch mode
+- Fix mentioning punycode domains when using Markdown
+- Show more informative errors when profile exceeds char limits
+
+### Removed
+- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
+- remove BBS/SSH feature, replaced by an external bridge.
+- Remove a few unused indexes.
+- Cleanup OStatus-era user upgrades and ap_enabled indicator
+- Deprecate Pleroma's audio scrobbling
+
+## 2.5.5
+
+### Security
+- Prevent users from accessing media of other users by creating a status with reused attachment ID
+
+## 2.5.4
+
+### Security
+- Fix XML External Entity (XXE) loading vulnerability allowing to fetch arbitrary files from the server's filesystem
+
+## 2.5.3
+
+### Security
+- Emoji pack loader sanitizes pack names
+- Reduced permissions of config files and directories, distros requiring greater permissions like group-read need to pre-create the directories
+
+## 2.5.2
+
+### Security
+- `/proxy` endpoint now sets a Content-Security-Policy (sandbox)
+- WebSocket endpoint now respects unauthenticated restrictions for streams of public posts
+- OEmbed HTML tags are now filtered
+
+### Changed
+- docs: Be more explicit about the level of compatibility of OTP releases
+- Set default background worker timeout to 15 minutes
+
+### Fixed
+- Atom/RSS formatting (HTML truncation, published, missing summary)
+- Remove `static_fe` pipeline for `/users/:nickname/feed`
+- Stop oban from retrying if validating errors occur when processing incoming data
+- Make sure object refetching as used by already received polls follows MRF rules
+
+### Removed
+- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
+
+## 2.5.1
+
+### Added
+- Allow customizing instance languages
+
+### Fixed
+- Security: the upload HTTP endpoint can no longer create directories in the upload dir (internal APIs, like backup, can still do it)
+- The ~ character in URLs in Markdown posts is handled properly
+- Exiftool upload filter will now ignore SVG files
+- Fix `block_from_stranger` setting
+- Fix rel="me"
+- Docker images will now run properly
+- Fix improper content being cached in report content
+- Notification filter on object content will not operate on the ones that inherently have no content
+- ZWNJ and double dots in links are parsed properly for Plain-text posts
+- OTP releases will work on systems with a newer libcrypt
+- Errors when running Exiftool.ReadDescription filter will not be filled into the image description
+
+## 2.5.0 - 2022-12-23
### Removed
- MastoFE
+- Quack, the logging backend that pushes to Slack channels
### Changed
+- **Breaking:** Elixir >=1.11 is now required (was >= 1.9)
- Allow users to remove their emails if instance does not need email to register
- Uploadfilter `Pleroma.Upload.Filter.Exiftool` has been renamed to `Pleroma.Upload.Filter.Exiftool.StripLocation`
+- **Breaking**: `/api/v1/pleroma/backups` endpoints now require the `read:backups` scope instead of `read:accounts`
+- Updated the recommended pleroma.vcl configuration for Varnish to target Varnish 7.0+
+- Set timeout values for Oban queues. The default is infinity and some operations may not time out on their own.
+- Delete activities are federated at lowest priority
+- CSP now includes wasm-unsafe-eval
### Added
- `activeMonth` and `activeHalfyear` fields in NodeInfo usage.users object
- Experimental support for Finch. Put `config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}` in your secrets file to use it. Reverse Proxy will still use Hackney.
- `ForceMentionsInPostContent` MRF policy
-- AdminAPI: allow moderators to manage reports, users, invites, and custom emojis
-- AdminAPI: restrict moderators to access sensitive data: change user credentials, get password reset token, read private statuses and chats, etc
- PleromaAPI: Add remote follow API endpoint at `POST /api/v1/pleroma/remote_interaction`
- MastoAPI: Add `GET /api/v1/accounts/lookup`
- MastoAPI: Profile Directory support
@@ -31,8 +161,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Configuration: Add `birthday_required` and `birthday_min_age` settings to provide a way to require users to enter their birth date.
- PleromaAPI: Add `GET /api/v1/pleroma/birthdays` API endpoint
- Make backend-rendered pages translatable. This includes emails. Pages returned as a HTTP response are translated using the language specified in the `userLanguage` cookie, or the `Accept-Language` header. Emails are translated using the `language` field when registering. This language can be changed by `PATCH /api/v1/accounts/update_credentials` with the `language` field.
+- Add fine grained options to provide privileges to moderators and admins (e.g. delete messages, manage reports...)
- Uploadfilter `Pleroma.Upload.Filter.Exiftool.ReadDescription` returns description values to the FE so they can pre fill the image description field
-- Uploader: Add support for uploading attachments using IPFS
+- Added move account API
+- Enable remote users to interact with posts
+- Possibility to discover users like `user@example.org`, while Pleroma is working on `pleroma.example.org`. Additional configuration required.
### Fixed
- Subscription(Bell) Notifications: Don't create from Pipeline Ingested replies
@@ -48,8 +181,30 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Fixed crash when pinned_objects is nil
- Fixed slow timelines when there are a lot of deactivated users
- Fixed account deletion API
+- Fixed lowercase HTTP HEAD method in the Media Proxy Preview code
+- Removed useless notification call on Delete activities
+- Improved performance for filtering out deactivated and invisible users
+- RSS and Atom feeds for users work again
+- TwitterCard meta tags conformance
-### Removed
+## 2.4.5 - 2022-11-27
+
+### Fixed
+- Image `class` attributes not being scrubbed, which allowed exploiting frontend special classes [!3792](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3792)
+- Delete report notifs when demoting from superuser [!3642](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3642)
+- Validate `mediaType` only by its format rather than using a list [!3597](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3597)
+- Pagination: Make mutes and blocks lists behave the same as other lists [!3693](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3693)
+- Compatibility with Elixir 1.14 [!3740](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3740)
+- Frontend installer: FediFE build URL [!3736](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3736)
+- Streaming: Don't stream ChatMessage into the home timeline [!3738](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3738)
+- Streaming: Stream local-only posts in the local timeline [!3738](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3738)
+- Signatures: Fix `keyId` lookup for GoToSocial [!3725](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3725)
+- Validator: Fix `replies` handling for GoToSocial [!3725](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3725)
+
+## 2.4.4 - 2022-08-19
+
+### Security
+- Streaming API sessions will now properly disconnect if the corresponding token is revoked
## 2.4.3 - 2022-05-06
@@ -634,7 +789,7 @@ switched to a new configuration mechanism, however it was not officially removed
- Rate limiter crashes when there is no explicitly specified ip in the config
- 500 errors when no `Accept` header is present if Static-FE is enabled
- Instance panel not being updated immediately due to wrong `Cache-Control` headers
-- Statuses posted with BBCode/Markdown having unncessary newlines in Pleroma-FE
+- Statuses posted with BBCode/Markdown having unnecessary newlines in Pleroma-FE
- OTP: Fix some settings not being migrated to in-database config properly
- No `Cache-Control` headers on attachment/media proxy requests
- Character limit enforcement being off by 1
@@ -954,10 +1109,10 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
- Reverse Proxy limiting `max_body_length` was incorrectly defined and only checked `Content-Length` headers which may not be sufficient in some circumstances
### Added
-- Expiring/ephemeral activites. All activities can have expires_at value set, which controls when they should be deleted automatically.
+- Expiring/ephemeral activities. All activities can have expires_at value set, which controls when they should be deleted automatically.
- Mastodon API: in post_status, the expires_in parameter lets you set the number of seconds until an activity expires. It must be at least one hour.
- Mastodon API: all status JSON responses contain a `pleroma.expires_at` item which states when an activity will expire. The value is only shown to the user who created the activity. To everyone else it's empty.
-- Configuration: `ActivityExpiration.enabled` controls whether expired activites will get deleted at the appropriate time. Enabled by default.
+- Configuration: `ActivityExpiration.enabled` controls whether expired activities will get deleted at the appropriate time. Enabled by default.
- Conversations: Add Pleroma-specific conversation endpoints and status posting extensions. Run the `bump_all_conversations` task again to create the necessary data.
- MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`)
- MRF: Support for excluding specific domains from Transparency.
diff --git a/Dockerfile b/Dockerfile
index c51ebbab0..72461305c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,18 +1,24 @@
-FROM elixir:1.9-alpine as build
+ARG ELIXIR_IMG=hexpm/elixir
+ARG ELIXIR_VER=1.13.4
+ARG ERLANG_VER=24.3.4.15
+ARG ALPINE_VER=3.17.5
+
+FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build
COPY . .
ENV MIX_ENV=prod
+ENV VIX_COMPILATION_MODE=PLATFORM_PROVIDED_LIBVIPS
-RUN apk add git gcc g++ musl-dev make cmake file-dev &&\
- echo "import Mix.Config" > config/prod.secret.exs &&\
+RUN apk add git gcc g++ musl-dev make cmake file-dev vips-dev &&\
+ echo "import Config" > config/prod.secret.exs &&\
mix local.hex --force &&\
mix local.rebar --force &&\
mix deps.get --only prod &&\
mkdir release &&\
mix release --path release
-FROM alpine:3.14
+FROM alpine:${ALPINE_VER}
ARG BUILD_DATE
ARG VCS_REF
@@ -32,7 +38,7 @@ ARG HOME=/opt/pleroma
ARG DATA=/var/lib/pleroma
RUN apk update &&\
- apk add exiftool ffmpeg imagemagick libmagic ncurses postgresql-client &&\
+ apk add exiftool ffmpeg vips libmagic ncurses postgresql-client &&\
adduser --system --shell /bin/false --home ${HOME} pleroma &&\
mkdir -p ${DATA}/uploads &&\
mkdir -p ${DATA}/static &&\
@@ -44,7 +50,7 @@ USER pleroma
COPY --from=build --chown=pleroma:0 /release ${HOME}
-COPY ./config/docker.exs /etc/pleroma/config.exs
+COPY --chown=pleroma --chmod=640 ./config/docker.exs /etc/pleroma/config.exs
COPY ./docker-entrypoint.sh ${HOME}
EXPOSE 4000
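The kaniko jobs in `.gitlab-ci.yml` pass `ELIXIR_IMG`, `VCS_REF` and `BUILD_DATE` as build arguments; the same Dockerfile can be exercised locally with a plain `docker build`. A sketch using the defaults declared by the `ARG`s above:

```sh
docker build \
  --build-arg ELIXIR_IMG=hexpm/elixir \
  --build-arg VCS_REF="$(git rev-parse --short HEAD)" \
  --build-arg BUILD_DATE="$(date --utc -Iseconds)" \
  -t pleroma:local .
```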
diff --git a/README.md b/README.md
index 25fde90b9..2837b6ef8 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,8 @@ If your platform is not supported, or you just want to be able to edit the sourc
- [OpenBSD (fi)](https://docs-develop.pleroma.social/backend/installation/openbsd_fi/)
### OS/Distro packages
-Currently Pleroma is packaged for [YunoHost](https://yunohost.org). If you want to package Pleroma for any OS/Distros, we can guide you through the process on our [community channels](#community-channels). If you want to change default options in your Pleroma package, please **discuss it with us first**.
+Currently Pleroma is packaged for [YunoHost](https://yunohost.org), [NixOS](https://nixos.org), [Gentoo through GURU](https://gentoo.org/) and [Archlinux through AUR](https://aur.archlinux.org/packages/pleroma). You may find more at .
+If you want to package Pleroma for any OS/Distros, we can guide you through the process on our [community channels](#community-channels). If you want to change default options in your Pleroma package, please **discuss it with us first**.
### Docker
While we don’t provide docker files, other people have written very good ones. Take a look at or .
diff --git a/lib/mix/tasks/pleroma/benchmark.ex b/benchmarks/mix/tasks/pleroma/benchmark.ex
similarity index 93%
rename from lib/mix/tasks/pleroma/benchmark.ex
rename to benchmarks/mix/tasks/pleroma/benchmark.ex
index f32492169..42b28478d 100644
--- a/lib/mix/tasks/pleroma/benchmark.ex
+++ b/benchmarks/mix/tasks/pleroma/benchmark.ex
@@ -3,8 +3,20 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Benchmark do
- import Mix.Pleroma
+ @shortdoc "Benchmarks"
+ @moduledoc """
+ Benchmark tasks available:
+
+ adapters
+ render_timeline
+ search
+ tag
+
+ MIX_ENV=benchmark mix pleroma.benchmark adapters
+ """
+
use Mix.Task
+ import Mix.Pleroma
def run(["search"]) do
start_pleroma()
@@ -63,7 +75,7 @@ def run(["render_timeline", nickname | _] = args) do
Benchee.run(
%{
- "Standart rendering" => fn activities ->
+ "Standard rendering" => fn activities ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: activities,
for: user,
diff --git a/changelog.d/2.6.0-mergeback.skip b/changelog.d/2.6.0-mergeback.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/2.6.1-mergeback.skip b/changelog.d/2.6.1-mergeback.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/3900.change b/changelog.d/3900.change
new file mode 100644
index 000000000..fe0cc2fbf
--- /dev/null
+++ b/changelog.d/3900.change
@@ -0,0 +1 @@
+Update to Phoenix 1.7
diff --git a/changelog.d/3987.fix b/changelog.d/3987.fix
new file mode 100644
index 000000000..5d578cc09
--- /dev/null
+++ b/changelog.d/3987.fix
@@ -0,0 +1 @@
+Remove checking ImageMagick's commands for Pleroma.Upload.Filter.AnalyzeMetadata
diff --git a/changelog.d/account-rendering-auth-check.fix b/changelog.d/account-rendering-auth-check.fix
new file mode 100644
index 000000000..12f68e454
--- /dev/null
+++ b/changelog.d/account-rendering-auth-check.fix
@@ -0,0 +1 @@
+Fix authentication check on account rendering when bio is defined
diff --git a/changelog.d/add-ipfs-upload.add b/changelog.d/add-ipfs-upload.add
new file mode 100644
index 000000000..0cd1f2858
--- /dev/null
+++ b/changelog.d/add-ipfs-upload.add
@@ -0,0 +1 @@
+Uploader: Add support for uploading attachments using IPFS
diff --git a/changelog.d/add-outbox.fix b/changelog.d/add-outbox.fix
new file mode 100644
index 000000000..f3de5338d
--- /dev/null
+++ b/changelog.d/add-outbox.fix
@@ -0,0 +1 @@
+ap userview: add outbox field.
diff --git a/changelog.d/anonymous-exception-else.fix b/changelog.d/anonymous-exception-else.fix
new file mode 100644
index 000000000..38d5d1be5
--- /dev/null
+++ b/changelog.d/anonymous-exception-else.fix
@@ -0,0 +1 @@
+Fix #strip_report_status_data
diff --git a/changelog.d/api-docs-2.skip b/changelog.d/api-docs-2.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/api-docs.skip b/changelog.d/api-docs.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/atom-leak.skip b/changelog.d/atom-leak.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/authorize-interaction.add b/changelog.d/authorize-interaction.add
new file mode 100644
index 000000000..8692209e1
--- /dev/null
+++ b/changelog.d/authorize-interaction.add
@@ -0,0 +1 @@
+Support /authorize-interaction route used by Mastodon
\ No newline at end of file
diff --git a/changelog.d/backups-follows.add b/changelog.d/backups-follows.add
new file mode 100644
index 000000000..a55c436f6
--- /dev/null
+++ b/changelog.d/backups-follows.add
@@ -0,0 +1 @@
+Include following/followers in backups
\ No newline at end of file
diff --git a/changelog.d/bad_inbox_request.change b/changelog.d/bad_inbox_request.change
new file mode 100644
index 000000000..b81f60638
--- /dev/null
+++ b/changelog.d/bad_inbox_request.change
@@ -0,0 +1 @@
+Invalid activities delivered to the inbox will be rejected with a 400 Bad Request
diff --git a/changelog.d/bandit.change b/changelog.d/bandit.change
new file mode 100644
index 000000000..7a1104314
--- /dev/null
+++ b/changelog.d/bandit.change
@@ -0,0 +1 @@
+Support Bandit as an alternative to Cowboy for the HTTP server.
diff --git a/changelog.d/bare_uri_test.skip b/changelog.d/bare_uri_test.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/benchee.skip b/changelog.d/benchee.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/blurhash.change b/changelog.d/blurhash.change
new file mode 100644
index 000000000..4276eb164
--- /dev/null
+++ b/changelog.d/blurhash.change
@@ -0,0 +1 @@
+Replace eblurhash with rinpatch_blurhash. This also removes a dependency on ImageMagick.
diff --git a/changelog.d/bookmark-folders.add b/changelog.d/bookmark-folders.add
new file mode 100644
index 000000000..d9b03cecc
--- /dev/null
+++ b/changelog.d/bookmark-folders.add
@@ -0,0 +1 @@
+Allow to group bookmarks in folders
\ No newline at end of file
diff --git a/changelog.d/bookmark-folders.skip b/changelog.d/bookmark-folders.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/bugfix-ccworks.fix b/changelog.d/bugfix-ccworks.fix
new file mode 100644
index 000000000..658e27b86
--- /dev/null
+++ b/changelog.d/bugfix-ccworks.fix
@@ -0,0 +1 @@
+Fix federation with Convergence AP Bridge
\ No newline at end of file
diff --git a/changelog.d/build-release-with-local-libvips.skip b/changelog.d/build-release-with-local-libvips.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/bump-elixir.change b/changelog.d/bump-elixir.change
new file mode 100644
index 000000000..afb25d4e7
--- /dev/null
+++ b/changelog.d/bump-elixir.change
@@ -0,0 +1 @@
+Elixir 1.13 is the minimum required version.
diff --git a/changelog.d/card-endpoint.remove b/changelog.d/card-endpoint.remove
new file mode 100644
index 000000000..e09a24cf7
--- /dev/null
+++ b/changelog.d/card-endpoint.remove
@@ -0,0 +1 @@
+Mastodon API: Remove deprecated GET /api/v1/statuses/:id/card endpoint https://github.com/mastodon/mastodon/pull/11213
diff --git a/changelog.d/card-image-description.add b/changelog.d/card-image-description.add
new file mode 100644
index 000000000..bf423ebb8
--- /dev/null
+++ b/changelog.d/card-image-description.add
@@ -0,0 +1 @@
+Include image description in status media cards
\ No newline at end of file
diff --git a/changelog.d/chat-attachment-empty-array.fix b/changelog.d/chat-attachment-empty-array.fix
new file mode 100644
index 000000000..7d98c9dd2
--- /dev/null
+++ b/changelog.d/chat-attachment-empty-array.fix
@@ -0,0 +1 @@
+ChatMessage: Tolerate attachment field set to an empty array
diff --git a/changelog.d/config-stat-symlink.fix b/changelog.d/config-stat-symlink.fix
new file mode 100644
index 000000000..c8b98225d
--- /dev/null
+++ b/changelog.d/config-stat-symlink.fix
@@ -0,0 +1 @@
+Config: Check the permissions of the linked file instead of the symlink
diff --git a/changelog.d/content-length.fix b/changelog.d/content-length.fix
new file mode 100644
index 000000000..dee906a9d
--- /dev/null
+++ b/changelog.d/content-length.fix
@@ -0,0 +1 @@
+MediaProxy was setting the Content-Length header, which is not permitted by RFC 9112 §6.2 when the reply is chunked, as it conflicts with the Transfer-Encoding header.
diff --git a/changelog.d/deprecations.skip b/changelog.d/deprecations.skip
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/changelog.d/deprecations.skip
@@ -0,0 +1 @@
+
diff --git a/changelog.d/deprecations2.skip b/changelog.d/deprecations2.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/deps-bump-2024-01-25.skip b/changelog.d/deps-bump-2024-01-25.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/description-meilisearch-type.skip b/changelog.d/description-meilisearch-type.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/dialyzer.skip b/changelog.d/dialyzer.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/dialyzer2.skip b/changelog.d/dialyzer2.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/dialyzer3.skip b/changelog.d/dialyzer3.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/dialyzer4.skip b/changelog.d/dialyzer4.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/digest_emails.fix b/changelog.d/digest_emails.fix
new file mode 100644
index 000000000..335a24464
--- /dev/null
+++ b/changelog.d/digest_emails.fix
@@ -0,0 +1 @@
+Fix the processing of email digest jobs.
diff --git a/changelog.d/doc-fix.skip b/changelog.d/doc-fix.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/docs-max-elixir-erlang.change b/changelog.d/docs-max-elixir-erlang.change
new file mode 100644
index 000000000..a58b7fc17
--- /dev/null
+++ b/changelog.d/docs-max-elixir-erlang.change
@@ -0,0 +1 @@
+Document maximum supported version of Erlang & Elixir
diff --git a/changelog.d/emoji-download-paginate.fix b/changelog.d/emoji-download-paginate.fix
new file mode 100644
index 000000000..e31a63380
--- /dev/null
+++ b/changelog.d/emoji-download-paginate.fix
@@ -0,0 +1 @@
+When downloading remote emojis packs, account for pagination
\ No newline at end of file
diff --git a/changelog.d/emoji-use-v1.fix b/changelog.d/emoji-use-v1.fix
new file mode 100644
index 000000000..ccc96b377
--- /dev/null
+++ b/changelog.d/emoji-use-v1.fix
@@ -0,0 +1 @@
+Make the remote emoji packs API use specifically the V1 URL. Akkoma does not understand it without V1, and it works either way with normal Pleroma, so there is no reason not to do this
\ No newline at end of file
diff --git a/changelog.d/exile-bsds.skip b/changelog.d/exile-bsds.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/exile-freebsd.skip b/changelog.d/exile-freebsd.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/exile-macos.skip b/changelog.d/exile-macos.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/exile.skip b/changelog.d/exile.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/familiar-followers.add b/changelog.d/familiar-followers.add
new file mode 100644
index 000000000..6e7ec9d25
--- /dev/null
+++ b/changelog.d/familiar-followers.add
@@ -0,0 +1 @@
+Implement `/api/v1/accounts/familiar_followers`
\ No newline at end of file
diff --git a/changelog.d/favicon.add b/changelog.d/favicon.add
new file mode 100644
index 000000000..cf12395e7
--- /dev/null
+++ b/changelog.d/favicon.add
@@ -0,0 +1 @@
+Add support for configuring favicon, embed favicon and PWA manifest in server-generated meta
diff --git a/changelog.d/federation_status-access.change b/changelog.d/federation_status-access.change
new file mode 100644
index 000000000..952254476
--- /dev/null
+++ b/changelog.d/federation_status-access.change
@@ -0,0 +1 @@
+Make `/api/v1/pleroma/federation_status` publicly available
diff --git a/changelog.d/federator-modules.remove b/changelog.d/federator-modules.remove
new file mode 100644
index 000000000..6ff71d107
--- /dev/null
+++ b/changelog.d/federator-modules.remove
@@ -0,0 +1 @@
+Removed support for multiple federator modules as we only support ActivityPub
diff --git a/changelog.d/federator.skip b/changelog.d/federator.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/fep-2c59.add b/changelog.d/fep-2c59.add
new file mode 100644
index 000000000..03e33cbd8
--- /dev/null
+++ b/changelog.d/fep-2c59.add
@@ -0,0 +1 @@
+Implement FEP-2c59, add "webfinger" to user actor
\ No newline at end of file
diff --git a/changelog.d/ffmpeg-limiter.add b/changelog.d/ffmpeg-limiter.add
new file mode 100644
index 000000000..e4a5ef196
--- /dev/null
+++ b/changelog.d/ffmpeg-limiter.add
@@ -0,0 +1 @@
+Framegrabs with ffmpeg will execute with a 5 second timeout and cache the URLs of failures with a TTL of 15 minutes to prevent excessive retries.
diff --git a/changelog.d/finch_redirects.fix b/changelog.d/finch_redirects.fix
new file mode 100644
index 000000000..c25beaba4
--- /dev/null
+++ b/changelog.d/finch_redirects.fix
@@ -0,0 +1 @@
+Following HTTP Redirects when the HTTP Adapter is Finch
diff --git a/changelog.d/fix-bookmark-folder-tests.skip b/changelog.d/fix-bookmark-folder-tests.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/fix-dockerfile.skip b/changelog.d/fix-dockerfile.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/fix-duplicate-inbox-deliveries.fix b/changelog.d/fix-duplicate-inbox-deliveries.fix
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/fix-otp-comparison.skip b/changelog.d/fix-otp-comparison.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/fix-tests.skip b/changelog.d/fix-tests.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/fix-webfinger-spoofing.security b/changelog.d/fix-webfinger-spoofing.security
new file mode 100644
index 000000000..7b3c9490a
--- /dev/null
+++ b/changelog.d/fix-webfinger-spoofing.security
@@ -0,0 +1 @@
+Fix webfinger spoofing.
diff --git a/changelog.d/force-mention-mrf.add b/changelog.d/force-mention-mrf.add
new file mode 100644
index 000000000..46ac14244
--- /dev/null
+++ b/changelog.d/force-mention-mrf.add
@@ -0,0 +1 @@
+Add ForceMention MRF
\ No newline at end of file
diff --git a/changelog.d/framegrabs.fix b/changelog.d/framegrabs.fix
new file mode 100644
index 000000000..dc0466f1b
--- /dev/null
+++ b/changelog.d/framegrabs.fix
@@ -0,0 +1 @@
+Video framegrabs were not working correctly after the change to use Exile to execute ffmpeg
diff --git a/changelog.d/frontend-management.add b/changelog.d/frontend-management.add
new file mode 100644
index 000000000..b85cddd96
--- /dev/null
+++ b/changelog.d/frontend-management.add
@@ -0,0 +1 @@
+[docs] add frontends management documentation
diff --git a/changelog.d/generate-unset-user-keys-migration.skip b/changelog.d/generate-unset-user-keys-migration.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/group-actor.add b/changelog.d/group-actor.add
new file mode 100644
index 000000000..2f614b3d8
--- /dev/null
+++ b/changelog.d/group-actor.add
@@ -0,0 +1 @@
+Implement group actors
diff --git a/changelog.d/gun-logs.skip b/changelog.d/gun-logs.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/gun_pool.fix b/changelog.d/gun_pool.fix
new file mode 100644
index 000000000..94ec9103d
--- /dev/null
+++ b/changelog.d/gun_pool.fix
@@ -0,0 +1 @@
+Fix logic error in Gun connection pooling which prevented retries even when the worker was launched with retry = true
diff --git a/changelog.d/gun_pool2.fix b/changelog.d/gun_pool2.fix
new file mode 100644
index 000000000..a1f98b49c
--- /dev/null
+++ b/changelog.d/gun_pool2.fix
@@ -0,0 +1 @@
+Connection pool errors when publishing an activity are a soft error that will be retried shortly.
diff --git a/changelog.d/gun_pool3.skip b/changelog.d/gun_pool3.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/handle_object_fetch_failures.change b/changelog.d/handle_object_fetch_failures.change
new file mode 100644
index 000000000..ae44e6f4b
--- /dev/null
+++ b/changelog.d/handle_object_fetch_failures.change
@@ -0,0 +1 @@
+Remote object fetch failures will prevent the object fetch job from retrying if the object request returns 401, 403, 404, 410, or exceeds the maximum thread depth.
diff --git a/changelog.d/healthcheck-disabled-error.fix b/changelog.d/healthcheck-disabled-error.fix
new file mode 100644
index 000000000..984384a52
--- /dev/null
+++ b/changelog.d/healthcheck-disabled-error.fix
@@ -0,0 +1 @@
+TwitterAPI: Return proper error when healthcheck is disabled
diff --git a/changelog.d/instance-contact-account.add b/changelog.d/instance-contact-account.add
new file mode 100644
index 000000000..e119446d2
--- /dev/null
+++ b/changelog.d/instance-contact-account.add
@@ -0,0 +1 @@
+Add contact account to InstanceView
\ No newline at end of file
diff --git a/changelog.d/instance-defdelegates.skip b/changelog.d/instance-defdelegates.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/instance-rules.add b/changelog.d/instance-rules.add
new file mode 100644
index 000000000..42f3cbfa1
--- /dev/null
+++ b/changelog.d/instance-rules.add
@@ -0,0 +1 @@
+Add instance rules
\ No newline at end of file
diff --git a/changelog.d/instance-v2.add b/changelog.d/instance-v2.add
new file mode 100644
index 000000000..4dd7ce8c0
--- /dev/null
+++ b/changelog.d/instance-v2.add
@@ -0,0 +1 @@
+Implement /api/v2/instance route
\ No newline at end of file
diff --git a/changelog.d/instance-v2.skip b/changelog.d/instance-v2.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/issue-3241.fix b/changelog.d/issue-3241.fix
new file mode 100644
index 000000000..d46db9805
--- /dev/null
+++ b/changelog.d/issue-3241.fix
@@ -0,0 +1 @@
+Handle cases when users.inbox is nil.
diff --git a/changelog.d/last_status_at.change b/changelog.d/last_status_at.change
new file mode 100644
index 000000000..5417aff30
--- /dev/null
+++ b/changelog.d/last_status_at.change
@@ -0,0 +1 @@
+Change AccountView `last_status_at` from a datetime to a date (as done in Mastodon 3.1.0)
\ No newline at end of file
diff --git a/changelog.d/link-verification.add b/changelog.d/link-verification.add
new file mode 100644
index 000000000..d8b11ebbc
--- /dev/null
+++ b/changelog.d/link-verification.add
@@ -0,0 +1 @@
+Verify profile link ownership with rel="me"
\ No newline at end of file
diff --git a/changelog.d/loading-order-test-fix.skip b/changelog.d/loading-order-test-fix.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/local-webfinger.fix b/changelog.d/local-webfinger.fix
new file mode 100644
index 000000000..d99056efd
--- /dev/null
+++ b/changelog.d/local-webfinger.fix
@@ -0,0 +1 @@
+Use correct domain for fqn and InstanceView
\ No newline at end of file
diff --git a/changelog.d/mark-read.fix b/changelog.d/mark-read.fix
new file mode 100644
index 000000000..346eb19e2
--- /dev/null
+++ b/changelog.d/mark-read.fix
@@ -0,0 +1 @@
+The query for marking notifications as read has been simplified
diff --git a/changelog.d/mastodon_api_v2.add b/changelog.d/mastodon_api_v2.add
new file mode 100644
index 000000000..d53aa35c4
--- /dev/null
+++ b/changelog.d/mastodon_api_v2.add
@@ -0,0 +1 @@
+Add new parameters to /api/v2/instance: configuration[accounts][max_pinned_statuses] and configuration[statuses][characters_reserved_per_url]
diff --git a/changelog.d/mastodon_directory.fix b/changelog.d/mastodon_directory.fix
new file mode 100644
index 000000000..937c8f864
--- /dev/null
+++ b/changelog.d/mastodon_directory.fix
@@ -0,0 +1 @@
+Mastodon API /api/v1/directory: Fix listing directory contents when not authenticated
diff --git a/changelog.d/meilisearch.add b/changelog.d/meilisearch.add
new file mode 100644
index 000000000..4856eea2e
--- /dev/null
+++ b/changelog.d/meilisearch.add
@@ -0,0 +1 @@
+Add meilisearch, make search engines pluggable
diff --git a/changelog.d/memleak.fix b/changelog.d/memleak.fix
new file mode 100644
index 000000000..2465921c0
--- /dev/null
+++ b/changelog.d/memleak.fix
@@ -0,0 +1 @@
+Fix a memory leak caused by Websocket connections that would not enter a state where a full garbage collection run could be triggered.
diff --git a/changelog.d/mergeback-2.6.2.skip b/changelog.d/mergeback-2.6.2.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/migration-fix.skip b/changelog.d/migration-fix.skip
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/changelog.d/migration-fix.skip
@@ -0,0 +1 @@
+
diff --git a/changelog.d/missing-mrfs.add b/changelog.d/missing-mrfs.add
new file mode 100644
index 000000000..6a17f9e1a
--- /dev/null
+++ b/changelog.d/missing-mrfs.add
@@ -0,0 +1 @@
+Startup detection for configured MRF modules that are missing or incorrectly defined
diff --git a/changelog.d/mrf-regex-error.fix b/changelog.d/mrf-regex-error.fix
new file mode 100644
index 000000000..2c43bc04a
--- /dev/null
+++ b/changelog.d/mrf-regex-error.fix
@@ -0,0 +1 @@
+MRF: Log sensible error for subdomains_regex
diff --git a/changelog.d/mrf-steal-emoji-extname.fix b/changelog.d/mrf-steal-emoji-extname.fix
new file mode 100644
index 000000000..197aa9b9e
--- /dev/null
+++ b/changelog.d/mrf-steal-emoji-extname.fix
@@ -0,0 +1 @@
+MRF.StealEmojiPolicy: Properly add fallback extension to filenames missing one
diff --git a/changelog.d/mrf_hashtags.fix b/changelog.d/mrf_hashtags.fix
new file mode 100644
index 000000000..c44c2376b
--- /dev/null
+++ b/changelog.d/mrf_hashtags.fix
@@ -0,0 +1 @@
+Federated timeline removal of hashtags via MRF HashtagPolicy
diff --git a/changelog.d/nil-content-map.fix b/changelog.d/nil-content-map.fix
new file mode 100644
index 000000000..d4943bf74
--- /dev/null
+++ b/changelog.d/nil-content-map.fix
@@ -0,0 +1 @@
+Support objects with a null contentMap (firefish)
diff --git a/changelog.d/no-async-with-clear-config.skip b/changelog.d/no-async-with-clear-config.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/notifications-index.fix b/changelog.d/notifications-index.fix
new file mode 100644
index 000000000..4617cbec0
--- /dev/null
+++ b/changelog.d/notifications-index.fix
@@ -0,0 +1 @@
+Fix notifications query which was not using the index properly
diff --git a/changelog.d/notifications.fix b/changelog.d/notifications.fix
new file mode 100644
index 000000000..a2d2eaea9
--- /dev/null
+++ b/changelog.d/notifications.fix
@@ -0,0 +1 @@
+Notifications: improve performance by filtering on users table instead of activities table
\ No newline at end of file
diff --git a/changelog.d/oauth-nickname.skip b/changelog.d/oauth-nickname.skip
new file mode 100644
index 000000000..02f16e06c
--- /dev/null
+++ b/changelog.d/oauth-nickname.skip
@@ -0,0 +1 @@
+Use User.full_nickname/1 in oauth html template
\ No newline at end of file
diff --git a/changelog.d/opengraph-rich-media-proxy.add b/changelog.d/opengraph-rich-media-proxy.add
new file mode 100644
index 000000000..2b2fc657d
--- /dev/null
+++ b/changelog.d/opengraph-rich-media-proxy.add
@@ -0,0 +1 @@
+Add media proxy to opengraph rich media cards
diff --git a/changelog.d/optimistic-inbox.change b/changelog.d/optimistic-inbox.change
new file mode 100644
index 000000000..2cf1ce92c
--- /dev/null
+++ b/changelog.d/optimistic-inbox.change
@@ -0,0 +1 @@
+Optimistic Inbox reduces the processing overhead of incoming activities without instantly verifiable signatures.
diff --git a/changelog.d/otp26.add b/changelog.d/otp26.add
new file mode 100644
index 000000000..b019afdf3
--- /dev/null
+++ b/changelog.d/otp26.add
@@ -0,0 +1 @@
+Support for Erlang OTP 26
diff --git a/changelog.d/postgres-jit.change b/changelog.d/postgres-jit.change
new file mode 100644
index 000000000..38225b06b
--- /dev/null
+++ b/changelog.d/postgres-jit.change
@@ -0,0 +1 @@
+Disable jit by default for PostgreSQL
diff --git a/changelog.d/prioritize-direct-recipients.add b/changelog.d/prioritize-direct-recipients.add
new file mode 100644
index 000000000..4efc94c68
--- /dev/null
+++ b/changelog.d/prioritize-direct-recipients.add
@@ -0,0 +1 @@
+Prioritize mentioned recipients (i.e., those that are not just followers) when federating.
diff --git a/changelog.d/promex.change b/changelog.d/promex.change
new file mode 100644
index 000000000..6c1571c54
--- /dev/null
+++ b/changelog.d/promex.change
@@ -0,0 +1 @@
+Change the prometheus library to PromEx.
diff --git a/changelog.d/public-polls.add b/changelog.d/public-polls.add
new file mode 100644
index 000000000..0dae0c38e
--- /dev/null
+++ b/changelog.d/public-polls.add
@@ -0,0 +1 @@
+Expose nonAnonymous field from Smithereen polls
\ No newline at end of file
diff --git a/changelog.d/publisher_discard.change b/changelog.d/publisher_discard.change
new file mode 100644
index 000000000..85e530d8d
--- /dev/null
+++ b/changelog.d/publisher_discard.change
@@ -0,0 +1 @@
+Activity publishing failures will prevent the job from retrying if the publishing request returns a 403 or 410
diff --git a/changelog.d/publisher_log.change b/changelog.d/publisher_log.change
new file mode 100644
index 000000000..3f85f5a1e
--- /dev/null
+++ b/changelog.d/publisher_log.change
@@ -0,0 +1 @@
+Publisher errors will now emit logs indicating the inbox that was not available for delivery.
diff --git a/changelog.d/qtfaststart.fix b/changelog.d/qtfaststart.fix
new file mode 100644
index 000000000..66d2569f2
--- /dev/null
+++ b/changelog.d/qtfaststart.fix
@@ -0,0 +1 @@
+MediaProxy Preview failures prevented when encountering certain video files
diff --git a/changelog.d/quotes-count.skip b/changelog.d/quotes-count.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/reachability.change b/changelog.d/reachability.change
new file mode 100644
index 000000000..06f63272b
--- /dev/null
+++ b/changelog.d/reachability.change
@@ -0,0 +1 @@
+Reduce the reachability timestamp update to a single upsert query
diff --git a/changelog.d/realpath-over-readlink.fix b/changelog.d/realpath-over-readlink.fix
new file mode 100644
index 000000000..479561b95
--- /dev/null
+++ b/changelog.d/realpath-over-readlink.fix
@@ -0,0 +1 @@
+pleroma_ctl: Use realpath(1) instead of readlink(1)
diff --git a/changelog.d/receiverworker-error-handling.fix b/changelog.d/receiverworker-error-handling.fix
new file mode 100644
index 000000000..f017a2bba
--- /dev/null
+++ b/changelog.d/receiverworker-error-handling.fix
@@ -0,0 +1 @@
+ReceiverWorker: Make sure non-{:ok, _} is returned as {:error, …}
\ No newline at end of file
diff --git a/changelog.d/remote-fetcher-error.skip b/changelog.d/remote-fetcher-error.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/reply-to-deleted.change b/changelog.d/reply-to-deleted.change
new file mode 100644
index 000000000..8b952ee7a
--- /dev/null
+++ b/changelog.d/reply-to-deleted.change
@@ -0,0 +1 @@
+A 422 error is returned when attempting to reply to a deleted status
diff --git a/changelog.d/rich_media.fix b/changelog.d/rich_media.fix
new file mode 100644
index 000000000..08f119550
--- /dev/null
+++ b/changelog.d/rich_media.fix
@@ -0,0 +1 @@
+Rich Media Preview cache eviction when the activity is updated.
diff --git a/changelog.d/rich_media_refactor.change b/changelog.d/rich_media_refactor.change
new file mode 100644
index 000000000..c0d4e3b0a
--- /dev/null
+++ b/changelog.d/rich_media_refactor.change
@@ -0,0 +1 @@
+Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.
diff --git a/changelog.d/rich_media_tests.skip b/changelog.d/rich_media_tests.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/richmediattl.fix b/changelog.d/richmediattl.fix
new file mode 100644
index 000000000..98de63015
--- /dev/null
+++ b/changelog.d/richmediattl.fix
@@ -0,0 +1 @@
+Parsing of RichMedia TTLs for Amazon URLs when query parameters are nil
diff --git a/changelog.d/scrobble-url.add b/changelog.d/scrobble-url.add
new file mode 100644
index 000000000..24bdeed89
--- /dev/null
+++ b/changelog.d/scrobble-url.add
@@ -0,0 +1 @@
+Adds the capability to add a URL to a scrobble (optional field)
diff --git a/changelog.d/scrubbers-html4-GtS.add b/changelog.d/scrubbers-html4-GtS.add
new file mode 100644
index 000000000..7f99dbb25
--- /dev/null
+++ b/changelog.d/scrubbers-html4-GtS.add
@@ -0,0 +1 @@
+scrubbers/default: Add more formatting elements from HTML4 / GoToSocial (acronym, bdo, big, cite, dfn, ins, kbd, q, samp, s, tt, var, wbr)
diff --git a/changelog.d/search-healthcheck.add b/changelog.d/search-healthcheck.add
new file mode 100644
index 000000000..4974925e7
--- /dev/null
+++ b/changelog.d/search-healthcheck.add
@@ -0,0 +1 @@
+Monitoring of search backend health to control the processing of jobs in the search indexing Oban queue
diff --git a/changelog.d/status-notification-type.add b/changelog.d/status-notification-type.add
new file mode 100644
index 000000000..a6e94fa87
--- /dev/null
+++ b/changelog.d/status-notification-type.add
@@ -0,0 +1 @@
+Add "status" notification type
\ No newline at end of file
diff --git a/changelog.d/system-cflags.fix b/changelog.d/system-cflags.fix
new file mode 100644
index 000000000..84de5ad57
--- /dev/null
+++ b/changelog.d/system-cflags.fix
@@ -0,0 +1 @@
+Fix eblurhash and elixir-captcha not using system cflags
diff --git a/changelog.d/tesla.deps b/changelog.d/tesla.deps
new file mode 100644
index 000000000..799bbc670
--- /dev/null
+++ b/changelog.d/tesla.deps
@@ -0,0 +1 @@
+Update Tesla HTTP client middleware to 1.8.0
diff --git a/changelog.d/test-improvements.skip b/changelog.d/test-improvements.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/testsecrets.skip b/changelog.d/testsecrets.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/transient-validators-defaults.change b/changelog.d/transient-validators-defaults.change
new file mode 100644
index 000000000..225cf4d0c
--- /dev/null
+++ b/changelog.d/transient-validators-defaults.change
@@ -0,0 +1 @@
+Set default values on validators for transient objects (attachment, poll options)
diff --git a/changelog.d/typo.skip b/changelog.d/typo.skip
new file mode 100644
index 000000000..e69de29bb
diff --git a/changelog.d/vips.change b/changelog.d/vips.change
new file mode 100644
index 000000000..ee18cd34b
--- /dev/null
+++ b/changelog.d/vips.change
@@ -0,0 +1 @@
+Change mediaproxy previews to use vips to generate thumbnails instead of ImageMagick
diff --git a/changelog.d/web_push.fix b/changelog.d/web_push.fix
new file mode 100644
index 000000000..cf933e2d4
--- /dev/null
+++ b/changelog.d/web_push.fix
@@ -0,0 +1 @@
+Fix web push notifications not successfully delivering
diff --git a/changelog.d/web_push_filtered.fix b/changelog.d/web_push_filtered.fix
new file mode 100644
index 000000000..b9159362a
--- /dev/null
+++ b/changelog.d/web_push_filtered.fix
@@ -0,0 +1 @@
+Web Push notifications are no longer generated for muted/blocked threads and users.
diff --git a/changelog.d/webfinger-validation.fix b/changelog.d/webfinger-validation.fix
new file mode 100644
index 000000000..e64312666
--- /dev/null
+++ b/changelog.d/webfinger-validation.fix
@@ -0,0 +1 @@
+Fix validate_webfinger when running a different domain for Webfinger
\ No newline at end of file
diff --git a/changelog.d/websocket-refactor.change b/changelog.d/websocket-refactor.change
new file mode 100644
index 000000000..3c447832b
--- /dev/null
+++ b/changelog.d/websocket-refactor.change
@@ -0,0 +1 @@
+Refactor the Mastodon /api/v1/streaming websocket handler to use Phoenix.Socket.Transport
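As background for the streaming change noted above: `Phoenix.Socket.Transport` is a small behaviour that the handler module implements. A minimal echo-style sketch, not Pleroma's actual handler (all names are illustrative):

```elixir
# Minimal Phoenix.Socket.Transport sketch; this is not Pleroma's streaming handler.
defmodule MyApp.StreamingSocket do
  @behaviour Phoenix.Socket.Transport

  # No dedicated process is needed for the transport itself, so start a dummy task.
  def child_spec(_opts) do
    %{id: __MODULE__, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
  end

  # Called with connection info (params, headers, ...) when a client connects.
  def connect(%{params: params}), do: {:ok, %{params: params}}

  # Called once the connection process starts.
  def init(state), do: {:ok, state}

  # Echo incoming text frames back to the client.
  def handle_in({text, _opts}, state), do: {:reply, :ok, {:text, text}, state}

  # Push server-side messages to the client; ignore anything else.
  def handle_info({:push, text}, state), do: {:push, {:text, text}, state}
  def handle_info(_msg, state), do: {:ok, state}

  def terminate(_reason, _state), do: :ok
end
```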
diff --git a/ci/Dockerfile b/ci/Dockerfile
deleted file mode 100644
index e6a8b438c..000000000
--- a/ci/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-FROM elixir:1.9.4
-
-RUN apt-get update &&\
- apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
- mix local.hex --force &&\
- mix local.rebar --force
-
diff --git a/ci/README b/ci/README
new file mode 100644
index 000000000..3785adef1
--- /dev/null
+++ b/ci/README
@@ -0,0 +1,12 @@
+## Dependencies
+
+Assuming an AMD64 Alpine system, you're going to need the following packages:
+- `qemu qemu-openrc qemu-arm qemu-aarch64` for binfmt
+- `docker-cli-buildx` for building the images
+
+## Setting up
+
+```
+docker login git.pleroma.social:5050
+doas rc-service qemu-binfmt start
+```
diff --git a/ci/build_and_push.sh b/ci/build_and_push.sh
deleted file mode 100755
index 484cc2643..000000000
--- a/ci/build_and_push.sh
+++ /dev/null
@@ -1 +0,0 @@
-docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:latest --push .
diff --git a/ci/elixir-1.12/Dockerfile b/ci/elixir-1.12/Dockerfile
new file mode 100644
index 000000000..a2b566873
--- /dev/null
+++ b/ci/elixir-1.12/Dockerfile
@@ -0,0 +1,8 @@
+FROM elixir:1.12.3
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+ apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+ mix local.hex --force &&\
+ mix local.rebar --force
diff --git a/ci/elixir-1.12/build_and_push.sh b/ci/elixir-1.12/build_and_push.sh
new file mode 100755
index 000000000..508262ed8
--- /dev/null
+++ b/ci/elixir-1.12/build_and_push.sh
@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.12 --push .
diff --git a/ci/elixir-1.13/Dockerfile b/ci/elixir-1.13/Dockerfile
new file mode 100644
index 000000000..b8bceb3d9
--- /dev/null
+++ b/ci/elixir-1.13/Dockerfile
@@ -0,0 +1,8 @@
+FROM elixir:1.13.4-otp-24
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+ apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+ mix local.hex --force &&\
+ mix local.rebar --force
diff --git a/ci/elixir-1.13/build_and_push.sh b/ci/elixir-1.13/build_and_push.sh
new file mode 100755
index 000000000..64e1856db
--- /dev/null
+++ b/ci/elixir-1.13/build_and_push.sh
@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-24 --push .
diff --git a/ci/elixir-1.15-otp25/Dockerfile b/ci/elixir-1.15-otp25/Dockerfile
new file mode 100644
index 000000000..3335c6e36
--- /dev/null
+++ b/ci/elixir-1.15-otp25/Dockerfile
@@ -0,0 +1,8 @@
+FROM elixir:1.15.7-otp-25
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+ apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+ mix local.hex --force &&\
+ mix local.rebar --force
diff --git a/ci/elixir-1.15-otp25/build_and_push.sh b/ci/elixir-1.15-otp25/build_and_push.sh
new file mode 100755
index 000000000..a28e0d33c
--- /dev/null
+++ b/ci/elixir-1.15-otp25/build_and_push.sh
@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15-otp25 --push .
diff --git a/ci/postgres-with-rum-13/Dockerfile b/ci/postgres-with-rum-13/Dockerfile
new file mode 100644
index 000000000..dc727df1d
--- /dev/null
+++ b/ci/postgres-with-rum-13/Dockerfile
@@ -0,0 +1,3 @@
+FROM postgres:13-bullseye
+
+RUN apt-get update && apt-get install -y postgresql-13-rum/bullseye-pgdg
diff --git a/ci/postgres-with-rum-13/build_and_push.sh b/ci/postgres-with-rum-13/build_and_push.sh
new file mode 100755
index 000000000..c437b64a7
--- /dev/null
+++ b/ci/postgres-with-rum-13/build_and_push.sh
@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/postgres-with-rum-13:latest --push .
diff --git a/config/benchmark.exs b/config/benchmark.exs
index 9a7ea5669..d30c95946 100644
--- a/config/benchmark.exs
+++ b/config/benchmark.exs
@@ -14,7 +14,7 @@
method: Pleroma.Captcha.Mock
# Print only warnings and errors during test
-config :logger, level: :warn
+config :logger, level: :warning
config :pleroma, :auth, oauth_consumer_strategies: []
@@ -40,6 +40,7 @@
password: "postgres",
database: "pleroma_benchmark",
hostname: System.get_env("DB_HOST") || "localhost",
+ port: System.get_env("DB_PORT") || "5432",
pool_size: 10
# Reduce hash rounds for testing
@@ -78,6 +79,10 @@
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
+config :pleroma, Pleroma.Application,
+ background_migrators: false,
+ streamer_registry: false
+
if File.exists?("./config/benchmark.secret.exs") do
import_config "benchmark.secret.exs"
else
diff --git a/config/config.exs b/config/config.exs
index 7efad0061..fef3910fb 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -37,7 +37,7 @@
# FIGURATION! EDIT YOUR SECRET FILE (either prod.secret.exs, dev.secret.exs).
#
# This file is responsible for configuring your application
-# and its dependencies with the aid of the Mix.Config module.
+# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
@@ -114,32 +114,11 @@
"xmpp"
]
-websocket_config = [
- path: "/websocket",
- serializer: [
- {Phoenix.Socket.V1.JSONSerializer, "~> 1.0.0"},
- {Phoenix.Socket.V2.JSONSerializer, "~> 2.0.0"}
- ],
- timeout: 60_000,
- transport_log: false,
- compress: false
-]
-
# Configures the endpoint
config :pleroma, Pleroma.Web.Endpoint,
url: [host: "localhost"],
http: [
- ip: {127, 0, 0, 1},
- dispatch: [
- {:_,
- [
- {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
- {"/websocket", Phoenix.Endpoint.CowboyWebSocket,
- {Phoenix.Transports.WebSocket,
- {Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, websocket_config}}},
- {:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
- ]}
- ]
+ ip: {127, 0, 0, 1}
],
protocol: "https",
secret_key_base: "aK4Abxf29xU9TTDKre9coZPUgevcVCFQJe/5xP/7Lt4BEif6idBIbjupVbOrbKxl",
@@ -164,11 +143,6 @@
format: "$metadata[$level] $message",
metadata: [:request_id]
-config :quack,
- level: :warn,
- meta: [:all],
- webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
-
config :mime, :types, %{
"application/xml" => ["xml"],
"application/xrd+xml" => ["xrd+xml"],
@@ -191,8 +165,10 @@
email: "example@example.com",
notify_email: "noreply@example.com",
description: "Pleroma: An efficient and flexible fediverse server",
+ short_description: "",
background_image: "/images/city.jpg",
instance_thumbnail: "/instance/thumbnail.jpeg",
+ favicon: "/favicon.png",
limit: 5_000,
description_limit: 5_000,
remote_limit: 100_000,
@@ -213,9 +189,6 @@
federating: true,
federation_incoming_replies_max_depth: 100,
federation_reachability_timeout_days: 7,
- federation_publisher_modules: [
- Pleroma.Web.ActivityPub.Publisher
- ],
allow_relay: true,
public: true,
quarantined_instances: [],
@@ -231,6 +204,7 @@
max_pinned_statuses: 1,
attachment_links: false,
max_report_comment_size: 1000,
+ report_strip_status: true,
safe_dm_mentions: false,
healthcheck: false,
remote_post_retention_days: 90,
@@ -260,7 +234,23 @@
show_reactions: true,
password_reset_token_validity: 60 * 60 * 24,
profile_directory: true,
- privileged_staff: false,
+ admin_privileges: [
+ :users_read,
+ :users_manage_invites,
+ :users_manage_activation_state,
+ :users_manage_tags,
+ :users_manage_credentials,
+ :users_delete,
+ :messages_read,
+ :messages_delete,
+ :instances_delete,
+ :reports_manage_reports,
+ :moderation_log_read,
+ :announcements_manage_announcements,
+ :emoji_manage_emoji,
+ :statistics_read
+ ],
+ moderator_privileges: [:messages_delete, :reports_manage_reports],
max_endorsed_users: 20,
birthday_required: false,
birthday_min_age: 0,
@@ -351,6 +341,8 @@
icons: [
%{
src: "/static/logo.svg",
+ sizes: "144x144",
+ purpose: "any",
type: "image/svg+xml"
}
],
@@ -399,6 +391,12 @@
federated_timeline_removal: [],
replace: []
+config :pleroma, :mrf_emoji,
+ remove_url: [],
+ remove_shortcode: [],
+ federated_timeline_removal_url: [],
+ federated_timeline_removal_shortcode: []
+
config :pleroma, :mrf_hashtag,
sensitive: ["nsfw"],
reject: [],
@@ -419,6 +417,12 @@
config :pleroma, :mrf_follow_bot, follower_nickname: nil
+config :pleroma, :mrf_inline_quote, template: "RT: {url}"
+
+config :pleroma, :mrf_force_mention,
+ mention_parent: true,
+ mention_quoted: true
+
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
@@ -428,7 +432,11 @@
Pleroma.Web.RichMedia.Parsers.OEmbed
],
failure_backoff: 60_000,
- ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
+ ttl_setters: [
+ Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl,
+ Pleroma.Web.RichMedia.Parser.TTL.Opengraph
+ ],
+ max_body: 5_000_000
config :pleroma, :media_proxy,
enabled: false,
@@ -562,8 +570,8 @@
token_expiration: 5,
filter_expiration: 1,
backup: 1,
- federator_incoming: 50,
- federator_outgoing: 50,
+ federator_incoming: 5,
+ federator_outgoing: 5,
ingestion_queue: 50,
web_push: 50,
mailer: 10,
@@ -574,7 +582,9 @@
remote_fetcher: 2,
attachments_cleanup: 1,
new_users_digest: 1,
- mute_expire: 5
+ mute_expire: 5,
+ search_indexing: [limit: 10, paused: true],
+ rich_media_expiration: 2
],
plugins: [Oban.Plugins.Pruner],
crontab: [
@@ -585,7 +595,8 @@
config :pleroma, :workers,
retries: [
federator_incoming: 5,
- federator_outgoing: 5
+ federator_outgoing: 5,
+ search_indexing: 2
]
config :pleroma, Pleroma.Formatter,
@@ -608,9 +619,6 @@
base: System.get_env("LDAP_BASE") || "dc=example,dc=com",
uid: System.get_env("LDAP_UID") || "cn"
-config :esshd,
- enabled: false
-
oauth_consumer_strategies =
System.get_env("OAUTH_CONSUMER_STRATEGIES")
|> to_string()
@@ -646,12 +654,26 @@
config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: false
-config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
- enabled: false,
- auth: false,
- ip_whitelist: [],
- path: "/api/pleroma/app_metrics",
- format: :text
+config :pleroma, Pleroma.PromEx,
+ disabled: false,
+ manual_metrics_start_delay: :no_delay,
+ drop_metrics_groups: [],
+ grafana: [
+ host: System.get_env("GRAFANA_HOST", "http://localhost:3000"),
+ auth_token: System.get_env("GRAFANA_TOKEN"),
+ upload_dashboards_on_start: false,
+ folder_name: "BEAM",
+ annotate_app_lifecycle: true
+ ],
+ metrics_server: [
+ port: 4021,
+ path: "/metrics",
+ protocol: :http,
+ pool_size: 5,
+ cowboy_opts: [],
+ auth_strategy: :none
+ ],
+ datasource: "Prometheus"
config :pleroma, Pleroma.ScheduledActivity,
daily_user_limit: 25,
@@ -676,6 +698,8 @@
config :pleroma, :populate_hashtags_table, fault_rate_allowance: 0.01
+config :pleroma, :delete_context_objects, fault_rate_allowance: 0.01
+
config :pleroma, :env, Mix.env()
config :http_signatures,
@@ -744,7 +768,7 @@
"name" => "fedi-fe",
"git" => "https://git.pleroma.social/pleroma/fedi-fe",
"build_url" =>
- "https://git.pleroma.social/pleroma/fedi-fe/-/jobs/artifacts/${ref}/download?job=build",
+ "https://git.pleroma.social/pleroma/fedi-fe/-/jobs/artifacts/${ref}/download?job=build_release",
"ref" => "master",
"custom-http-headers" => [
{"service-worker-allowed", "/"}
@@ -757,13 +781,21 @@
"https://git.pleroma.social/pleroma/admin-fe/-/jobs/artifacts/${ref}/download?job=build",
"ref" => "develop"
},
- "soapbox-fe" => %{
- "name" => "soapbox-fe",
- "git" => "https://gitlab.com/soapbox-pub/soapbox-fe",
+ "soapbox" => %{
+ "name" => "soapbox",
+ "git" => "https://gitlab.com/soapbox-pub/soapbox",
"build_url" =>
- "https://gitlab.com/soapbox-pub/soapbox-fe/-/jobs/artifacts/${ref}/download?job=build-production",
- "ref" => "v1.0.0",
+ "https://gitlab.com/soapbox-pub/soapbox/-/jobs/artifacts/${ref}/download?job=build-production",
+ "ref" => "v3.0.0-beta.1",
"build_dir" => "static"
+ },
+ "glitch-lily" => %{
+ "name" => "glitch-lily",
+ "git" => "https://lily-is.land/infra/glitch-lily",
+ "build_url" =>
+ "https://lily-is.land/infra/glitch-lily/-/jobs/artifacts/${ref}/download?job=build",
+ "ref" => "servant",
+ "build_dir" => "public"
}
}
@@ -776,7 +808,7 @@
config :pleroma, configurable_from_database: false
config :pleroma, Pleroma.Repo,
- parameters: [gin_fuzzy_search_limit: "500"],
+ parameters: [gin_fuzzy_search_limit: "500", jit: "off"],
prepare: :unnamed
config :pleroma, :connections_pool,
@@ -836,7 +868,11 @@
config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: false
config :pleroma, :mrf,
- policies: [Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy, Pleroma.Web.ActivityPub.MRF.TagPolicy],
+ policies: [
+ Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy,
+ Pleroma.Web.ActivityPub.MRF.TagPolicy,
+ Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy
+ ],
transparency: true,
transparency_exclusions: []
@@ -855,13 +891,34 @@
config :pleroma, Pleroma.User.Backup,
purge_after_days: 30,
limit_days: 7,
- dir: nil
+ dir: nil,
+ process_wait_time: 30_000,
+ process_chunk_size: 100
config :pleroma, ConcurrentLimiter, [
{Pleroma.Web.RichMedia.Helpers, [max_running: 5, max_waiting: 5]},
- {Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]}
+ {Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]},
+ {Pleroma.Search, [max_running: 30, max_waiting: 50]}
]
+config :pleroma, Pleroma.Web.WebFinger, domain: nil, update_nickname_on_user_fetch: true
+
+config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
+
+config :pleroma, Pleroma.Search.Meilisearch,
+ url: "http://127.0.0.1:7700/",
+ private_key: nil,
+ initial_indexing_chunk_size: 100_000
+
+config :pleroma, Pleroma.Application,
+ background_migrators: true,
+ internal_fetch: true,
+ load_custom_modules: true,
+ max_restarts: 3,
+ streamer_registry: true
+
+config :pleroma, Pleroma.Uploaders.Uploader, timeout: 30_000
+
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
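The config.exs changes above replace `privileged_staff` with granular `admin_privileges`/`moderator_privileges` lists and add a `Pleroma.Web.WebFinger` domain setting. A hedged sketch of how these might be overridden in a secret config file (values are illustrative, not recommendations):

```elixir
# Illustrative overrides for a secret config file; adjust to your own policy.
import Config

config :pleroma, :instance,
  # Moderators only handle reports and delete messages in this sketch.
  moderator_privileges: [:reports_manage_reports, :messages_delete],
  # A reduced admin set; the defaults above list every available privilege.
  admin_privileges: [
    :users_read,
    :users_manage_credentials,
    :reports_manage_reports,
    :moderation_log_read,
    :statistics_read
  ]

# Serve WebFinger for @user@example.org while the instance runs on a subdomain.
config :pleroma, Pleroma.Web.WebFinger, domain: "example.org"
```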
diff --git a/config/description.exs b/config/description.exs
index b180e7308..e16abfc42 100644
--- a/config/description.exs
+++ b/config/description.exs
@@ -520,6 +520,27 @@
}
]
},
+ %{
+ group: :pleroma,
+ key: :delete_context_objects,
+ type: :group,
+ description: "`delete_context_objects` background migration settings",
+ children: [
+ %{
+ key: :fault_rate_allowance,
+ type: :float,
+ description:
+          "Max accepted rate of objects that failed in the migration. Any value from 0.0, which tolerates no errors, to 1.0, which will enable the feature even if context object deletion failed for all records.",
+ suggestions: [0.01]
+ },
+ %{
+ key: :sleep_interval_ms,
+ type: :integer,
+ description:
+          "Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be kept at the default on most instances)."
+ }
+ ]
+ },
%{
group: :pleroma,
key: :instance,
@@ -561,6 +582,29 @@
"Very cool instance"
]
},
+ %{
+ key: :short_description,
+ type: :string,
+ description:
+ "Shorter version of instance description. It can be seen on `/api/v1/instance`",
+ suggestions: [
+ "Cool instance"
+ ]
+ },
+ %{
+ key: :status_page,
+ type: :string,
+ description: "A page where people can see the status of the server during an outage",
+ suggestions: [
+ "https://status.pleroma.example.org"
+ ]
+ },
+ %{
+ key: :contact_username,
+ type: :string,
+ description: "Instance owner username",
+ suggestions: ["admin"]
+ },
%{
key: :limit,
type: :integer,
@@ -810,6 +854,13 @@
1_000
]
},
+ %{
+ key: :report_strip_status,
+ label: "Report strip status",
+ type: :boolean,
+ description:
+ "Strip associated statuses in reports to ids when closed/resolved, otherwise keep a copy"
+ },
%{
key: :safe_dm_mentions,
label: "Safe DM mentions",
@@ -975,6 +1026,12 @@
"The instance thumbnail can be any image that represents your instance and is used by some apps or services when they display information about your instance.",
suggestions: ["/instance/thumbnail.jpeg"]
},
+ %{
+ key: :favicon,
+ type: {:string, :image},
+ description: "Favicon of the instance",
+ suggestions: ["/favicon.png"]
+ },
%{
key: :show_reactions,
type: :boolean,
@@ -986,10 +1043,48 @@
description: "Enable profile directory."
},
%{
- key: :privileged_staff,
- type: :boolean,
+ key: :admin_privileges,
+ type: {:list, :atom},
+ suggestions: [
+ :users_read,
+ :users_manage_invites,
+ :users_manage_activation_state,
+ :users_manage_tags,
+ :users_manage_credentials,
+ :users_delete,
+ :messages_read,
+ :messages_delete,
+ :instances_delete,
+ :reports_manage_reports,
+ :moderation_log_read,
+ :announcements_manage_announcements,
+ :emoji_manage_emoji,
+ :statistics_read
+ ],
description:
- "Let moderators access sensitive data (e.g. updating user credentials, get password reset token, delete users, index and read private statuses and chats)"
+        "What extra privileges to allow admins (e.g. updating user credentials, getting password reset tokens, deleting users, indexing and reading private statuses and chats)"
+ },
+ %{
+ key: :moderator_privileges,
+ type: {:list, :atom},
+ suggestions: [
+ :users_read,
+ :users_manage_invites,
+ :users_manage_activation_state,
+ :users_manage_tags,
+ :users_manage_credentials,
+ :users_delete,
+ :messages_read,
+ :messages_delete,
+ :instances_delete,
+ :reports_manage_reports,
+ :moderation_log_read,
+ :announcements_manage_announcements,
+ :emoji_manage_emoji,
+ :statistics_read
+ ],
+ description:
+        "What extra privileges to allow moderators (e.g. updating user credentials, getting password reset tokens, deleting users, indexing and reading private statuses and chats)"
},
%{
key: :birthday_required,
@@ -1000,7 +1095,17 @@
key: :birthday_min_age,
type: :integer,
description:
- "Minimum required age for users to create account. Only used if birthday is required."
+        "Minimum required age (in days) for users to create an account. Only used if birthday is required.",
+ suggestions: [6570]
+ },
+ %{
+ key: :languages,
+ type: {:list, :string},
+ description:
+ "Languages to be exposed in /api/v1/instance. Should be in the format of BCP47 language codes.",
+ suggestions: [
+ "en"
+ ]
}
]
},
@@ -1121,7 +1226,7 @@
type: [:atom, :tuple, :module],
description:
"Where logs will be sent, :console - send logs to stdout, { ExSyslogger, :ex_syslogger } - to syslog, Quack.Logger - to Slack.",
- suggestions: [:console, {ExSyslogger, :ex_syslogger}, Quack.Logger]
+ suggestions: [:console, {ExSyslogger, :ex_syslogger}]
}
]
},
@@ -1136,7 +1241,7 @@
key: :level,
type: {:dropdown, :atom},
description: "Log level",
- suggestions: [:debug, :info, :warn, :error]
+ suggestions: [:debug, :info, :warning, :error]
},
%{
key: :ident,
@@ -1169,7 +1274,7 @@
key: :level,
type: {:dropdown, :atom},
description: "Log level",
- suggestions: [:debug, :info, :warn, :error]
+ suggestions: [:debug, :info, :warning, :error]
},
%{
key: :format,
@@ -1184,45 +1289,6 @@
}
]
},
- %{
- group: :quack,
- type: :group,
- label: "Quack Logger",
- description: "Quack-related settings",
- children: [
- %{
- key: :level,
- type: {:dropdown, :atom},
- description: "Log level",
- suggestions: [:debug, :info, :warn, :error]
- },
- %{
- key: :meta,
- type: {:list, :atom},
- description: "Configure which metadata you want to report on",
- suggestions: [
- :application,
- :module,
- :file,
- :function,
- :line,
- :pid,
- :crash_reason,
- :initial_call,
- :registered_name,
- :all,
- :none
- ]
- },
- %{
- key: :webhook_url,
- label: "Webhook URL",
- type: :string,
- description: "Configure the Slack incoming webhook",
- suggestions: ["https://hooks.slack.com/services/YOUR-KEY-HERE"]
- }
- ]
- },
%{
group: :pleroma,
key: :frontend_configurations,
@@ -1417,7 +1483,7 @@
label: "Subject line behavior",
type: :string,
description: "Allows changing the default behaviour of subject lines in replies.
- `email`: copy and preprend re:, as in email,
+ `email`: copy and prepend re:, as in email,
`masto`: copy verbatim, as in Mastodon,
`noop`: don't copy the subject.",
suggestions: ["email", "masto", "noop"]
@@ -1745,6 +1811,11 @@
type: :boolean,
description: "Sign object fetches with HTTP signatures"
},
+ %{
+ key: :authorized_fetch_mode,
+ type: :boolean,
+ description: "Require HTTP signatures for AP fetches"
+ },
%{
key: :note_replies_output_limit,
type: :integer,
@@ -1905,7 +1976,7 @@
key: :log,
type: {:dropdown, :atom},
description: "Logs verbose mode",
- suggestions: [false, :error, :warn, :info, :debug]
+ suggestions: [false, :error, :warning, :info, :debug]
},
%{
key: :queues,
@@ -2602,45 +2673,6 @@
}
]
},
- %{
- group: :esshd,
- label: "ESSHD",
- type: :group,
- description:
- "Before enabling this you must add :esshd to mix.exs as one of the extra_applications " <>
- "and generate host keys in your priv dir with ssh-keygen -m PEM -N \"\" -b 2048 -t rsa -f ssh_host_rsa_key",
- children: [
- %{
- key: :enabled,
- type: :boolean,
- description: "Enables SSH"
- },
- %{
- key: :priv_dir,
- type: :string,
- description: "Dir with SSH keys",
- suggestions: ["/some/path/ssh_keys"]
- },
- %{
- key: :handler,
- type: :string,
- description: "Handler module",
- suggestions: ["Pleroma.BBS.Handler"]
- },
- %{
- key: :port,
- type: :integer,
- description: "Port to connect",
- suggestions: [10_022]
- },
- %{
- key: :password_authenticator,
- type: :string,
- description: "Authenticator module",
- suggestions: ["Pleroma.BBS.Authenticator"]
- }
- ]
- },
%{
group: :mime,
label: "Mime Types",
@@ -3103,7 +3135,7 @@
key: :max_waiting,
type: :integer,
description:
- "Maximum number of requests waiting for other requests to finish. After this number is reached, the pool will start returning errrors when a new request is made",
+ "Maximum number of requests waiting for other requests to finish. After this number is reached, the pool will start returning errors when a new request is made",
suggestions: [10]
},
%{
@@ -3369,7 +3401,7 @@
%{
key: :purge_after_days,
type: :integer,
- description: "Remove backup achives after N days",
+ description: "Remove backup archives after N days",
suggestions: [30]
},
%{
@@ -3377,6 +3409,21 @@
type: :integer,
description: "Limit user to export not more often than once per N days",
suggestions: [7]
+ },
+ %{
+ key: :process_wait_time,
+ type: :integer,
+ label: "Process Wait Time",
+ description:
+ "The amount of time to wait for backup to report progress, in milliseconds. If no progress is received from the backup job for that much time, terminate it and deem it failed.",
+ suggestions: [30_000]
+ },
+ %{
+ key: :process_chunk_size,
+ type: :integer,
+ label: "Process Chunk Size",
+ description: "The number of activities to fetch in the backup job for each chunk.",
+ suggestions: [100]
}
]
},
@@ -3464,5 +3511,48 @@
]
}
]
+ },
+ %{
+ group: :pleroma,
+ key: Pleroma.Search,
+ type: :group,
+ description: "General search settings.",
+ children: [
+ %{
+ key: :module,
+ type: :keyword,
+ description: "Selected search module.",
+          suggestions: [Pleroma.Search.DatabaseSearch, Pleroma.Search.Meilisearch]
+ }
+ ]
+ },
+ %{
+ group: :pleroma,
+ key: Pleroma.Search.Meilisearch,
+ type: :group,
+ description: "Meilisearch settings.",
+ children: [
+ %{
+ key: :url,
+ type: :string,
+ description: "Meilisearch URL.",
+          suggestions: ["http://127.0.0.1:7700/"]
+ },
+ %{
+ key: :private_key,
+ type: :string,
+ description:
+ "Private key for meilisearch authentication, or `nil` to disable private key authentication.",
+          suggestions: [nil]
+ },
+ %{
+ key: :initial_indexing_chunk_size,
+ type: :integer,
+ description:
+ "Amount of posts in a batch when running the initial indexing operation. Should probably not be more than 100000" <>
+ " since there's a limit on maximum insert size",
+          suggestions: [100_000]
+ }
+ ]
}
]
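The search settings documented above map to plain configuration; a minimal sketch for switching the pluggable search backend to Meilisearch (the `MEILI_PRIVATE_KEY` environment variable name is an assumption of this example):

```elixir
# Sketch: select the Meilisearch backend instead of the default database search.
import Config

config :pleroma, Pleroma.Search, module: Pleroma.Search.Meilisearch

config :pleroma, Pleroma.Search.Meilisearch,
  url: "http://127.0.0.1:7700/",
  # Assumed env var name for this sketch; use whatever holds your Meilisearch key.
  private_key: System.get_env("MEILI_PRIVATE_KEY"),
  initial_indexing_chunk_size: 100_000
```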
diff --git a/config/dev.exs b/config/dev.exs
index ab3e83c12..fe8de5045 100644
--- a/config/dev.exs
+++ b/config/dev.exs
@@ -8,8 +8,7 @@
# with brunch.io to recompile .js and .css sources.
config :pleroma, Pleroma.Web.Endpoint,
http: [
- port: 4000,
- protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
+ port: 4000
],
protocol: "http",
debug_errors: true,
diff --git a/config/docker.exs b/config/docker.exs
index f9f27d141..5db222485 100644
--- a/config/docker.exs
+++ b/config/docker.exs
@@ -18,6 +18,7 @@
password: System.fetch_env!("DB_PASS"),
database: System.get_env("DB_NAME", "pleroma"),
hostname: System.get_env("DB_HOST", "db"),
+ port: System.get_env("DB_PORT", "5432"),
pool_size: 10
# Configure web push notifications
diff --git a/config/test.exs b/config/test.exs
index d5c25f65e..9b4113dd5 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -16,7 +16,7 @@
# Print only warnings and errors during test
config :logger, :console,
- level: :warn,
+ level: :warning,
format: "\n[$level] $message\n"
config :pleroma, :auth, oauth_consumer_strategies: []
@@ -47,8 +47,9 @@
password: "postgres",
database: "pleroma_test",
hostname: System.get_env("DB_HOST") || "localhost",
+ port: System.get_env("DB_PORT") || "5432",
pool: Ecto.Adapters.SQL.Sandbox,
- pool_size: 50
+ pool_size: System.schedulers_online() * 2
config :pleroma, :dangerzone, override_repo_pool_size: true
@@ -60,7 +61,8 @@
config :pleroma, :rich_media,
enabled: false,
ignore_hosts: [],
- ignore_tld: ["local", "localdomain", "lan"]
+ ignore_tld: ["local", "localdomain", "lan"],
+ max_body: 2_000_000
config :pleroma, :instance,
multi_factor_authentication: [
@@ -81,10 +83,7 @@
"BLH1qVhJItRGCfxgTtONfsOKDc9VRAraXw-3NsmjMngWSh7NxOizN6bkuRA7iLTMPS82PjwJAr3UoK9EC1IFrz4",
private_key: "_-XZ0iebPrRfZ_o0-IatTdszYa8VCH1yLN-JauK7HHA"
-config :pleroma, Oban,
- queues: false,
- crontab: false,
- plugins: false
+config :pleroma, Oban, testing: :manual
config :pleroma, Pleroma.ScheduledActivity,
daily_user_limit: 2,
@@ -129,14 +128,55 @@
config :pleroma, :cachex, provider: Pleroma.CachexMock
+config :pleroma, Pleroma.Web.WebFinger, update_nickname_on_user_fetch: false
+
config :pleroma, :side_effects,
ap_streamer: Pleroma.Web.ActivityPub.ActivityPubMock,
logger: Pleroma.LoggerMock
+config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
+
+config :pleroma, Pleroma.Search.Meilisearch, url: "http://127.0.0.1:7700/", private_key: nil
+
# Reduce recompilation time
# https://dashbit.co/blog/speeding-up-re-compilation-of-elixir-projects
config :phoenix, :plug_init_mode, :runtime
+config :pleroma, :config_impl, Pleroma.UnstubbedConfigMock
+
+config :pleroma, Pleroma.PromEx, disabled: true
+
+# Mox definitions. Only read during compile time.
+config :pleroma, Pleroma.User.Backup, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.Uploaders.S3, ex_aws_impl: Pleroma.Uploaders.S3.ExAwsMock
+config :pleroma, Pleroma.Uploaders.S3, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
+config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
+
+peer_module =
+ if String.to_integer(System.otp_release()) >= 25 do
+ :peer
+ else
+ :slave
+ end
+
+config :pleroma, Pleroma.Cluster, peer_module: peer_module
+
+config :pleroma, Pleroma.Application,
+ background_migrators: false,
+ internal_fetch: false,
+ load_custom_modules: false,
+ max_restarts: 100,
+ streamer_registry: false,
+ test_http_pools: true
+
+config :pleroma, Pleroma.Uploaders.Uploader, timeout: 1_000
+
+config :pleroma, Pleroma.Emoji.Loader, test_emoji: true
+
+config :pleroma, Pleroma.Web.RichMedia.Backfill, provider: Pleroma.Web.RichMedia.Backfill
+
if File.exists?("./config/test.secret.exs") do
import_config "test.secret.exs"
else
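With Oban switched to `testing: :manual` above, queued jobs no longer execute inline in tests; tests assert that jobs were enqueued and drain queues explicitly. A sketch using Oban's standard testing helpers (the test module, job args, and exact worker choice are assumptions):

```elixir
# Sketch assuming Oban.Testing helpers; job args and assertions are illustrative.
defmodule Pleroma.ManualObanExampleTest do
  use Pleroma.DataCase
  use Oban.Testing, repo: Pleroma.Repo

  test "jobs are enqueued, not executed, until drained" do
    # Enqueue a job directly for demonstration; real tests enqueue via the code under test.
    Oban.insert!(Pleroma.Workers.PublisherWorker.new(%{"op" => "publish"}))

    # With manual testing mode the job sits in the queue...
    assert_enqueued(worker: Pleroma.Workers.PublisherWorker)

    # ...until the queue is drained explicitly.
    Oban.drain_queue(queue: :federator_outgoing)
  end
end
```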
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index f56f8c50a..4691f68bb 100755
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -3,7 +3,7 @@
set -e
echo "-- Waiting for database..."
-while ! pg_isready -U ${DB_USER:-pleroma} -d postgres://${DB_HOST:-db}:5432/${DB_NAME:-pleroma} -t 1; do
+while ! pg_isready -U ${DB_USER:-pleroma} -d postgres://${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-pleroma} -t 1; do
sleep 1s
done
diff --git a/docs/administration/CLI_tasks/config.md b/docs/administration/CLI_tasks/config.md
index fc9f3cbd5..7c167ec5d 100644
--- a/docs/administration/CLI_tasks/config.md
+++ b/docs/administration/CLI_tasks/config.md
@@ -1,4 +1,4 @@
-# Transfering the config to/from the database
+# Transferring the config to/from the database
{! backend/administration/CLI_tasks/general_cli_task_info.include !}
@@ -34,7 +34,7 @@
Options:
-- `` - where to save migrated config. E.g. `--path=/tmp`. If file saved into non standart folder, you must manually copy file into directory where Pleroma can read it. For OTP install path will be `PLEROMA_CONFIG_PATH` or `/etc/pleroma`. For installation from source - `config` directory in the pleroma folder.
+- `` - where to save migrated config. E.g. `--path=/tmp`. If the file is saved into a non-standard folder, you must manually copy it into a directory where Pleroma can read it. For OTP installs the path will be `PLEROMA_CONFIG_PATH` or `/etc/pleroma`. For installations from source it is the `config` directory in the pleroma folder.
- `` - environment, for which is migrated config. By default is `prod`.
- To delete transferred settings from database optional flag `-d` can be used
diff --git a/docs/administration/CLI_tasks/frontend.md b/docs/administration/CLI_tasks/frontend.md
index d4a48cb56..4e9d9eecb 100644
--- a/docs/administration/CLI_tasks/frontend.md
+++ b/docs/administration/CLI_tasks/frontend.md
@@ -22,7 +22,7 @@ Currently, known `` values are:
- [kenoma](http://git.pleroma.social/lambadalambda/kenoma)
- [pleroma-fe](http://git.pleroma.social/pleroma/pleroma-fe)
- [fedi-fe](https://git.pleroma.social/pleroma/fedi-fe)
-- [soapbox-fe](https://gitlab.com/soapbox-pub/soapbox-fe)
+- [soapbox](https://gitlab.com/soapbox-pub/soapbox)
You can still install frontends that are not configured, see below.
diff --git a/docs/administration/backup.md b/docs/administration/backup.md
index 5f279ab97..93325e702 100644
--- a/docs/administration/backup.md
+++ b/docs/administration/backup.md
@@ -31,7 +31,7 @@
1. Optionally you can remove the users of your instance. This will trigger delete requests for their accounts and posts. Note that this is 'best effort' and doesn't mean that all traces of your instance will be gone from the fediverse.
* You can do this from the admin-FE where you can select all local users and delete the accounts using the *Moderate multiple users* dropdown.
- * You can also list local users and delete them individualy using the CLI tasks for [Managing users](./CLI_tasks/user.md).
+ * You can also list local users and delete them individually using the CLI tasks for [Managing users](./CLI_tasks/user.md).
2. Stop the Pleroma service `systemctl stop pleroma`
3. Disable pleroma from systemd `systemctl disable pleroma`
4. Remove the files and folders you created during installation (see installation guide). This includes the pleroma, nginx and systemd files and folders.
diff --git a/docs/administration/frontends-management.md b/docs/administration/frontends-management.md
new file mode 100644
index 000000000..f982c4bca
--- /dev/null
+++ b/docs/administration/frontends-management.md
@@ -0,0 +1,71 @@
+# Managing installed frontends
+
+Pleroma lets you install multiple frontends, including multiple versions of the same frontend. Right now it's only possible to switch which frontend is the default, but in the future it will be possible for users to select which frontend they prefer to use.
+
+As of 2.6.0 there are two ways of managing frontends - through PleromaFE's Admin Dashboard (preferred, easier method) or through AdminFE (clunky but also works on versions older than 2.6.0).
+
+!!! note
+    Managing frontends through the UI requires [in-database configuration](../configuration/howto_database_config.md) to be enabled (default on newer instances but might be off on older ones).
+
+## How it works
+
+When a frontend is installed, Pleroma creates a folder in the [static directory](../configuration/static_dir.md) that follows this pattern: `/frontends/${front-end name}/${front-end version}/`, and puts the contents of the built frontend there. When the server is accessed, the backend checks which front-end name and version are set as the default and serves index.html and assets from the appropriate path.
+
+!!! warning
+
+    If you've been putting your frontend build directly into the static dir as an antiquated way of serving a custom frontend, this system will not work and will keep serving the custom index.html you put there. You can still serve custom frontend builds if you put your build into `/frontends/$name/$version` instead and set the "default frontend" fields appropriately.
+
+Currently there is no backup system: installing the `master` version _will_ overwrite the installed `master` version. For now, if you want to keep the previous version, back it up manually, e.g. by running `cp -r ./frontends/pleroma-fe/master ./frontends/pleroma-fe/master_old` in your static dir.
+
+## Managing front-ends through Admin Dashboard
+
+Open up the Admin Dashboard (gauge icon in the top bar, in the same place where the link to AdminFE used to be),
+![location of Admin Dashboard icon](../assets/admin_dash_location.png)
+then switch to the "Front-ends" tab.
+![screenshot of Front-ends tab](../assets/frontends_tab.png)
+This page is designed to be self-explanatory and easy to use, while avoiding the issues and pitfalls of AdminFE, but it's also early in development and everything is subject to change.
+
+!!! warning
+    This goes without saying, but if you set the default frontend to anything other than a >2.6.0 version of PleromaFE, you'll lose access to the Admin Dashboard and will have to use AdminFE to get it back. See below for how to use AdminFE.
+
+### Limitations
+
+Currently, the list of frontends available for install is essentially hard-coded in the backend's configuration, each providing only one version, with the exception of PleromaFE, which overrides 'pleroma-fe' to also include the `develop` version. There is no way to manually install a build from a URL (coming soon), nor to add more available frontends to the repository (it's broken).
+
+There is also no way to tell whether an update is available; for now you should watch for [announcements](https://pleroma.social/announcements/) of new PleromaFE stable releases to see if there is a new stable version. For the `develop` version it's up to you whether you want to follow the development process or just reinstall it periodically, hoping for new stuff.
+
+## Using AdminFE to manage frontends
+
+Access AdminFE either directly by going to `/pleroma/admin` on your instance or by opening the Admin Dashboard and clicking the link at the bottom of the window.
+![link to open old AdminFE](../assets/old_adminfe_link.png)
+
+
+Go to Settings -> Frontend.
+
+### Installing front-ends
+
+At the very top of the page there's a list of available frontends and a button to install a custom front-end.
+
+!!! tip
+ Remember to click "Submit" in bottom right corner to save your changes!
+    Remember to click "Submit" in the bottom right corner to save your changes!
+!!! bug
+    The **Available Frontends** section lets you _install_ frontends but **NOT** update/reinstall them. It's only useful for installing a frontend once.
+
+Due to the aforementioned bug, the preferred way of installing frontends in AdminFE is to click "Install another frontend"
+![screenshot of admin-fe with instructions on how to install a frontend](../assets/way_to_install_frontends.png)
+and fill in the fields. Unfortunately, AdminFE does not provide the raw data necessary to fill in those fields, so your best bet is to see what the backend returns in the browser's devtools or refer to the [source code](https://git.pleroma.social/pleroma/pleroma/-/blob/develop/config/config.exs?ref_type=heads#L742-791). For the most part, only the **Name**, **Ref** (i.e. version) and **Build URL** fields are required, although some frontends might also require **Build Directory** to work.
+
+For pleroma-fe you can use either the `master` or `develop` refs, or potentially any ref in GitLab that has artifacts for the `build` job, but that's outside the scope of this document.
+
+### Selecting default frontend
+
+Scroll the page waaaaay down to the "Frontends" section, subtitled "Installed frontends management", and change the name and reference of the "Primary" frontend.
+![screenshot of admin-fe's primary frontend section](../assets/primary_frontend_section.png)
+
+
+!!! danger
+ If you change "Admin" frontend name/reference you risk losing access to AdminFE as well.
+
+!!! warning
+ Don't put anything into the "Available" section as it will break the list of available frontends completely, including the "add another frontend" button. If you accidentally put something in there, click the trashbin icon next to "Available" to reset it and restore the frontends list.
diff --git a/docs/administration/updating.md b/docs/administration/updating.md
index 01d3b9b0e..00eca36a0 100644
--- a/docs/administration/updating.md
+++ b/docs/administration/updating.md
@@ -17,7 +17,7 @@ su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
## For from source installations (using git)
1. Go to the working directory of Pleroma (default is `/opt/pleroma`)
-2. Run `git pull` [^1]. This pulls the latest changes from upstream.
+2. Run `git checkout <version>` [^1], e.g. `git checkout v2.4.5`. This pulls the [tagged release](https://git.pleroma.social/pleroma/pleroma/-/releases) from upstream.
3. Run `mix deps.get` [^1]. This pulls in any new dependencies.
4. Stop the Pleroma service.
5. Run `mix ecto.migrate` [^1] [^2]. This task performs database migrations, if there were any.
diff --git a/docs/assets/admin_dash_location.png b/docs/assets/admin_dash_location.png
new file mode 100644
index 000000000..4e1d110e7
Binary files /dev/null and b/docs/assets/admin_dash_location.png differ
diff --git a/docs/assets/frontends_tab.png b/docs/assets/frontends_tab.png
new file mode 100644
index 000000000..f7c66adab
Binary files /dev/null and b/docs/assets/frontends_tab.png differ
diff --git a/docs/assets/old_adminfe_link.png b/docs/assets/old_adminfe_link.png
new file mode 100644
index 000000000..5ea6a486c
Binary files /dev/null and b/docs/assets/old_adminfe_link.png differ
diff --git a/docs/assets/primary_frontend_section.png b/docs/assets/primary_frontend_section.png
new file mode 100644
index 000000000..14c3de41b
Binary files /dev/null and b/docs/assets/primary_frontend_section.png differ
diff --git a/docs/assets/way_to_install_frontends.png b/docs/assets/way_to_install_frontends.png
new file mode 100644
index 000000000..a90ff2b5d
Binary files /dev/null and b/docs/assets/way_to_install_frontends.png differ
diff --git a/docs/clients.md b/docs/clients.md
index 31d2d27c3..ad7eb7807 100644
--- a/docs/clients.md
+++ b/docs/clients.md
@@ -3,12 +3,6 @@ Note: Additional clients may be working but theses are officially supporting Ple
Feel free to contact us to be added to this list!
## Desktop
-### Roma for Desktop
-- Homepage:
-- Source Code:
-- Platforms: Windows, Mac, Linux
-- Features: MastoAPI, Streaming Ready
-
### Social
- Source Code:
- Contact: [@brainblasted@social.libre.fi](https://social.libre.fi/users/brainblasted)
@@ -19,7 +13,14 @@ Feel free to contact us to be added to this list!
### Whalebird
- Homepage:
- Source Code:
-- Contact: [@h3poteto@pleroma.io](https://pleroma.io/users/h3poteto)
+- Contact: [@whalebird@pleroma.io](https://pleroma.io/users/whalebird)
+- Platforms: Windows, Mac, Linux
+- Features: MastoAPI, Streaming Ready
+
+### Fedistar
+- Homepage:
+- Source Code:
+- Contact: [@fedistar@pleroma.io](https://pleroma.io/users/fedistar)
- Platforms: Windows, Mac, Linux
- Features: MastoAPI, Streaming Ready
diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md
index d35b33574..ca2ce6369 100644
--- a/docs/configuration/cheatsheet.md
+++ b/docs/configuration/cheatsheet.md
@@ -18,6 +18,7 @@ To add configuration to your config file, you can copy it from the base config.
* `email`: Email used to reach an Administrator/Moderator of the instance.
* `notify_email`: Email used for notifications.
* `description`: The instance’s description, can be seen in nodeinfo and ``/api/v1/instance``.
+* `short_description`: Shorter version of instance description, can be seen on ``/api/v1/instance``.
* `limit`: Posts character limit (CW/Subject included in the counter).
* `description_limit`: The character limit for image descriptions.
* `remote_limit`: Hard character limit beyond which remote posts will be dropped.
@@ -48,6 +49,7 @@ To add configuration to your config file, you can copy it from the base config.
* `autofollowing_nicknames`: Set to nicknames of (local) users that automatically follows every newly registered user.
* `attachment_links`: Set to true to enable automatically adding attachment link text to statuses.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`).
+* `report_strip_status`: Strip associated statuses in reports to ids when closed/resolved, otherwise keep a copy.
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). Default: `false`.
* `healthcheck`: If set to true, system data will be shown on ``/api/v1/pleroma/healthcheck``.
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
@@ -64,6 +66,36 @@ To add configuration to your config file, you can copy it from the base config.
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
* `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
* `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
+* `admin_privileges`: A list of privileges an admin has (e.g. delete messages, manage reports...)
+ * Possible values are:
+ * `:users_read`
+ * Allows admins to fetch users through the admin API.
+ * `:users_manage_invites`
+ * Allows admins to manage invites. This includes sending, resending, revoking and approving invites.
+ * `:users_manage_activation_state`
+ * Allows admins to activate and deactivate accounts. This also allows them to see deactivated users through the Mastodon API.
+ * `:users_manage_tags`
+ * Allows admins to set and remove tags for users. This can be useful in combination with MRF policies, such as `Pleroma.Web.ActivityPub.MRF.TagPolicy`.
+ * `:users_manage_credentials`
+    * Allows admins to trigger a password reset and set new credentials for a user.
+ * `:users_delete`
+ * Allows admins to delete accounts. Note that deleting an account is actually deactivating it and removing all data like posts, profile information, etc.
+ * `:messages_read`
+ * Allows admins to read messages through the admin API, including non-public posts and chats.
+ * `:messages_delete`
+ * Allows admins to delete messages from other users.
+  * `:instances_delete`
+ * Allows admins to remove a whole remote instance from your instance. This will delete all users and messages from that remote instance.
+ * `:reports_manage_reports`
+ * Allows admins to see and manage reports.
+  * `:moderation_log_read`
+ * Allows admins to read the entries in the moderation log.
+ * `:emoji_manage_emoji`
+ * Allows admins to manage custom emoji on the instance.
+  * `:statistics_read`
+ * Allows admins to see some simple statistics about the instance.
+* `moderator_privileges`: A list of privileges a moderator has (e.g. delete messages, manage reports...)
+ * Possible values are the same as for `admin_privileges`
## :database
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
@@ -122,12 +154,15 @@ To add configuration to your config file, you can copy it from the base config.
* `Pleroma.Web.ActivityPub.MRF.MentionPolicy`: Drops posts mentioning configurable users. (See [`:mrf_mention`](#mrf_mention)).
* `Pleroma.Web.ActivityPub.MRF.VocabularyPolicy`: Restricts activities to a configured set of vocabulary. (See [`:mrf_vocabulary`](#mrf_vocabulary)).
* `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)).
- * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled delections.
+ * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled deletions.
* `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts disappear from public timelines.
* `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. Local accounts, locked accounts, and users with "#nobot" in their bio are respected and excluded from being followed.
* `Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy`: Drops follow requests from followbots. Users can still allow bots to follow them by first following the bot.
* `Pleroma.Web.ActivityPub.MRF.KeywordPolicy`: Rejects or removes from the federated timeline or replaces keywords. (See [`:mrf_keyword`](#mrf_keyword)).
* `Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent`: Forces every mentioned user to be reflected in the post content.
+ * `Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy`: Forces quote post URLs to be reflected in the message content inline.
+ * `Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy`: Forces a Link tag for posts quoting another post (this may break outgoing federation of quote posts with older Pleroma versions).
+ * `Pleroma.Web.ActivityPub.MRF.ForceMention`: Forces posts to include a mention of the author of the parent post or the author of the quoted post.
* `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
@@ -203,7 +238,7 @@ config :pleroma, :mrf_user_allowlist, %{
e.g., A value of 900 means any post with a timestamp older than 15 minutes will be acted upon.
* `actions`: A list of actions to apply to the post:
* `:delist` removes the post from public timelines
- * `:strip_followers` removes followers from the ActivityPub recipient list, ensuring they won't be delivered to home timelines
+ * `:strip_followers` removes followers from the ActivityPub recipient list, ensuring they won't be delivered to home timelines; additionally, followers-only posts are degraded to direct messages (see the configuration sketch after this list)
* `:reject` rejects the message entirely
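+
+A minimal sketch of how this policy could be configured, reusing the 900-second threshold from the example above (the selected actions are illustrative):
+
+```elixir
+config :pleroma, :mrf_object_age,
+  threshold: 900,
+  actions: [:delist, :strip_followers]
+```
+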
#### :mrf_steal_emoji
@@ -229,6 +264,18 @@ Notes:
* `follower_nickname`: The name of the bot account to use for following newly discovered users. Using `followbot` or similar is strongly suggested.
+#### :mrf_emoji
+* `remove_url`: A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
+* `remove_shortcode`: A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
+* `federated_timeline_removal_url`: A list of patterns which result in messages with emoji whose URLs match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
+* `federated_timeline_removal_shortcode`: A list of patterns which result in messages with emoji whose shortcodes match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses. Each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html). See the example below.
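+
+A minimal, illustrative sketch of this policy's configuration; the patterns below are made-up examples, mixing plain strings and `~r//` regular expressions:
+
+```elixir
+config :pleroma, :mrf_emoji,
+  remove_url: [~r/malicious\.example\.org/],
+  remove_shortcode: ["spam_emoji"],
+  federated_timeline_removal_shortcode: [~r/^nsfw_/]
+```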
+
+#### :mrf_inline_quote
+* `template`: The template to append to the post. `{url}` will be replaced with the actual link to the quoted post. Default: `RT: {url}`
+
+#### :mrf_force_mention
+* `mention_parent`: Whether to append a mention of the parent post's author
+* `mention_quoted`: Whether to append a mention of the quoted post's author (see the example below)
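+
+A small sketch combining both policies with the defaults described above (the values shown are illustrative):
+
+```elixir
+config :pleroma, :mrf_inline_quote, template: "RT: {url}"
+
+config :pleroma, :mrf_force_mention,
+  mention_parent: true,
+  mention_quoted: true
+```
+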
### :activitypub
* `unfollow_blocked`: Whether blocks result in people getting unfollowed
@@ -464,7 +511,7 @@ config :pleroma, :rate_limit,
Means that:
1. In 60 seconds, 15 authentication attempts can be performed from the same IP address.
-2. In 1 second, 10 search requests can be performed from the same IP adress by unauthenticated users, while authenticated users can perform 30 search requests per second.
+2. In 1 second, 10 search requests can be performed from the same IP address by unauthenticated users, while authenticated users can perform 30 search requests per second.
Supported rate limiters:
@@ -652,6 +699,12 @@ This filter reads the ImageDescription and iptc:Caption-Abstract fields with Exi
No specific configuration.
+#### Pleroma.Upload.Filter.OnlyMedia
+
+This filter rejects uploads that are not identified with a Content-Type matching audio/\*, image/\*, or video/\*.
+
+No specific configuration.
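+
+Like the other upload filters, it only takes effect once it is added to the `Pleroma.Upload` filter list. A minimal sketch; the `Dedupe` filter shown alongside it is just an example of a pre-existing setup:
+
+```elixir
+config :pleroma, Pleroma.Upload,
+  filters: [Pleroma.Upload.Filter.Dedupe, Pleroma.Upload.Filter.OnlyMedia]
+```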
+
#### Pleroma.Upload.Filter.Mogrify
* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.
@@ -789,7 +842,7 @@ Web Push Notifications configuration. You can use the mix task `mix web_push.gen
* ``private_key``: VAPID private key
## :logger
-* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack
+* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog
An example to enable ONLY ExSyslogger (f/ex in ``prod.secret.exs``) with info and debug suppressed:
```elixir
@@ -812,10 +865,10 @@ config :logger, :ex_syslogger,
See: [logger’s documentation](https://hexdocs.pm/logger/Logger.html) and [ex_syslogger’s documentation](https://hexdocs.pm/ex_syslogger/)
-An example of logging info to local syslog, but warn to a Slack channel:
+An example of logging info to local syslog, but debug to console:
```elixir
config :logger,
- backends: [ {ExSyslogger, :ex_syslogger}, Quack.Logger ],
+ backends: [ {ExSyslogger, :ex_syslogger}, :console ],
level: :info
config :logger, :ex_syslogger,
@@ -823,14 +876,12 @@ config :logger, :ex_syslogger,
ident: "pleroma",
format: "$metadata[$level] $message"
-config :quack,
- level: :warn,
- meta: [:all],
- webhook_url: "https://hooks.slack.com/services/YOUR-API-KEY-HERE"
+config :logger, :console,
+ level: :debug,
+ format: "\n$time $metadata[$level] $message\n",
+ metadata: [:request_id]
```
-See the [Quack Github](https://github.com/azohra/quack) for more details
-
## Database options
@@ -856,21 +907,8 @@ This will probably take a long time.
### BBS / SSH access
-To enable simple command line interface accessible over ssh, add a setting like this to your configuration file:
-
-```exs
-app_dir = File.cwd!
-priv_dir = Path.join([app_dir, "priv/ssh_keys"])
-
-config :esshd,
- enabled: true,
- priv_dir: priv_dir,
- handler: "Pleroma.BBS.Handler",
- port: 10_022,
- password_authenticator: "Pleroma.BBS.Authenticator"
-```
-
-Feel free to adjust the priv_dir and port number. Then you will have to create the key for the keys (in the example `priv/ssh_keys`) and create the host keys with `ssh-keygen -m PEM -N "" -b 2048 -t rsa -f ssh_host_rsa_key`. After restarting, you should be able to connect to your Pleroma instance with `ssh username@server -p $PORT`
+This feature has been removed from Pleroma core.
+However, a client has been made and is available at https://git.pleroma.social/Duponin/sshocial.
### :gopher
* `enabled`: Enables the gopher interface
@@ -1061,7 +1099,7 @@ config :pleroma, Pleroma.Formatter,
## :configurable_from_database
-Boolean, enables/disables in-database configuration. Read [Transfering the config to/from the database](../administration/CLI_tasks/config.md) for more information.
+Boolean, enables/disables in-database configuration. Read [Transferring the config to/from the database](../administration/CLI_tasks/config.md) for more information.
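+
+A minimal example of enabling it from the config file:
+
+```elixir
+config :pleroma, configurable_from_database: true
+```
+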
## :database_config_whitelist
@@ -1122,7 +1160,7 @@ Control favicons for instances.
!!! note
Requires enabled email
-* `:purge_after_days` an integer, remove backup achives after N days.
+* `:purge_after_days` an integer, remove backup archives after N days.
* `:limit_days` an integer, limit user to export not more often than once per N days.
* `:dir` a string with a path to backup temporary directory or `nil` to let Pleroma choose temporary directory in the following order:
1. the directory named by the TMPDIR environment variable
diff --git a/docs/configuration/custom_emoji.md b/docs/configuration/custom_emoji.md
index 1648840fd..19250cf80 100644
--- a/docs/configuration/custom_emoji.md
+++ b/docs/configuration/custom_emoji.md
@@ -29,7 +29,7 @@ foo, /emoji/custom/foo.png
The files should be PNG (APNG is okay with `.png` for `image/png` Content-type) and under 50kb for compatibility with mastodon.
-Default file extentions and locations for emojis are set in `config.exs`. To use different locations or file-extentions, add the `shortcode_globs` to your secrets file (`prod.secret.exs` or `dev.secret.exs`) and edit it. Note that not all fediverse-software will show emojis with other file extentions:
+Default file extensions and locations for emojis are set in `config.exs`. To use different locations or file-extensions, add the `shortcode_globs` to your secrets file (`prod.secret.exs` or `dev.secret.exs`) and edit it. Note that not all fediverse-software will show emojis with other file extensions:
```elixir
config :pleroma, :emoji, shortcode_globs: ["/emoji/custom/**/*.png", "/emoji/custom/**/*.gif"]
```
diff --git a/docs/configuration/hardening.md b/docs/configuration/hardening.md
index d3bfc4e4a..cc46d1ff9 100644
--- a/docs/configuration/hardening.md
+++ b/docs/configuration/hardening.md
@@ -62,6 +62,20 @@ An additional “Expect-CT” header will be sent with the configured `ct_max_ag
If you click on a link, your browser’s request to the other site will include from where it is coming from. The “Referrer policy” header tells the browser how and if it should send this information. (see [Referrer policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy))
+### Uploaded media and media proxy
+
+It is STRONGLY RECOMMENDED to serve both locally-uploaded media and the media proxy from a different domain than the one Pleroma itself runs on, if applicable.
+
+```elixir
+config :pleroma, :media_proxy,
+ base_url: "https://some.other.domain"
+
+config :pleroma, Pleroma.Upload,
+ base_url: "https://some.other.domain/media"
+```
+
+See `installation/pleroma-mediaproxy.nginx` for examples on how to configure your media proxy.
+
## systemd
A systemd unit example is provided at `installation/pleroma.service`.
diff --git a/docs/configuration/how_to_serve_another_domain_for_webfinger.md b/docs/configuration/how_to_serve_another_domain_for_webfinger.md
new file mode 100644
index 000000000..5ae3e7943
--- /dev/null
+++ b/docs/configuration/how_to_serve_another_domain_for_webfinger.md
@@ -0,0 +1,62 @@
+# How to use a different domain name for Pleroma and the users it serves
+
+Pleroma users are primarily identified by a `user@example.org` handle, and you might want this identifier to be the same as your email or jabber account, for instance.
+However, in this case, you are almost certainly serving some web content on `https://example.org` already, and you might want to use another domain (say `pleroma.example.org`) for Pleroma itself.
+
+Pleroma supports that, but it might be tricky to set up, and any error might prevent you from federating with other instances.
+
+*If you are already running Pleroma on `example.org`, it is no longer possible to move it to `pleroma.example.org`.*
+
+## Account identifiers
+
+It is important to understand that for federation purposes, a user in Pleroma has two unique identifiers associated:
+
+- A webfinger `acct:` URI, used for discovery and as a verifiable global name for the user across Pleroma instances. In our example, our account's acct: URI is `acct:user@example.org`
+- An author/actor URI, used in every other aspect of federation. This is the way in which users are identified in ActivityPub, the underlying protocol used for federation with other Pleroma instances.
+In our case, it is `https://pleroma.example.org/users/user`.
+
+Both account identifiers are unique and required for Pleroma. An important risk if you set up your Pleroma instance incorrectly is to create two users (with different acct: URIs) with conflicting author/actor URIs.
+
+## WebFinger
+
+As said earlier, each Pleroma user has an `acct`: URI, which is used for discovery and authentication. When you add @user@example.org, a webfinger query is performed. This is done in two steps:
+
+1. Querying `https://example.org/.well-known/host-meta` (where the domain of the URL matches the domain part of the `acct`: URI) to get information on how to perform the query.
+This file will indeed contain a URL template of the form `https://example.org/.well-known/webfinger?resource={uri}` that will be used in the second step.
+2. Fill the returned template with the `acct`: URI to be queried and perform the query: `https://example.org/.well-known/webfinger?resource=acct:user@example.org`
+
+## Configuring your Pleroma instance
+
+**_DO NOT ATTEMPT TO CONFIGURE YOUR INSTANCE THIS WAY IF YOU DID NOT UNDERSTAND THE ABOVE_**
+
+### Configuring Pleroma
+
+Pleroma has two configuration settings to enable using different domains for your users and Pleroma itself: `host` in `Pleroma.Web.Endpoint` and `domain` in `Pleroma.Web.WebFinger`. When the latter is not set, it defaults to the value of `host`.
+
+*Be extra careful when configuring your Pleroma instance, as changing `host` may cause remote instances to register different accounts with the same author/actor URI, which will result in federation issues!*
+
+```elixir
+config :pleroma, Pleroma.Web.Endpoint,
+ url: [host: "pleroma.example.org"]
+
+config :pleroma, Pleroma.Web.WebFinger, domain: "example.org"
+```
+
+- `domain` - is the domain for which your Pleroma instance has authority; it's the domain used in the `acct:` URI. In our example, `domain` would be set to `example.org`. This is used in WebFinger account ids, which are the canonical account identifier in some other fediverse software like Mastodon. **If you change `domain`, the accounts on your server will be shown as different accounts in that software**.
+- `host` - is the domain used for any URL generated for your instance, including the author/actor URLs. In our case, that would be `pleroma.example.org`. This is used in AP ids, which are the canonical account identifier in Pleroma and some other fediverse software. **You should not change this after you have set up the instance**.
+
+### Configuring WebFinger domain
+
+Now, you have Pleroma running at `https://pleroma.example.org` as well as a website at `https://example.org`. If you recall how webfinger queries work, the first step is to query `https://example.org/.well-known/host-meta`, which will contain a URL template.
+
+Therefore, the easiest way to configure `example.org` is to redirect `/.well-known/host-meta` to `pleroma.example.org`.
+
+With nginx, it would be as simple as adding:
+
+```nginx
+location = /.well-known/host-meta {
+ return 301 https://pleroma.example.org$request_uri;
+}
+```
+
+in example.org's server block.
diff --git a/docs/configuration/howto_database_config.md b/docs/configuration/howto_database_config.md
index ae1462f9b..e5af9097a 100644
--- a/docs/configuration/howto_database_config.md
+++ b/docs/configuration/howto_database_config.md
@@ -59,7 +59,7 @@ The configuration of Pleroma has traditionally been managed with a config file,
Here is an example of a server config stripped down after migration:
```
- use Mix.Config
+ import Config
config :pleroma, Pleroma.Web.Endpoint,
url: [host: "cool.pleroma.site", scheme: "https", port: 443]
diff --git a/docs/configuration/i2p.md b/docs/configuration/i2p.md
index 8c5207d67..17dd9b0cb 100644
--- a/docs/configuration/i2p.md
+++ b/docs/configuration/i2p.md
@@ -1,4 +1,4 @@
-# I2P Federation and Accessability
+# I2P Federation and Accessibility
This guide is going to focus on the Pleroma federation aspect. The actual installation is neatly explained in the official documentation, and more likely to remain up-to-date.
It might be added to this guide if there will be a need for that.
diff --git a/docs/configuration/onion_federation.md b/docs/configuration/onion_federation.md
index 37673211a..8a8137251 100644
--- a/docs/configuration/onion_federation.md
+++ b/docs/configuration/onion_federation.md
@@ -29,7 +29,7 @@ HiddenServiceDir /var/lib/tor/pleroma_hidden_service/
HiddenServicePort 80 127.0.0.1:8099
HiddenServiceVersion 3 # Remove if Tor version is below 0.3 ( tor --version )
```
-Restart Tor to generate an adress:
+Restart Tor to generate an address:
```
systemctl restart tor@default.service
```
diff --git a/docs/configuration/optimizing_beam.md b/docs/configuration/optimizing_beam.md
index e336bd36c..5e81cd003 100644
--- a/docs/configuration/optimizing_beam.md
+++ b/docs/configuration/optimizing_beam.md
@@ -1,6 +1,6 @@
# Optimizing the BEAM
-Pleroma is built upon the Erlang/OTP VM known as BEAM. The BEAM VM is highly optimized for latency, but this has drawbacks in environments without dedicated hardware. One of the tricks used by the BEAM VM is [busy waiting](https://en.wikipedia.org/wiki/Busy_waiting). This allows the application to pretend to be busy working so the OS kernel does not pause the application process and switch to another process waiting for the CPU to execute its workload. It does this by spinning for a period of time which inflates the apparent CPU usage of the application so it is immediately ready to execute another task. This can be observed with utilities like **top(1)** which will show consistently high CPU usage for the process. Switching between procesess is a rather expensive operation and also clears CPU caches further affecting latency and performance. The goal of busy waiting is to avoid this penalty.
+Pleroma is built upon the Erlang/OTP VM known as BEAM. The BEAM VM is highly optimized for latency, but this has drawbacks in environments without dedicated hardware. One of the tricks used by the BEAM VM is [busy waiting](https://en.wikipedia.org/wiki/Busy_waiting). This allows the application to pretend to be busy working so the OS kernel does not pause the application process and switch to another process waiting for the CPU to execute its workload. It does this by spinning for a period of time which inflates the apparent CPU usage of the application so it is immediately ready to execute another task. This can be observed with utilities like **top(1)** which will show consistently high CPU usage for the process. Switching between processes is a rather expensive operation and also clears CPU caches further affecting latency and performance. The goal of busy waiting is to avoid this penalty.
This strategy is very successful in making a performant and responsive application, but is not desirable on Virtual Machines or hardware with few CPU cores. Pleroma instances are often deployed on the same server as the required PostgreSQL database which can lead to situations where the Pleroma application is holding the CPU in a busy-wait loop and as a result the database cannot process requests in a timely manner. The fewer CPUs available, the more this problem is exacerbated. The latency is further amplified by the OS being installed on a Virtual Machine as the Hypervisor uses CPU time-slicing to pause the entire OS and switch between other tasks.
diff --git a/docs/configuration/postgresql.md b/docs/configuration/postgresql.md
index e251eb83b..56f1c60dc 100644
--- a/docs/configuration/postgresql.md
+++ b/docs/configuration/postgresql.md
@@ -22,7 +22,7 @@ config :pleroma, Pleroma.Repo,
]
```
-A more detailed explaination of the issue can be found at .
+A more detailed explanation of the issue can be found at .
## Example configurations
diff --git a/docs/configuration/search.md b/docs/configuration/search.md
new file mode 100644
index 000000000..0316c9bf4
--- /dev/null
+++ b/docs/configuration/search.md
@@ -0,0 +1,123 @@
+# Configuring search
+
+{! backend/administration/CLI_tasks/general_cli_task_info.include !}
+
+## Built-in search
+
+To use the built-in search, which has no external dependencies, set the search module to `Pleroma.Search.DatabaseSearch`:
+
+> config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
+
+While it has no external dependencies, it has problems with performance and relevancy.
+
+## Meilisearch
+
+Note that it's quite a bit more memory-hungry than PostgreSQL (around 4-5G for ~1.2 million
+posts while idle and up to 7G while indexing initially). The disk usage for this additional index is also
+around 4 gigabytes. Like [RUM](./cheatsheet.md#rum-indexing-for-full-text-search) indexes, it offers considerably
+higher performance and ordering by timestamp in a reasonable amount of time.
+Additionally, the search results seem to be more accurate.
+
+Due to its high memory usage, it may be best to set Meilisearch up on a different machine if you are running Pleroma on a
+low-resource computer, and to use private key authentication to secure the remote search instance.
+
+To use [meilisearch](https://www.meilisearch.com/), set the search module to `Pleroma.Search.Meilisearch`:
+
+> config :pleroma, Pleroma.Search, module: Pleroma.Search.Meilisearch
+
+You then need to set the address of the meilisearch instance, and optionally the private key for authentication. You might
+also want to change the `initial_indexing_chunk_size` to be smaller if your server is not very powerful, but not higher than `100_000`,
+because meilisearch will refuse to process it if it's too big. However, in general you want this to be as big as possible, because meilisearch
+indexes faster when it can process many posts in a single batch.
+
+> config :pleroma, Pleroma.Search.Meilisearch,
+> url: "http://127.0.0.1:7700/",
+> private_key: "private key",
+> initial_indexing_chunk_size: 100_000
+
+Information about setting up meilisearch can be found in the
+[official documentation](https://docs.meilisearch.com/learn/getting_started/installation.html).
+You probably want to start it with the `MEILI_NO_ANALYTICS=true` environment variable to disable analytics.
+At least version 0.25.0 is required, but you are strongly advised to use at least 0.26.0, as it introduces
+the `--enable-auto-batching` option which drastically improves performance. Without this option, the search
+is hardly usable on a somewhat big instance.
+
+### Private key authentication (optional)
+
+To set the private key, use the `MEILI_MASTER_KEY` environment variable when starting. After setting the _master key_,
+you have to get the _private key_, which is actually used for authentication.
+
+=== "OTP"
+ ```sh
+ ./bin/pleroma_ctl search.meilisearch show-keys
+ ```
+
+=== "From Source"
+ ```sh
+ mix pleroma.search.meilisearch show-keys
+ ```
+
+You will see a "Default Admin API Key"; this is the key you actually put into your configuration file, as in the sketch below.
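+
+A sketch of where that key goes, assuming the same `Pleroma.Search.Meilisearch` settings shown earlier (the key value here is a placeholder):
+
+```elixir
+config :pleroma, Pleroma.Search.Meilisearch,
+  url: "http://127.0.0.1:7700/",
+  private_key: "<Default Admin API Key>"
+```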
+
+### Initial indexing
+
+After setting up the configuration, you'll want to index all of your already existing posts. Only public posts are indexed. You'll only
+have to do it once, but it might take a while, depending on the number of posts your instance has seen. This is also a fairly
+RAM-consuming process for `meilisearch`, and it will take a lot of RAM when running if you have a lot of posts (seems to be around 5G for ~1.2
+million posts while idle and up to 7G while indexing initially, but your experience may be different).
+
+The sequence of actions is as follows:
+
+1. First, change the configuration to use `Pleroma.Search.Meilisearch` as the search backend
+2. Restart your instance; at this point it can be used while the search indexing is running, though search won't return anything
+3. Start the initial indexing process (as described below with `index`),
+ and wait until the task says it sent everything from the database to index
+4. Wait until everything is actually indexed (by checking with `stats` as described below);
+ at this point you don't have to do anything, just wait a while.
+
+To start the initial indexing, run the `index` command:
+
+=== "OTP"
+ ```sh
+ ./bin/pleroma_ctl search.meilisearch index
+ ```
+
+=== "From Source"
+ ```sh
+ mix pleroma.search.meilisearch index
+ ```
+
+This will show you the total number of posts to index, and then show you the number of posts indexed so far, until the numbers eventually
+become the same. The posts are indexed in big batches and meilisearch will take some time to actually index them, even after you have
+inserted all the posts into it. Depending on the number of posts, this may take as long as several hours. To get information about the status
+of indexing and how many posts have actually been indexed, use the `stats` command:
+
+=== "OTP"
+ ```sh
+ ./bin/pleroma_ctl search.meilisearch stats
+ ```
+
+=== "From Source"
+ ```sh
+ mix pleroma.search.meilisearch stats
+ ```
+
+### Clearing the index
+
+In case you need to clear the index (for example, to re-index from scratch, if that needs to happen for some reason), you can
+use the `clear` command:
+
+=== "OTP"
+ ```sh
+ ./bin/pleroma_ctl search.meilisearch clear
+ ```
+
+=== "From Source"
+ ```sh
+ mix pleroma.search.meilisearch clear
+ ```
+
+This will clear **all** the posts from the search index. Note that deleted posts are also removed from the index by the instance itself, so
+there is no need to actually clear the whole index, unless you want **all** of it gone. That said, the index does not hold any information
+that cannot be re-created from the database; it should also generally be a lot smaller than the size of your database. Still, the size
+depends on the amount of text in posts.
diff --git a/docs/development/API/admin_api.md b/docs/development/API/admin_api.md
index c46f83839..5b373b8e1 100644
--- a/docs/development/API/admin_api.md
+++ b/docs/development/API/admin_api.md
@@ -303,7 +303,7 @@ Removes the user(s) from follower recommendations.
## `GET /api/v1/pleroma/admin/users/:nickname_or_id`
-### Retrive the details of a user
+### Retrieve the details of a user
- Params:
- `nickname` or `id`
@@ -313,7 +313,7 @@ Removes the user(s) from follower recommendations.
## `GET /api/v1/pleroma/admin/users/:nickname_or_id/statuses`
-### Retrive user's latest statuses
+### Retrieve user's latest statuses
- Params:
- `nickname` or `id`
@@ -337,7 +337,7 @@ Removes the user(s) from follower recommendations.
## `GET /api/v1/pleroma/admin/instances/:instance/statuses`
-### Retrive instance's latest statuses
+### Retrieve instance's latest statuses
- Params:
- `instance`: instance name
@@ -377,7 +377,7 @@ It may take some time.
## `GET /api/v1/pleroma/admin/statuses`
-### Retrives all latest statuses
+### Retrieves all latest statuses
- Params:
- *optional* `page_size`: number of statuses to return (default is `20`)
@@ -541,7 +541,7 @@ Response:
## `PATCH /api/v1/pleroma/admin/users/force_password_reset`
-### Force passord reset for a user with a given nickname
+### Force password reset for a user with a given nickname
- Params:
- `nicknames`
@@ -1064,7 +1064,6 @@ List of settings which support only full update by key:
```elixir
@full_key_update [
{:pleroma, :ecto_repos},
- {:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:auto_linker, :opts},
@@ -1084,18 +1083,18 @@ List of settings which support only full update by subkey:
]
```
-*Settings without explicit key must be sended in separate config object params.*
+*Settings without explicit key must be sent in separate config object params.*
```elixir
-config :quack,
- level: :debug,
- meta: [:all],
+config :foo,
+ bar: :baz,
+ meta: [:data],
...
```
```json
{
"configs": [
- {"group": ":quack", "key": ":level", "value": ":debug"},
- {"group": ":quack", "key": ":meta", "value": [":all"]},
+ {"group": ":foo", "key": ":bar", "value": ":baz"},
+ {"group": ":foo", "key": ":meta", "value": [":data"]},
...
]
}
@@ -1586,6 +1585,7 @@ Returns the content of the document
"build_url": "https://git.pleroma.social/pleroma/fedi-fe/-/jobs/artifacts/${ref}/download?job=build",
"git": "https://git.pleroma.social/pleroma/fedi-fe",
"installed": true,
+ "installed_refs": ["master"],
"name": "fedi-fe",
"ref": "master"
},
@@ -1593,6 +1593,7 @@ Returns the content of the document
"build_url": "https://git.pleroma.social/lambadalambda/kenoma/-/jobs/artifacts/${ref}/download?job=build",
"git": "https://git.pleroma.social/lambadalambda/kenoma",
"installed": false,
+ "installed_refs": [],
"name": "kenoma",
"ref": "master"
}
@@ -1750,3 +1751,53 @@ Note that this differs from the Mastodon API variant: Mastodon API only returns
```json
{}
```
+
+
+## `GET /api/v1/pleroma/admin/rules`
+
+### List rules
+
+- Response: JSON, list of rules
+
+```json
+[
+ {
+ "id": "1",
+ "priority": 1,
+ "text": "There are no rules",
+ "hint": null
+ }
+]
+```
+
+## `POST /api/v1/pleroma/admin/rules`
+
+### Create a rule
+
+- Params:
+ - `text`: string, required, rule content
+ - `hint`: string, optional, rule description
+ - `priority`: integer, optional, rule ordering priority
+
+- Response: JSON, a single rule
+
+## `PATCH /api/v1/pleroma/admin/rules/:id`
+
+### Update a rule
+
+- Params:
+ - `text`: string, optional, rule content
+ - `hint`: string, optional, rule description
+ - `priority`: integer, optional, rule ordering priority
+
+- Response: JSON, a single rule
+
+## `DELETE /api/v1/pleroma/admin/rules/:id`
+
+### Delete a rule
+
+- Response: JSON, empty object
+
+```json
+{}
+```
diff --git a/docs/development/API/differences_in_mastoapi_responses.md b/docs/development/API/differences_in_mastoapi_responses.md
index 73c46fff8..e3b6a3c77 100644
--- a/docs/development/API/differences_in_mastoapi_responses.md
+++ b/docs/development/API/differences_in_mastoapi_responses.md
@@ -1,6 +1,6 @@
# Differences in Mastodon API responses from vanilla Mastodon
-A Pleroma instance can be identified by " (compatible; Pleroma )" present in `version` field in response from `/api/v1/instance`
+A Pleroma instance can be identified by "<Mastodon version> (compatible; Pleroma <version>)" present in the `version` field in the response from `/api/v1/instance` and `/api/v2/instance`
## Flake IDs
@@ -39,6 +39,13 @@ Has these additional fields under the `pleroma` object:
- `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
- `parent_visible`: If the parent of this post is visible to the user or not.
- `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise.
+- `quotes_count`: the count of status quotes.
+- `non_anonymous`: true if the source post specifies the poll results are not anonymous. Currently only implemented by Smithereen.
+- `bookmark_folder`: the ID of the folder bookmark is stored within (if any).
+
+The `GET /api/v1/statuses/:id/source` endpoint additionally has the following attributes:
+
+- `content_type`: The content type of the status source.
## Scheduled statuses
@@ -60,6 +67,12 @@ Some apps operate under the assumption that no more than 4 attachments can be re
Pleroma does not process remote images and therefore cannot include fields such as `meta` and `blurhash`. It does not support focal points or aspect ratios. The frontend is expected to handle it.
+## Bookmarks
+
+The `GET /api/v1/bookmarks` endpoint accepts optional parameter `folder_id` for bookmark folder ID.
+
+The `POST /api/v1/statuses/:id/bookmark` endpoint accepts optional parameter `folder_id` for bookmark folder ID.
+
## Accounts
The `id` parameter can also be the `nickname` of the user. This only works in these endpoints, not the deeper nested ones for following etc.
@@ -300,19 +313,27 @@ Has these additional parameters (which are the same as in Pleroma-API):
`GET /api/v1/instance` has additional fields
- `max_toot_chars`: The maximum characters per post
+- `max_media_attachments`: Maximum number of post media attachments
- `chat_limit`: The maximum characters per chat message
- `description_limit`: The maximum characters per image description
- `poll_limits`: The limits of polls
+- `shout_limit`: The maximum characters per Shoutbox message
- `upload_limit`: The maximum upload file size
- `avatar_upload_limit`: The same for avatars
- `background_upload_limit`: The same for backgrounds
- `banner_upload_limit`: The same for banners
- `background_image`: A background image that frontends can use
+- `pleroma.metadata.account_activation_required`: Whether users are required to confirm their emails before signing in
+- `pleroma.metadata.birthday_required`: Whether users are required to provide their birthday when signing up
+- `pleroma.metadata.birthday_min_age`: The minimum user age (in days)
- `pleroma.metadata.features`: A list of supported features
- `pleroma.metadata.federation`: The federation restrictions of this instance
- `pleroma.metadata.fields_limits`: A list of values detailing the length and count limitation for various instance-configurable fields.
- `pleroma.metadata.post_formats`: A list of the allowed post format types
-- `vapid_public_key`: The public key needed for push messages
+- `pleroma.stats.mau`: Monthly active user count
+- `pleroma.vapid_public_key`: The public key needed for push messages
+
+In `GET /api/v2/instance`, Pleroma-specific fields are all moved into the `pleroma` object. `max_toot_chars`, `poll_limits` and `upload_limit` are replaced with their MastoAPI counterparts.
## Push Subscription
@@ -353,6 +374,122 @@ The message payload consist of:
- `follower_count`: follower count
- `following_count`: following count
+### Authenticating via `sec-websocket-protocol` header
+
+Pleroma allows authenticating via the `sec-websocket-protocol` header. For example, if your access token is `your-access-token`, you can authenticate using the following:
+
+```
+sec-websocket-protocol: your-access-token
+```
+
+### Authenticating after connection via `pleroma:authenticate` event
+
+Pleroma allows authenticating after the connection is established, via the `pleroma:authenticate` event. For example, if your access token is `your-access-token`, you can send the following after the connection is established:
+
+```
+{"type": "pleroma:authenticate", "token": "your-access-token"}
+```
+
+### Response to client-sent events
+
+Pleroma will respond to client-sent events that it recognizes. Supported event types are:
+
+- `subscribe`
+- `unsubscribe`
+- `pleroma:authenticate`
+
+The reply will be in the following format:
+
+```
+{
+ "event": "pleroma:respond",
+ "payload": "{\"type\": \"<request type>\", \"result\": \"<result>\", \"error\": \"<error code>\"}"
+}
+```
+
+Result of the action can be either `success`, `ignored` or `error`. If it is `error`, the `error` property will contain the error code. Otherwise, the `error` property will not be present. Below are some examples:
+
+```
+{
+ "event": "pleroma:respond",
+ "payload": "{\"type\": \"pleroma:authenticate\", \"result\": \"success\"}"
+}
+
+{
+ "event": "pleroma:respond",
+ "payload": "{\"type\": \"subscribe\", \"result\": \"ignored\"}"
+}
+
+{
+ "event": "pleroma:respond",
+ "payload": "{\"type\": \"unsubscribe\", \"result\": \"error\", \"error\": \"bad_topic\"}"
+}
+```
+
+If the sent event is not of a type that Pleroma supports, it will not reply.
+
+### The `stream` attribute of a server-sent event
+
+Technically, this is in Mastodon, but its documentation does nothing to specify its format.
+
+This attribute appears on every event type except `pleroma:respond` and `delete`. It helps clients determine where they should display the new statuses.
+
+The value of the attribute is an array containing one or two elements. The first element is the type of the stream. The second is the identifier related to that specific stream, if applicable.
+
+For the following stream types, there is a second element in the array:
+
+- `list`: The second element is the id of the list, as a string.
+- `hashtag`: The second element is the name of the hashtag.
+- `public:remote:media` and `public:remote`: The second element is the domain of the corresponding instance.
+
+For all other stream types, there is no second element.
+
+Some examples of valid `stream` values:
+
+- `["list", "1"]`: List of id 1.
+- `["hashtag", "mew"]`: The hashtag #mew.
+- `["user:notifications"]`: Notifications for the current user.
+- `["user"]`: Home timeline.
+- `["public:remote", "mew.moe"]`: Public posts from the instance `mew.moe`.
+
+### The unified streaming endpoint
+
+If you do not specify a stream to connect to when requesting `/api/v1/streaming`, you will enter a connection that subscribes to no streams. After the connection is established, you can authenticate and then subscribe to different streams.
+
+### List of supported streams
+
+Below is a list of the streams supported by Pleroma. To make a single-stream WebSocket connection, append the string specified in "Query style" to the streaming endpoint URL.
+To subscribe to a stream after the connection is established, merge the JSON object specified in "Subscribe style" with `{"type": "subscribe"}`. To unsubscribe, merge it with `{"type": "unsubscribe"}`.
+
+For example, to receive updates on the list 1, you can connect to `/api/v1/streaming/?stream=list&list=1`, or send
+
+```
+{"type": "subscribe", "stream": "list", "list": "1"}
+```
+
+upon establishing the websocket connection.
+
+To unsubscribe from list 1, send
+
+```
+{"type": "unsubscribe", "stream": "list", "list": "1"}
+```
+
+Note that if you specify a stream that requires a logged-in user in the query string (for example, `user` or `list`), you have to specify the access token when you are trying to establish the connection, i.e. in the query string or via the `sec-websocket-protocol` header.
+
+- `list`
+    - Query style: `?stream=list&list=<id>`
+    - Subscribe style: `{"stream": "list", "list": "<id>"}`
+- `public`, `public:local`, `public:media`, `public:local:media`, `user`, `user:pleroma_chat`, `user:notifications`, `direct`
+    - Query style: `?stream=<stream name>`
+    - Subscribe style: `{"stream": "<stream name>"}`
+- `hashtag`
+    - Query style: `?stream=hashtag&tag=<tag name>`
+    - Subscribe style: `{"stream": "hashtag", "tag": "<tag name>"}`
+- `public:remote`, `public:remote:media`
+    - Query style: `?stream=<stream name>&instance=<instance domain>`
+    - Subscribe style: `{"stream": "<stream name>", "instance": "<instance domain>"}`
+
## User muting and thread muting
Both user muting and thread muting can be done for only a certain time by adding an `expires_in` parameter to the API calls and giving the expiration time in seconds.
diff --git a/docs/development/API/pleroma_api.md b/docs/development/API/pleroma_api.md
index 0d15384b9..57d333ffe 100644
--- a/docs/development/API/pleroma_api.md
+++ b/docs/development/API/pleroma_api.md
@@ -129,7 +129,7 @@ The `/api/v1/pleroma/*` path is backwards compatible with `/api/pleroma/*` (`/ap
* method: `GET`
* Authentication: required
* OAuth scope: `write:security`
-* Response: JSON. Returns `{"codes": codes}`when successful, otherwise HTTP 422 `{"error": "[error message]"}`
+* Response: JSON. Returns `{"codes": codes}` when successful, otherwise HTTP 422 `{"error": "[error message]"}`
## `/api/v1/pleroma/admin/`
See [Admin-API](admin_api.md)
@@ -251,6 +251,15 @@ See [Admin-API](admin_api.md)
]
```
+
+## `/api/v1/pleroma/accounts/:id/endorsements`
+### Returns users endorsed by a user
+* Method `GET`
+* Authentication: not required
+* Params:
+ * `id`: the id of the account for whom to return results
+* Response: JSON, returns a list of Mastodon Account entities
+
## `/api/v1/pleroma/accounts/update_*`
### Set and clear account avatar, banner, and background
@@ -266,6 +275,58 @@ See [Admin-API](admin_api.md)
* Authentication: not required
* Response: 204 No Content
+## `/api/v1/pleroma/statuses/:id/quotes`
+### Gets quotes for a given status
+* Method `GET`
+* Authentication: not required
+* Params:
+ * `id`: the id of the status
+* Response: JSON, returns a list of Mastodon Status entities
+
+## `GET /api/v1/pleroma/bookmark_folders`
+### Gets user bookmark folders
+* Authentication: required
+
+* Response: JSON. Returns a list of bookmark folders.
+* Example response:
+```json
+[
+ {
+ "id": "9umDrYheeY451cQnEe",
+ "name": "Read later",
+ "emoji": "🕓",
+ "emoji_url": null
+ }
+]
+```
+
+## `POST /api/v1/pleroma/bookmark_folders`
+### Creates a bookmark folder
+* Authentication: required
+
+* Params:
+ * `name`: folder name
+ * `emoji`: folder emoji (optional)
+* Response: JSON. Returns a single bookmark folder.
+
+## `PATCH /api/v1/pleroma/bookmark_folders/:id`
+### Updates a bookmark folder
+* Authentication: required
+
+* Params:
+ * `id`: folder id
+ * `name`: folder name (optional)
+ * `emoji`: folder emoji (optional)
+* Response: JSON. Returns a single bookmark folder.
+
+## `DELETE /api/v1/pleroma/bookmark_folders/:id`
+### Deletes a bookmark folder
+* Authentication: required
+
+* Params:
+ * `id`: folder id
+* Response: JSON. Returns a single bookmark folder.
+
## `/api/v1/pleroma/mascot`
### Gets user mascot image
* Method `GET`
@@ -342,6 +403,45 @@ See [Admin-API](admin_api.md)
* Response: JSON. Returns `{"status": "success"}` if the change was successful, `{"error": "[error message]"}` otherwise
* Note: Currently, Mastodon has no API for changing email. If they add it in future it might be incompatible with Pleroma.
+## `/api/pleroma/move_account`
+### Move account
+* Method `POST`
+* Authentication: required
+* Params:
+ * `password`: user's password
+ * `target_account`: the nickname of the target account (e.g. `foo@example.org`)
+* Response: JSON. Returns `{"status": "success"}` if the change was successful, `{"error": "[error message]"}` otherwise
+* Note: This endpoint emits a `Move` activity to all followers of the current account. Some remote servers will automatically unfollow the current account and follow the target account upon seeing this, but this depends on the remote server implementation and cannot be guaranteed. Local followers will automatically unfollow and follow if and only if they have set the `allow_following_move` preference ("Allow auto-follow when following account moves").
+
+## `/api/pleroma/aliases`
+### Get aliases of the current account
+* Method `GET`
+* Authentication: required
+* Response: JSON. Returns `{"aliases": [alias, ...]}`, where `alias` is the nickname of an alias, e.g. `foo@example.org`.
+
+### Add alias to the current account
+* Method `PUT`
+* Authentication: required
+* Params:
+ * `alias`: the nickname of the alias to add, e.g. `foo@example.org`.
+* Response: JSON. Returns `{"status": "success"}` if the change was successful, `{"error": "[error message]"}` otherwise
+
+### Delete alias from the current account
+* Method `DELETE`
+* Authentication: required
+* Params:
+ * `alias`: the nickname of the alias to delete, e.g. `foo@example.org`.
+* Response: JSON. Returns `{"status": "success"}` if the change was successful, `{"error": "[error message]"}` otherwise
+
+## `/api/v1/pleroma/remote_interaction`
+### Interact with profile or status from remote account
+* Method `POST`
+* Authentication: not required
+* Params:
+ * `ap_id`: Profile or status ActivityPub ID
+ * `profile`: Remote profile webfinger
+* Response: JSON. Returns `{"url": "[redirect url]"}` on success, `{"error": "[error message]"}` otherwise
+
# Pleroma Conversations
Pleroma Conversations have the same general structure that Mastodon Conversations have. The behavior differs in the following ways when using these endpoints:
@@ -352,7 +452,7 @@ Pleroma Conversations have the same general structure that Mastodon Conversation
Conversations have the additional field `recipients` under the `pleroma` key. This holds a list of all the accounts that will receive a message in this conversation.
-The status posting endpoint takes an additional parameter, `in_reply_to_conversation_id`, which, when set, will set the visiblity to direct and address only the people who are the recipients of that Conversation.
+The status posting endpoint takes an additional parameter, `in_reply_to_conversation_id`, which, when set, will set the visibility to direct and address only the people who are the recipients of that Conversation.
⚠ Conversation IDs can be found in direct messages with the `pleroma.direct_conversation_id` key, do not confuse it with `pleroma.conversation_id`.
@@ -547,6 +647,9 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
404 if the pack does not exist
## `GET /api/v1/pleroma/accounts/:id/scrobbles`
+
+Audio scrobbling in Pleroma is **deprecated**.
+
### Requests a list of current and recent Listen activities for an account
* Method `GET`
* Authentication: not required
@@ -568,6 +671,9 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
```
## `POST /api/v1/pleroma/scrobble`
+
+Audio scrobbling in Pleroma is **deprecated**.
+
### Creates a new Listen activity for an account
* Method `POST`
* Authentication: required
@@ -695,3 +801,42 @@ Emoji reactions work a lot like favourites do. They make it possible to react to
* Authentication: required
* Params: none
* Response: HTTP 200 on success, 500 on error
+
+## `/api/v1/pleroma/settings/:app`
+### Gets settings for some application
+* Method `GET`
+* Authentication: `read:accounts`
+
+* Response: JSON. The settings for that application, or an empty object if there are none.
+* Example response:
+```json
+{
+ "some key": "some value"
+}
+```
+
+### Updates settings for some application
+* Method `PATCH`
+* Authentication: `write:accounts`
+* Request body: JSON object. The object will be merged recursively with old settings. If some field is set to null, it is removed.
+* Example request:
+```json
+{
+ "some key": "some value",
+ "key to remove": null,
+ "nested field": {
+ "some key": "some value",
+ "key to remove": null
+ }
+}
+```
+* Response: JSON. Updated (merged) settings for that application.
+* Example response:
+```json
+{
+ "some key": "some value",
+ "nested field": {
+ "some key": "some value"
+ }
+}
+```
diff --git a/docs/development/ap_extensions.md b/docs/development/ap_extensions.md
index 3d1caeb3e..75c8a7b54 100644
--- a/docs/development/ap_extensions.md
+++ b/docs/development/ap_extensions.md
@@ -20,16 +20,16 @@ Content-Type: multipart/form-data
Parameters:
- (required) `file`: The file being uploaded
-- (optionnal) `description`: A plain-text description of the media, for accessibility purposes.
+- (optional) `description`: A plain-text description of the media, for accessibility purposes.
Response: HTTP 201 Created with the object into the body, no `Location` header provided as it doesn't have an `id`
-The object given in the reponse should then be inserted into an Object's `attachment` field.
+The object given in the response should then be inserted into an Object's `attachment` field.
## ChatMessages
`ChatMessage`s are the messages sent in 1-on-1 chats. They are similar to
-`Note`s, but the addresing is done by having a single AP actor in the `to`
+`Note`s, but the addressing is done by having a single AP actor in the `to`
field. Addressing multiple actors is not allowed. These messages are always
private, there is no public version of them. They are created with a `Create`
activity.
diff --git a/docs/development/setting_up_a_gitlab_runner.md b/docs/development/setting_up_a_gitlab_runner.md
new file mode 100644
index 000000000..88beb82f2
--- /dev/null
+++ b/docs/development/setting_up_a_gitlab_runner.md
@@ -0,0 +1,9 @@
+# Setting up a Gitlab-runner
+
+When you push changes, a pipeline will start some automated jobs. These are done with so-called [runners](https://docs.gitlab.com/runner/), services that run on a server and execute these automated jobs. These jobs typically run tests and should pass. If not, you probably need to fix something.
+
+Generally, Pleroma provides a runner, so you don't need to set up your own. However, if for whatever reason you want to set up your own, here are some high-level instructions.
+
+1. We use docker to run the jobs, so you should install that. For Debian, you need to allow non-free packages in the [source list](https://wiki.debian.org/SourcesList). Then you can install docker with `apt install docker-compose`.
+2. You can [install](https://docs.gitlab.com/runner/install/index.html) and [configure](https://docs.gitlab.com/runner/register/index.html) a Gitlab-runner. It's probably easiest to install from the packages, but there are other options as well.
+3. When registering the runner, you'll need some values. You can find them in the project under your own name. Choose "Settings", "CI/CD", and then expand "Runners". For executor you can choose "docker". For default image, you can use the image used in (although it shouldn't matter much).
diff --git a/docs/development/setting_up_pleroma_dev.md b/docs/development/setting_up_pleroma_dev.md
index 8da761d62..24f358e4a 100644
--- a/docs/development/setting_up_pleroma_dev.md
+++ b/docs/development/setting_up_pleroma_dev.md
@@ -15,7 +15,7 @@ Pleroma requires some adjustments from the defaults for running the instance loc
2. Change the dev.secret.exs
* Change the scheme in `config :pleroma, Pleroma.Web.Endpoint` to http (see examples below)
* If you want to change other settings, you can do that too
-3. You can now start the server `mix phx.server`. Once it's build and started, you can access the instance on `http://:` (e.g.http://localhost:4000 ) and should be able to do everything locally you normaly can.
+3. You can now start the server with `mix phx.server`. Once it's built and started, you can access the instance on `http://<host>:<port>` (e.g. http://localhost:4000) and should be able to do everything locally you normally can.
Example config to change the scheme to http. Change the port if you want to run on another port.
```elixir
@@ -38,7 +38,7 @@ config :logger, :console,
## Testing
-1. Create a `test.secret.exs` file with the content as shown below
+1. Create a `config/test.secret.exs` file with the content as shown below
2. Create the database user and test database.
1. You can use the `config/setup_db.psql` as a template. Copy the file if you want and change the database name, user and password to the values for the test-database (e.g. 'pleroma_local_test' for database and user). Then run this file like you did during installation.
2. The tests will try to create the Database, so we'll have to allow our test-database user to create databases, `sudo -Hu postgres psql -c "ALTER USER pleroma_local_test WITH CREATEDB;"`
diff --git a/docs/installation/alpine_linux_en.md b/docs/installation/alpine_linux_en.md
index c37ff0c63..7154bca48 100644
--- a/docs/installation/alpine_linux_en.md
+++ b/docs/installation/alpine_linux_en.md
@@ -183,6 +183,9 @@ server {
...
}
```
+* (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
* Enable and start nginx:
diff --git a/docs/installation/arch_linux_en.md b/docs/installation/arch_linux_en.md
index 285743d56..f7d722ef9 100644
--- a/docs/installation/arch_linux_en.md
+++ b/docs/installation/arch_linux_en.md
@@ -173,6 +173,11 @@ sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/ple
```
* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths)
+
+* (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
+
* Enable and start nginx:
```shell
diff --git a/docs/installation/debian_based_en.md b/docs/installation/debian_based_en.md
index 4e52b2155..b61e4addd 100644
--- a/docs/installation/debian_based_en.md
+++ b/docs/installation/debian_based_en.md
@@ -4,7 +4,7 @@
## Installation
-This guide will assume you are on Debian 11 (“bullseye”) or later. This guide should also work with Ubuntu 18.04 (“Bionic Beaver”) and later. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-add-delete-and-grant-sudo-privileges-to-users-on-a-debian-vps). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -s $SHELL -c 'command'` instead.
+This guide will assume you are on Debian 12 (“bookworm”) or later. This guide should also work with Ubuntu 22.04 (“jammy”) and later. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-add-delete-and-grant-sudo-privileges-to-users-on-a-debian-vps). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -s $SHELL -c 'command'` instead.
{! backend/installation/generic_dependencies.include !}
@@ -136,6 +136,11 @@ sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/ple
```
* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths)
+
+* (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document on how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
+
* Enable and start nginx:
```shell
diff --git a/docs/installation/debian_based_jp.md b/docs/installation/debian_based_jp.md
index 3736e857f..5a0823a63 100644
--- a/docs/installation/debian_based_jp.md
+++ b/docs/installation/debian_based_jp.md
@@ -1,17 +1,20 @@
# Pleromaの入れ方
+
+Note: This article is potentially outdated because at this time we may not have people who can speak this language well enough to update it. To see the up-to-date version, which may have significant differences or important caveats of the installation process, look up the English version.
+
## 日本語訳について
この記事は [Installing on Debian based distributions](Installing on Debian based distributions) の日本語訳です。何かがおかしいと思ったら、原文を見てください。
## インストール
-このガイドはDebian Stretchを利用することを想定しています。Ubuntu 16.04や18.04でもおそらく動作します。また、ユーザはrootもしくはsudoにより管理者権限を持っていることを前提とします。もし、以下の操作をrootユーザで行う場合は、 `sudo` を無視してください。ただし、`sudo -Hu pleroma` のようにユーザを指定している場合には `su -s $SHELL -c 'command'` を代わりに使ってください。
+このガイドはDebian Bookwormを利用することを想定しています。Ubuntu 22.04でもおそらく動作します。また、ユーザはrootもしくはsudoにより管理者権限を持っていることを前提とします。もし、以下の操作をrootユーザで行う場合は、 `sudo` を無視してください。ただし、`sudo -Hu pleroma` のようにユーザを指定している場合には `su -s $SHELL -c 'command'` を代わりに使ってください。
### 必要なソフトウェア
-- PostgreSQL 9.6以上 (Ubuntu16.04では9.5しか提供されていないので,[](https://www.postgresql.org/download/linux/ubuntu/)こちらから新しいバージョンを入手してください)
-- `postgresql-contrib` 9.6以上 (同上)
-- Elixir 1.8 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください)
+- PostgreSQL 11.0以上 (Ubuntu16.04では9.5しか提供されていないので,[](https://www.postgresql.org/download/linux/ubuntu/)こちらから新しいバージョンを入手してください)
+- `postgresql-contrib` 11.0以上 (同上)
+- Elixir 1.13 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください)
- `erlang-dev`
- `erlang-nox`
- `git`
diff --git a/docs/installation/freebsd_en.md b/docs/installation/freebsd_en.md
index 9cbe0f203..02513daf2 100644
--- a/docs/installation/freebsd_en.md
+++ b/docs/installation/freebsd_en.md
@@ -9,7 +9,7 @@ This document was written for FreeBSD 12.1, but should be work on future release
This assumes the target system has `pkg(8)`.
```
-# pkg install elixir postgresql12-server postgresql12-client postgresql12-contrib git-lite sudo nginx gmake acme.sh cmake
+# pkg install elixir postgresql12-server postgresql12-client postgresql12-contrib git-lite sudo nginx gmake acme.sh cmake vips
```
Copy the rc.d scripts to the right directory:
@@ -41,6 +41,7 @@ Create a user for Pleroma:
```
# pw add user pleroma -m
# echo 'export LC_ALL="en_US.UTF-8"' >> /home/pleroma/.profile
+# echo 'export VIX_COMPILATION_MODE=PLATFORM_PROVIDED_LIBVIPS' >> /home/pleroma/.profile
# su -l pleroma
```
@@ -173,6 +174,10 @@ Edit the defaults of `/usr/local/etc/nginx/sites-available/pleroma.nginx`:
* Change `ssl_certificate_key` to `/var/db/acme/certs/example.tld/example.tld.key`.
* Change all references of `example.tld` to your instance's domain name.
+#### (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document for instructions on how to serve media on another domain. We STRONGLY RECOMMEND you do this to minimize attack vectors.
+
## Creating a startup script for Pleroma
Pleroma will need to compile when it initially starts, which typically takes a longer
diff --git a/docs/installation/generic_dependencies.include b/docs/installation/generic_dependencies.include
index 2dbd93e42..666f49fbb 100644
--- a/docs/installation/generic_dependencies.include
+++ b/docs/installation/generic_dependencies.include
@@ -1,11 +1,11 @@
## Required dependencies
-* PostgreSQL 9.6+
-* Elixir 1.9+
-* Erlang OTP 22.2+
+* PostgreSQL >=11.0
+* Elixir >=1.13.0 <1.15
+* Erlang OTP >=22.2.0 (supported: <27)
* git
* file / libmagic
-* gcc (clang might also work)
+* gcc or clang
* GNU make
* CMake
diff --git a/docs/installation/gentoo_en.md b/docs/installation/gentoo_en.md
index 36882c8c8..dc47d27f8 100644
--- a/docs/installation/gentoo_en.md
+++ b/docs/installation/gentoo_en.md
@@ -1,6 +1,8 @@
-# Installing on Gentoo GNU/Linux
+# Manual install on Gentoo GNU/Linux
-{! backend/installation/otp_vs_from_source_source.include !}
+{! backend/installation/otp_vs_from_source.include !}
+
+This guide covers a manual from-source installation. To use the Gentoo package instead, please check the [packaged installation guide for Gentoo](./gentoo_otp_en.md).
## Installation
@@ -57,7 +59,7 @@ Gentoo quite pointedly does not come with a cron daemon installed, and as such i
If you would not like to install the optional packages, remove them from this line.
-If you're running this from a low-powered virtual machine, it should work though it will take some time. There were no issues on a VPS with a single core and 1GB of RAM; if you are using an even more limited device and run into issues, you can try creating a swapfile or use a more powerful machine running Gentoo to [cross build](https://wiki.gentoo.org/wiki/Cross_build_environment). If you have a wait ahead of you, now would be a good time to take a break, strech a bit, refresh your beverage of choice and/or get a snack, and reply to Arch users' posts with "I use Gentoo btw" as we do.
+If you're running this from a low-powered virtual machine, it should work though it will take some time. There were no issues on a VPS with a single core and 1GB of RAM; if you are using an even more limited device and run into issues, you can try creating a swapfile or use a more powerful machine running Gentoo to [cross build](https://wiki.gentoo.org/wiki/Cross_build_environment). If you have a wait ahead of you, now would be a good time to take a break, stretch a bit, refresh your beverage of choice and/or get a snack, and reply to Arch users' posts with "I use Gentoo btw" as we do.
### Install PostgreSQL
@@ -102,7 +104,7 @@ Not only does this make it much easier to deploy changes you make, as you can co
* Add a new system user for the Pleroma service and set up default directories:
-Remove `,wheel` if you do not want this user to be able to use `sudo`, however note that being able to `sudo` as the `pleroma` user will make finishing the insallation and common maintenence tasks somewhat easier:
+Remove `,wheel` if you do not want this user to be able to use `sudo`, however note that being able to `sudo` as the `pleroma` user will make finishing the installation and common maintenance tasks somewhat easier:
```shell
# useradd -m -G users,wheel -s /bin/bash pleroma
@@ -227,6 +229,10 @@ Replace all instances of `example.tld` with your instance's public URL. If for w
Pay special attention to the line that begins with `ssl_ecdh_curve`. It is stongly advised to comment that line out so that OpenSSL will use its full capabilities, and it is also possible you are running OpenSSL 1.0.2 necessitating that you do this.
+* (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document for instructions on how to serve media on another domain. We STRONGLY RECOMMEND you do this to minimize attack vectors.
+
* Enable and start nginx:
```shell
diff --git a/docs/installation/gentoo_otp_en.md b/docs/installation/gentoo_otp_en.md
new file mode 100644
index 000000000..20d8835da
--- /dev/null
+++ b/docs/installation/gentoo_otp_en.md
@@ -0,0 +1,207 @@
+# Packaged install on Gentoo Linux
+
+{! backend/installation/otp_vs_from_source.include !}
+
+This guide covers installation via Gentoo-provided packaging. A [manual installation guide for Gentoo](./gentoo_en.md) is also available.
+
+## Installation
+
+This guide will assume that you have administrative rights, either as root or a user with [sudo permissions](https://wiki.gentoo.org/wiki/Sudo). Lines that begin with `#` indicate that they should be run as the superuser. Lines using `$` should be run as the indicated user, e.g. `pleroma$` should be run as the `pleroma` user.
+
+{! backend/installation/generic_dependencies.include !}
+
+### Installing a cron daemon
+
+Gentoo quite pointedly does not come with a cron daemon installed, and as such it is recommended you install one to automate certbot renewals and to allow other system administration tasks to be run automatically. Gentoo has [a whole wide world of cron options](https://wiki.gentoo.org/wiki/Cron), but if you just want A Cron That Works, `emerge --ask virtual/cron` will install the default cron implementation (probably cronie), which will work just fine. For the purposes of this guide, we will be doing just that.
+
+### Required ebuilds
+
+* `www-apps/pleroma`
+
+#### Optional ebuilds used in this guide
+
+* `www-servers/nginx` (preferred, example configs for other reverse proxies can be found in the repo)
+* `app-crypt/certbot` (or any other ACME client for Let’s Encrypt certificates)
+* `app-crypt/certbot-nginx` (nginx certbot plugin that allows use of the all-powerful `--nginx` flag on certbot)
+* `media-gfx/imagemagick`
+* `media-video/ffmpeg`
+* `media-libs/exiftool`
+
+### Prepare the system
+
+* If you haven't yet done so, add the [Gentoo User Repository (GURU)](https://wiki.gentoo.org/wiki/Project:GURU), where the `www-apps/pleroma` ebuild currently lives:
+```shell
+ # eselect repository enable guru
+```
+
+* Ensure that you have the latest copy of the Gentoo and GURU ebuilds if you have not synced them yet:
+
+```shell
+ # emaint sync -a
+```
+
+
+* Emerge all the required and suggested software in one go:
+
+```shell
+ # emerge --ask www-apps/pleroma www-servers/nginx app-crypt/certbot app-crypt/certbot-nginx
+```
+
+If you would not like to install the optional packages, remove them from this line.
+
+If you're running this from a low-powered virtual machine, it should work though it will take some time. There were no issues on a VPS with a single core and 1GB of RAM; if you are using an even more limited device and run into issues, you can try creating a swapfile or use a more powerful machine running Gentoo to [cross build](https://wiki.gentoo.org/wiki/Cross_build_environment). If you have a wait ahead of you, now would be a good time to take a break, stretch a bit, refresh your beverage of choice and/or get a snack, and reply to Arch users' posts with "I use Gentoo btw" as we do.
+
+### Setup PostgreSQL
+
+The [Gentoo Wiki article](https://wiki.gentoo.org/wiki/PostgreSQL) as well as the [PostgreSQL QuickStart](https://wiki.gentoo.org/wiki/PostgreSQL/QuickStart) might be worth a quick glance, as the way Gentoo handles postgres is slightly unusual, with a built-in capability to have two different databases running, for example one for testing and one for live use, or whatever other purpose. While it is still straightforward to install, it does mean that the version numbers used in this guide might change for future updates, so keep an eye on the output you get from `emerge` to ensure you are using the correct ones.
+
+* Initialize the database cluster
+
+The output from emerging postgresql should give you a command for initializing the postgres database. The default slot should be indicated in this command; ensure that it matches the command below.
+
+```shell
+ # emerge --config dev-db/postgresql:11
+```
+
+### Install media / graphics packages (optional)
+
+See [Optional software packages needed for specific functionality](optional/media_graphics_packages.md) for details.
+
+```shell
+# emerge --ask media-video/ffmpeg media-gfx/imagemagick media-libs/exiftool
+```
+
+### Setup PleromaBE
+
+* Generate the configuration:
+
+```shell
+ # pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql
+```
+
+* Create the PostgreSQL database
+
+```shell
+ # sudo -u postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
+```
+
+* Now run the database migration:
+
+```shell
+ # pleroma_ctl migrate
+```
+
+* Optional: If you have installed RUM indexes (`dev-db/rum`) you also need to run:
+```shell
+ # sudo -Hu pleroma pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/
+```
+
+* Now you can start Pleroma already and add it in the default runlevel
+
+```shell
+ # rc-service pleroma start
+ # rc-update add pleroma default
+```
+
+It probably won't work over the public internet quite yet, however, as we still need to set up a web server to proxy to the pleroma application, as well as configure SSL.
+
+### Finalize installation
+
+Assuming you want to open your newly installed federated social network to, well, the federation, you should run nginx or some other webserver/proxy in front of Pleroma. It is also a good idea to set up Pleroma to run as a system service.
+
+#### Nginx
+
+* Install nginx, if not already done:
+
+```shell
+ # emerge --ask www-servers/nginx
+```
+
+* Create directories for available and enabled sites:
+
+```shell
+ # mkdir -p /etc/nginx/sites-{available,enabled}
+```
+
+* Append the following line at the end of the `http` block in `/etc/nginx/nginx.conf`:
+
+```Nginx
+include sites-enabled/*;
+```
+
+* Set up your SSL cert, using your method of choice or certbot. If using certbot, install it if you haven't already:
+
+```shell
+ # emerge --ask app-crypt/certbot app-crypt/certbot-nginx
+```
+
+and then set it up:
+
+```shell
+ # mkdir -p /var/lib/letsencrypt/
+ # certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
+```
+
+If that doesn't work the first time, add `--dry-run` to further attempts to avoid being rate-limited as you identify the issue, and do not remove it until the dry run succeeds. If that doesn't work, make sure that nginx is not already running. If it still doesn't work, try setting up nginx first (change ssl "on" to "off" and try again). Often the answer to issues with certbot is to use the `--nginx` flag once you have nginx up and running.
+
+If you are using any additional subdomains, such as for a media proxy, you can re-run the same command with the subdomain in question. When it comes time to renew later, you will not need to run the command multiple times for each domain; one renewal will handle them all.
+
+---
+
+* Copy the example nginx configuration and activate it:
+
+```shell
+ # cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/
+ # ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
+```
+
+* Take some time to ensure that your nginx config is correct
+
+Replace all instances of `example.tld` with your instance's public URL. If for whatever reason you made changes to the port that your pleroma app runs on, be sure that is reflected in your configuration.
+
+Pay special attention to the line that begins with `ssl_ecdh_curve`. It is strongly advised to comment that line out so that OpenSSL will use its full capabilities, and it is also possible you are running OpenSSL 1.0.2, necessitating that you do this.
+
+* Enable and start nginx:
+
+```shell
+ # rc-update add nginx default
+ # /etc/init.d/nginx start
+```
+
+If you are using certbot, it is HIGHLY recommended that you set up a cron job that renews your certificate and that you install the suggested `certbot-nginx` plugin. If you don't do these things, you only have yourself to blame when your instance suddenly breaks because you forgot about it.
+
+First, ensure that the command you will be installing into your crontab works.
+
+```shell
+ # /usr/bin/certbot renew --nginx
+```
+
+Assuming not much time has passed since you got certbot working a few steps ago, you should get a message for all domains you installed certificates for saying `Cert not yet due for renewal`.
+
+Now, run crontab as a superuser with `crontab -e` or `sudo crontab -e` as appropriate, and add the following line to your cron:
+
+```cron
+0 0 1 * * /usr/bin/certbot renew --nginx
+```
+
+This will run certbot on the first of the month at midnight. If you'd rather run it more frequently, that's not a bad idea; feel free to go for it.
+
+#### Other webserver/proxies
+
+If you would like to use other webservers or proxies, there are example configurations for some popular alternatives in `/opt/pleroma/installation/`. You can, of course, check out [the Gentoo wiki](https://wiki.gentoo.org) for more information on installing and configuring said alternatives.
+
+#### Create your first user
+
+If your instance is up and running, you can create your first user with administrative rights with the following task:
+
+```shell
+pleroma$ pleroma_ctl user new --admin
+```
+
+#### Further reading
+
+{! backend/installation/further_reading.include !}
+
+## Questions
+
+If you have questions about the installation, or it didn't work as it should, ask in [#pleroma:libera.chat](https://matrix.to/#/#pleroma:libera.chat) via Matrix or **#pleroma** on **libera.chat** via IRC.
diff --git a/docs/installation/migrating_from_source_otp_en.md b/docs/installation/migrating_from_source_otp_en.md
index e4a01d8db..798862566 100644
--- a/docs/installation/migrating_from_source_otp_en.md
+++ b/docs/installation/migrating_from_source_otp_en.md
@@ -5,7 +5,7 @@
In this guide we cover how you can migrate from a from source installation to one using OTP releases.
## Pre-requisites
-You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`.
+You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.
The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds.
@@ -86,26 +86,26 @@ export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it
# Replace `stable` with `unstable` if you want to run the unstable branch
-su pleroma -s $SHELL -lc "
+sudo -Hu pleroma "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/stable/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/
"
# Move the release to the home directory and delete temporary files
-su pleroma -s $SHELL -lc "
+sudo -Hu pleroma "
mv /tmp/release/* ~pleroma/
rmdir /tmp/release
rm /tmp/pleroma.zip
"
# Start the instance to verify that everything is working as expected
-su pleroma -s $SHELL -lc "./bin/pleroma daemon"
+sudo -Hu pleroma "./bin/pleroma daemon"
# Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance
# Stop the instance
-su pleroma -s $SHELL -lc "./bin/pleroma stop"
+sudo -Hu pleroma "./bin/pleroma stop"
```
## Setting up a system service
diff --git a/docs/installation/netbsd_en.md b/docs/installation/netbsd_en.md
index 41b3b0072..2ade7df98 100644
--- a/docs/installation/netbsd_en.md
+++ b/docs/installation/netbsd_en.md
@@ -123,6 +123,10 @@ Edit the defaults:
* Change `ssl_certificate_key` to `/etc/nginx/tls/key`.
* Change `example.tld` to your instance's domain name.
+### (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document for instructions on how to serve media on another domain. We STRONGLY RECOMMEND you do this to minimize attack vectors.
+
## Configuring acme.sh
We'll be using acme.sh in Stateless Mode for TLS certificate renewal.
diff --git a/docs/installation/nixos_en.md b/docs/installation/nixos_en.md
new file mode 100644
index 000000000..f3c4988b1
--- /dev/null
+++ b/docs/installation/nixos_en.md
@@ -0,0 +1,15 @@
+# Installing on NixOS
+
+NixOS contains a package of pleroma built from source and a NixOS module to install it.
+For installation, add this to your configuration.nix and add a config.exs next to it:
+```nix
+ services.pleroma = {
+ enable = true;
+ configs = [ (lib.fileContents ./config.exs) ];
+ secretConfigFile = "/var/lib/pleroma/secret.exs";
+ };
+```
+
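+A minimal `config.exs` to pair with the module above might look like the sketch below; every value is a placeholder to adjust, and secrets such as the database password belong in the file referenced by `secretConfigFile` rather than here:
+
+```elixir
+import Config
+
+# Placeholder values; adjust to your instance.
+config :pleroma, Pleroma.Web.Endpoint,
+  url: [host: "pleroma.example.tld", scheme: "https", port: 443]
+
+config :pleroma, :instance,
+  name: "Example Pleroma",
+  email: "admin@example.tld",
+  registrations_open: false
+```
+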
+## Questions
+The Nix community uses Matrix for communication: [#nix:nixos.org](https://matrix.to/#/#nix:nixos.org)
+
diff --git a/docs/installation/openbsd_en.md b/docs/installation/openbsd_en.md
index c80c8f678..e58e144d2 100644
--- a/docs/installation/openbsd_en.md
+++ b/docs/installation/openbsd_en.md
@@ -62,7 +62,7 @@ rcctl start postgresql
To check that it started properly and didn't fail right after starting, you can run `ps aux | grep postgres`, there should be multiple lines of output.
#### httpd
-httpd will have three fuctions:
+httpd will have three functions:
* redirect requests trying to reach the instance over http to the https URL
* serve a robots.txt file
@@ -195,6 +195,10 @@ rcctl enable relayd
rcctl start relayd
```
+##### (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document for instructions on how to serve media on another domain. We STRONGLY RECOMMEND you do this to minimize attack vectors.
+
#### pf
Enabling and configuring pf is highly recommended.
In /etc/pf.conf, insert the following configuration:
@@ -221,7 +225,7 @@ pass in quick on $if inet6 proto icmp6 to ($if) icmp6-type { echoreq unreach par
pass in quick on $if proto tcp to ($if) port { http https } # relayd/httpd
pass in quick on $if proto tcp from $authorized_ssh_clients to ($if) port ssh
```
-Replace ** by your server's network interface name (which you can get with ifconfig). Consider replacing the content of the authorized\_ssh\_clients macro by, for exemple, your home IP address, to avoid SSH connection attempts from bots.
+Replace ** by your server's network interface name (which you can get with ifconfig). Consider replacing the content of the authorized\_ssh\_clients macro by, for example, your home IP address, to avoid SSH connection attempts from bots.
Check pf's configuration by running `pfctl -nf /etc/pf.conf`, load it with `pfctl -f /etc/pf.conf` and enable pf at boot with `rcctl enable pf`.
diff --git a/docs/installation/openbsd_fi.md b/docs/installation/openbsd_fi.md
index 3c40b2d1a..73aca3a6f 100644
--- a/docs/installation/openbsd_fi.md
+++ b/docs/installation/openbsd_fi.md
@@ -1,5 +1,7 @@
# Pleroman asennus OpenBSD:llä
+Note: This article is potentially outdated because at this time we may not have people who can speak this language well enough to update it. To see the up-to-date version, which may differ significantly or contain important caveats about the installation process, look up the English version.
+
Tarvitset:
* Oman domainin
* OpenBSD 6.3 -serverin
diff --git a/docs/installation/optional/media_graphics_packages.md b/docs/installation/optional/media_graphics_packages.md
index de402d1c4..ad01d47d1 100644
--- a/docs/installation/optional/media_graphics_packages.md
+++ b/docs/installation/optional/media_graphics_packages.md
@@ -1,9 +1,10 @@
# Optional software packages needed for specific functionality
For specific Pleroma functionality (which is disabled by default) some or all of the below packages are required:
- * `ImageMagic`
- * `ffmpeg`
- * `exiftool`
+
+* `ImageMagick`
+* `ffmpeg`
+* `exiftool`
Please refer to documentation in `docs/installation` on how to install them on specific OS.
@@ -14,20 +15,23 @@ Note: the packages are not required with the current default settings of Pleroma
`ImageMagick` is a set of tools to create, edit, compose, or convert bitmap images.
It is required for the following Pleroma features:
- * `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Plaroma.Upload/filters` in `config/config.exs`)
- * Media preview proxy for still images (related config: `media_preview_proxy/enabled` in `config/config.exs`)
+
+* `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Pleroma.Upload/filters` in `config/config.exs`; see the example below)
+* Media preview proxy for still images (related config: `media_preview_proxy/enabled` in `config/config.exs`)
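+
+As an illustration, enabling the Mogrify filter could look roughly like this in `config/config.exs` (a sketch; the `args` shown are just examples of common mogrify operations):
+
+```elixir
+# Add the filter to the upload pipeline (sketch).
+config :pleroma, Pleroma.Upload,
+  filters: [Pleroma.Upload.Filters.Mogrify]
+
+# Configure which mogrify operations the filter applies.
+config :pleroma, Pleroma.Upload.Filters.Mogrify,
+  args: ["strip", "auto-orient"]
+```
+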
## `ffmpeg`
`ffmpeg` is software to record, convert and stream audio and video.
It is required for the following Pleroma features:
- * Media preview proxy for videos (related config: `media_preview_proxy/enabled` in `config/config.exs`)
+
+* Media preview proxy for videos (related config: `media_preview_proxy/enabled` in `config/config.exs`)
## `exiftool`
`exiftool` is media files metadata reader/writer.
It is required for the following Pleroma features:
- * `Pleroma.Upload.Filters.Exiftool.StripLocation` upload filter (related config: `Plaroma.Upload/filters` in `config/config.exs`)
- * `Pleroma.Upload.Filters.Exiftool.ReadDescription` upload filter (related config: `Plaroma.Upload/filters` in `config/config.exs`)
+
+* `Pleroma.Upload.Filters.Exiftool.StripLocation` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
+* `Pleroma.Upload.Filters.Exiftool.ReadDescription` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
diff --git a/docs/installation/otp_en.md b/docs/installation/otp_en.md
index 0861a8157..86efa27f8 100644
--- a/docs/installation/otp_en.md
+++ b/docs/installation/otp_en.md
@@ -2,15 +2,16 @@
{! backend/installation/otp_vs_from_source.include !}
-This guide covers a installation using an OTP release. To install Pleroma from source, please check out the corresponding guide for your distro.
+This guide covers an installation using OTP releases as built by the Pleroma project. It is meant as a fallback to distribution packages/recipes, which are the preferred installation method.
+To install Pleroma from source, please check out the corresponding guide for your distro.
## Pre-requisites
-* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPU, you have root access to. If you are not sure if it's compatible see [Detecting flavour section](#detecting-flavour) below
+* A machine you have root access to, running Debian GNU/Linux or compatible (e.g. Ubuntu), or Alpine, on an `x86_64`, `aarch64` or `armv7l` CPU. If you are not sure what you are running, see the [Detecting flavour section](#detecting-flavour) below
* A (sub)domain pointed to the machine
-You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`.
+You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo -i`/`su`.
-While in theory OTP releases are possbile to install on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu and Alpine.
+Similarly to other binaries, OTP releases tend to only be compatible with the distro they are built on; as such, this guide focuses only on Debian/Ubuntu and Alpine.
### Detecting flavour
@@ -19,7 +20,7 @@ Paste the following into the shell:
arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix"
```
-If your platform is supported the output will contain the flavour string, you will need it later. If not, this just means that we don't build releases for your platform, you can still try installing from source.
+This should output your flavour string. If it doesn't, it just means that we don't build releases for your platform; you can still try installing from source.
### Installing the required packages
@@ -114,13 +115,13 @@ adduser --system --shell /bin/false --home /opt/pleroma pleroma
export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it
-su pleroma -s $SHELL -lc "
+sudo -Hu pleroma "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/stable/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/
"
# Move the release to the home directory and delete temporary files
-su pleroma -s $SHELL -lc "
+sudo -Hu pleroma "
mv /tmp/release/* /opt/pleroma
rmdir /tmp/release
rm /tmp/pleroma.zip
@@ -141,25 +142,25 @@ mkdir -p /etc/pleroma
chown -R pleroma /etc/pleroma
# Run the config generator
-su pleroma -s $SHELL -lc "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql"
+sudo -Hu pleroma "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql"
# Create the postgres database
-su postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
+sudo -u postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
# Create the database schema
-su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
+sudo -Hu pleroma "./bin/pleroma_ctl migrate"
# If you have installed RUM indexes uncommend and run
-# su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
+# sudo -Hu pleroma "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
# Start the instance to verify that everything is working as expected
-su pleroma -s $SHELL -lc "./bin/pleroma daemon"
+sudo -Hu pleroma "./bin/pleroma daemon"
# Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance
# Stop the instance
-su pleroma -s $SHELL -lc "./bin/pleroma stop"
+sudo -Hu pleroma "./bin/pleroma stop"
```
### Setting up nginx and getting Let's Encrypt SSL certificaties
@@ -197,6 +198,10 @@ $EDITOR path-to-nginx-config
# Verify that the config is valid
nginx -t
```
+#### (Strongly recommended) serve media on another domain
+
+Refer to the [Hardening your instance](../configuration/hardening.md) document for instructions on how to serve media on another domain. We STRONGLY RECOMMEND you do this to minimize attack vectors.
+
#### Start nginx
=== "Alpine"
@@ -233,7 +238,7 @@ At this point if you open your (sub)domain in a browser you should see a 502 err
systemctl enable pleroma
```
-If everything worked, you should see Pleroma-FE when visiting your domain. If that didn't happen, try reviewing the installation steps, starting Pleroma in the foreground and seeing if there are any errrors.
+If everything worked, you should see Pleroma-FE when visiting your domain. If that didn't happen, try reviewing the installation steps, starting Pleroma in the foreground and seeing if there are any errors.
Questions about the installation or didn’t it work as it should be, ask in [#pleroma:libera.chat](https://matrix.to/#/#pleroma:libera.chat) via Matrix or **#pleroma** on **libera.chat** via IRC, you can also [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma-support/issues/new).
diff --git a/docs/installation/otp_vs_from_source.include b/docs/installation/otp_vs_from_source.include
index 63e837a53..6c7820275 100644
--- a/docs/installation/otp_vs_from_source.include
+++ b/docs/installation/otp_vs_from_source.include
@@ -1,3 +1,8 @@
-## OTP releases vs from-source installations
+## Packaged (OTP) installation vs manual (from-source) installation
-There are two ways to install Pleroma. You can use OTP releases or do a from-source installation. OTP releases are as close as you can get to binary releases with Erlang/Elixir. The release is self-contained, and provides everything needed to boot it, it is easily administered via the provided shell script to open up a remote console, start/stop/restart the release, start in the background, send remote commands, and more. With from source installations you install Pleroma from source, meaning you have to install certain dependencies like Erlang+Elixir and compile Pleroma yourself.
+There are multiple ways to install Pleroma:
+
+* **Distro-provided packages**: this is the recommended method, with the strongest compatibility guarantees and the best dependency management.
+* **Pleroma-provided OTP binaries**: intended as a fallback for Alpine/Debian-compatible systems lacking a proper Pleroma package. They are heavier than proper distro packages, as they also bundle Erlang/Elixir, and can break after system updates.
+* **Manual from-source installation**: needs build dependencies installed and requires manual updates and rebuilds, but allows for easier source customisations.
+
diff --git a/docs/installation/otp_vs_from_source_source.include b/docs/installation/otp_vs_from_source_source.include
index 63482b69d..9f7134229 100644
--- a/docs/installation/otp_vs_from_source_source.include
+++ b/docs/installation/otp_vs_from_source_source.include
@@ -1,3 +1,3 @@
{! backend/installation/otp_vs_from_source.include !}
-This guide covers a from-source installation. To install using OTP releases, please check out [the OTP guide](./otp_en.md).
+This guide covers a manual from-source installation. To install using OTP releases, please check for the presence of a distro package; failing that, you can use [Pleroma-provided OTP binaries](./otp_en.md).
diff --git a/elixir_buildpack.config b/elixir_buildpack.config
index 946408c12..1102e7145 100644
--- a/elixir_buildpack.config
+++ b/elixir_buildpack.config
@@ -1,2 +1,2 @@
-elixir_version=1.9.4
+elixir_version=1.10.4
erlang_version=22.3.4.1
diff --git a/installation/init.d/pleroma b/installation/init.d/pleroma
index 384536f7e..cb6635a0b 100755
--- a/installation/init.d/pleroma
+++ b/installation/init.d/pleroma
@@ -8,6 +8,7 @@ pidfile="/var/run/pleroma.pid"
directory=/opt/pleroma
healthcheck_delay=60
healthcheck_timer=30
+no_new_privs="yes"
: ${pleroma_port:-4000}
diff --git a/installation/pleroma-mediaproxy.nginx b/installation/pleroma-mediaproxy.nginx
new file mode 100644
index 000000000..6f2edf212
--- /dev/null
+++ b/installation/pleroma-mediaproxy.nginx
@@ -0,0 +1,97 @@
+# This file is for those who want to serve uploaded media and media proxy over
+# another domain. This is STRONGLY RECOMMENDED.
+# This is meant to be used ALONG WITH `pleroma.nginx`.
+
+# If this is a new instance, replace the `location ~ ^/(media|proxy)` section in
+# `pleroma.nginx` with the following to completely disable access to media from the main domain:
+# location ~ ^/(media|proxy) {
+# return 404;
+# }
+#
+# If you are configuring an existing instance to use another domain
+# for media, you will want to keep redirecting all existing local media to the new domain
+# so already-uploaded media will not break.
+# Replace the `location ~ ^/(media|proxy)` section in `pleroma.nginx` with the following:
+#
+# location /media {
+# return 301 https://some.other.domain$request_uri;
+# }
+#
+# location /proxy {
+# return 404;
+# }
+
+server {
+ server_name some.other.domain;
+
+ listen 80;
+ listen [::]:80;
+
+ # Uncomment this if you need to use the 'webroot' method with certbot. Make sure
+ # that the directory exists and that it is accessible by the webserver. If you followed
+ # the guide, you already ran 'mkdir -p /var/lib/letsencrypt' to create the folder.
+ # You may need to load this file with the ssl server block commented out, run certbot
+ # to get the certificate, and then uncomment it.
+ #
+ # location ~ /\.well-known/acme-challenge {
+ # root /var/lib/letsencrypt/;
+ # }
+ location / {
+ return 301 https://$server_name$request_uri;
+ }
+}
+
+server {
+ server_name some.other.domain;
+
+ listen 443 ssl http2;
+ listen [::]:443 ssl http2;
+ ssl_session_timeout 1d;
+ ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
+ ssl_session_tickets off;
+
+ ssl_trusted_certificate /etc/letsencrypt/live/some.other.domain/chain.pem;
+ ssl_certificate /etc/letsencrypt/live/some.other.domain/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/some.other.domain/privkey.pem;
+
+ ssl_protocols TLSv1.2 TLSv1.3;
+ ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
+ ssl_prefer_server_ciphers off;
+ # In case of an old server with an OpenSSL version of 1.0.2 or below,
+ # leave only prime256v1 or comment out the following line.
+ ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
+ ssl_stapling on;
+ ssl_stapling_verify on;
+
+ gzip_vary on;
+ gzip_proxied any;
+ gzip_comp_level 6;
+ gzip_buffers 16 8k;
+ gzip_http_version 1.1;
+ gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;
+
+ # the nginx default is 1m, not enough for large media uploads
+ client_max_body_size 16m;
+ ignore_invalid_headers off;
+
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+
+ location / { return 404; }
+
+ location ~ ^/(media|proxy) {
+ proxy_cache pleroma_media_cache;
+ slice 1m;
+ proxy_cache_key $host$uri$is_args$args$slice_range;
+ proxy_set_header Range $slice_range;
+ proxy_cache_valid 200 206 301 304 1h;
+ proxy_cache_lock on;
+ proxy_ignore_client_abort on;
+ proxy_buffering on;
+ chunked_transfer_encoding on;
+ proxy_pass http://phoenix;
+ }
+}
diff --git a/installation/pleroma-mongooseim.cfg b/installation/pleroma-mongooseim.cfg
index 576f83541..6b568fd03 100755
--- a/installation/pleroma-mongooseim.cfg
+++ b/installation/pleroma-mongooseim.cfg
@@ -204,7 +204,7 @@
]}
]},
- %% Following HTTP API is deprected, the new one abouve should be used instead
+ %% Following HTTP API is deprecated, the new one above should be used instead
{ {5288, "127.0.0.1"} , ejabberd_cowboy, [
{num_acceptors, 10},
@@ -466,7 +466,7 @@
%% == PostgreSQL ==
%% {rdbms, global, default, [{workers, 10}],
-%% [{server, {pgsql, "server", 5432, "database", "username", "password"}}]},
+%% [{server, {pgsql, "server", "port", "database", "username", "password"}}]},
%% == ODBC (MSSQL) ==
%% {rdbms, global, default, [{workers, 10}],
@@ -824,7 +824,7 @@
%% Enable archivization for private messages (default)
% {pm, [
- %% Top-level options can be overriden here if needed, for example:
+ %% Top-level options can be overridden here if needed, for example:
% {async_writer, false}
% ]},
@@ -834,7 +834,7 @@
%%
% {muc, [
% {host, "muc.@HOST@"}
- %% As with pm, top-level options can be overriden for MUC archive
+ %% As with pm, top-level options can be overridden for MUC archive
% ]},
%
%% Do not use a element (by default stanzaid is used)
diff --git a/installation/pleroma.nginx b/installation/pleroma.nginx
index 9890cb2b1..273cfb390 100644
--- a/installation/pleroma.nginx
+++ b/installation/pleroma.nginx
@@ -81,6 +81,19 @@ server {
proxy_pass http://phoenix;
}
+ # Uncomment this if you want notice compatibility routes for frontends like Soapbox.
+ # location ~ ^/@[^/]+/([^/]+)$ {
+ # proxy_pass http://phoenix/notice/$1;
+ # }
+ #
+ # location ~ ^/@[^/]+/posts/([^/]+)$ {
+ # proxy_pass http://phoenix/notice/$1;
+ # }
+ #
+ # location ~ ^/[^/]+/status/([^/]+)$ {
+ # proxy_pass http://phoenix/notice/$1;
+ # }
+
location ~ ^/(media|proxy) {
proxy_cache pleroma_media_cache;
slice 1m;
diff --git a/installation/pleroma.vcl b/installation/pleroma.vcl
index 4752510ea..4eb2f3cfa 100644
--- a/installation/pleroma.vcl
+++ b/installation/pleroma.vcl
@@ -1,4 +1,5 @@
# Recommended varnishncsa logging format: '%h %l %u %t "%m %{X-Forwarded-Proto}i://%{Host}i%U%q %H" %s %b "%{Referer}i" "%{User-agent}i"'
+# Please use Varnish 7.0+ for proper Range Requests / Chunked encoding support
vcl 4.1;
import std;
@@ -22,11 +23,6 @@ sub vcl_recv {
set req.http.X-Forwarded-Proto = "https";
}
- # CHUNKED SUPPORT
- if (req.http.Range ~ "bytes=") {
- set req.http.x-range = req.http.Range;
- }
-
# Pipe if WebSockets request is coming through
if (req.http.upgrade ~ "(?i)websocket") {
return (pipe);
@@ -35,9 +31,9 @@ sub vcl_recv {
# Allow purging of the cache
if (req.method == "PURGE") {
if (!client.ip ~ purge) {
- return(synth(405,"Not allowed."));
+ return (synth(405,"Not allowed."));
}
- return(purge);
+ return (purge);
}
}
@@ -53,17 +49,11 @@ sub vcl_backend_response {
return (retry);
}
- # CHUNKED SUPPORT
- if (bereq.http.x-range ~ "bytes=" && beresp.status == 206) {
- set beresp.ttl = 10m;
- set beresp.http.CR = beresp.http.content-range;
- }
-
# Bypass cache for large files
# 50000000 ~ 50MB
if (std.integer(beresp.http.content-length, 0) > 50000000) {
set beresp.uncacheable = true;
- return(deliver);
+ return (deliver);
}
# Don't cache objects that require authentication
@@ -94,7 +84,7 @@ sub vcl_synth {
if (resp.status == 750) {
set resp.status = 301;
set resp.http.Location = req.http.x-redir;
- return(deliver);
+ return (deliver);
}
}
@@ -106,25 +96,12 @@ sub vcl_pipe {
}
}
-sub vcl_hash {
- # CHUNKED SUPPORT
- if (req.http.x-range ~ "bytes=") {
- hash_data(req.http.x-range);
- unset req.http.Range;
- }
-}
-
sub vcl_backend_fetch {
# Be more lenient for slow servers on the fediverse
if (bereq.url ~ "^/proxy/") {
set bereq.first_byte_timeout = 300s;
}
- # CHUNKED SUPPORT
- if (bereq.http.x-range) {
- set bereq.http.Range = bereq.http.x-range;
- }
-
if (bereq.retries == 0) {
# Clean up the X-Varnish-Backend-503 flag that is used internally
# to mark broken backend responses that should be retried.
@@ -143,14 +120,6 @@ sub vcl_backend_fetch {
}
}
-sub vcl_deliver {
- # CHUNKED SUPPORT
- if (resp.http.CR) {
- set resp.http.Content-Range = resp.http.CR;
- unset resp.http.CR;
- }
-}
-
sub vcl_backend_error {
# Retry broken backend responses.
set bereq.http.X-Varnish-Backend-503 = "1";
diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex
index 33d147d36..3a2ea44f8 100644
--- a/lib/mix/tasks/pleroma/config.ex
+++ b/lib/mix/tasks/pleroma/config.ex
@@ -304,13 +304,8 @@ defp write_config(file, path, opts) do
System.cmd("mix", ["format", path])
end
- if Code.ensure_loaded?(Config.Reader) do
- defp config_header, do: "import Config\r\n\r\n"
- defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
- else
- defp config_header, do: "use Mix.Config\r\n\r\n"
- defp read_file(config_file), do: Mix.Config.eval!(config_file)
- end
+ defp config_header, do: "import Config\r\n\r\n"
+ defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
defp write_and_delete(config, file, delete?) do
config
diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex
index 6b8f0ef68..93ee57dc3 100644
--- a/lib/mix/tasks/pleroma/database.ex
+++ b/lib/mix/tasks/pleroma/database.ex
@@ -154,9 +154,8 @@ def run(["ensure_expiration"]) do
|> join(:inner, [a], o in Object,
on:
fragment(
- "(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
+ "(?->>'id') = associated_object_id((?))",
o.data,
- a.data,
a.data
)
)
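
These queries now rely on a database-side helper function `associated_object_id/1` instead of inlining the COALESCE lookup. The function itself is presumably created by a migration elsewhere in this changeset; a hypothetical sketch of such a migration, assuming it simply wraps the previous COALESCE expression, could look like:

```elixir
defmodule Pleroma.Repo.Migrations.AddAssociatedObjectIdFunction do
  use Ecto.Migration

  def up do
    # Hypothetical definition mirroring the old inline lookup:
    # take the embedded object's id if present, else the bare "object" string.
    execute("""
    CREATE OR REPLACE FUNCTION associated_object_id(data jsonb) RETURNS text AS $$
      SELECT COALESCE(data->'object'->>'id', data->>'object')
    $$ LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE;
    """)
  end

  def down do
    execute("DROP FUNCTION IF EXISTS associated_object_id(jsonb)")
  end
end
```
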
@@ -194,7 +193,7 @@ def run(["set_text_search_config", tsconfig]) do
"ALTER DATABASE #{db} SET default_text_search_config = '#{tsconfig}';"
)
- # non-exist config will not raise excpetion but only give >0 messages
+ # non-exist config will not raise exception but only give >0 messages
if length(msg) > 0 do
shell_info("Error: #{inspect(msg, pretty: true)}")
else
diff --git a/lib/mix/tasks/pleroma/digest.ex b/lib/mix/tasks/pleroma/digest.ex
index aea9c8ac5..53cac0b94 100644
--- a/lib/mix/tasks/pleroma/digest.ex
+++ b/lib/mix/tasks/pleroma/digest.ex
@@ -30,7 +30,7 @@ def run(["test", nickname | opts]) do
shell_info("Digest email have been sent to #{nickname} (#{user.email})")
else
_ ->
- shell_info("Cound't find any mentions for #{nickname} since #{last_digest_emailed_at}")
+ shell_info("Couldn't find any mentions for #{nickname} since #{last_digest_emailed_at}")
end
end
end
diff --git a/lib/mix/tasks/pleroma/ecto/rollback.ex b/lib/mix/tasks/pleroma/ecto/rollback.ex
index 3d78eaec4..121890f39 100644
--- a/lib/mix/tasks/pleroma/ecto/rollback.ex
+++ b/lib/mix/tasks/pleroma/ecto/rollback.ex
@@ -61,7 +61,7 @@ def run(args \\ []) do
Logger.configure(level: :info)
if opts[:env] == "test" do
- Logger.info("Rollback succesfully")
+      Logger.info("Rollback successful")
else
{:ok, _, _} =
Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :down, opts))
diff --git a/lib/mix/tasks/pleroma/emoji.ex b/lib/mix/tasks/pleroma/emoji.ex
index 537f0715e..8b9c921c8 100644
--- a/lib/mix/tasks/pleroma/emoji.ex
+++ b/lib/mix/tasks/pleroma/emoji.ex
@@ -111,7 +111,7 @@ def run(["get-packs" | args]) do
{:ok, _} =
:zip.unzip(binary_archive,
- cwd: pack_path,
+ cwd: String.to_charlist(pack_path),
file_list: files_to_unzip
)
diff --git a/lib/mix/tasks/pleroma/instance.ex b/lib/mix/tasks/pleroma/instance.ex
index 5c93f19ff..0dc30549c 100644
--- a/lib/mix/tasks/pleroma/instance.ex
+++ b/lib/mix/tasks/pleroma/instance.ex
@@ -266,12 +266,20 @@ def run(["gen" | rest]) do
config_dir = Path.dirname(config_path)
psql_dir = Path.dirname(psql_path)
+ # Note: Distros requiring group read (0o750) on those directories should
+ # pre-create the directories.
[config_dir, psql_dir, static_dir, uploads_dir]
|> Enum.reject(&File.exists?/1)
- |> Enum.map(&File.mkdir_p!/1)
+ |> Enum.each(fn dir ->
+ File.mkdir_p!(dir)
+ File.chmod!(dir, 0o700)
+ end)
shell_info("Writing config to #{config_path}.")
+ # Sadly no fchmod(2) equivalent in Elixir…
+ File.touch!(config_path)
+ File.chmod!(config_path, 0o640)
File.write(config_path, result_config)
shell_info("Writing the postgres script to #{psql_path}.")
File.write(psql_path, result_psql)
@@ -284,14 +292,13 @@ def run(["gen" | rest]) do
if db_configurable? do
shell_info(
- " Please transfer your config to the database after running database migrations. Refer to \"Transfering the config to/from the database\" section of the docs for more information."
+ " Please transfer your config to the database after running database migrations. Refer to \"Transferring the config to/from the database\" section of the docs for more information."
)
end
else
shell_error(
"The task would have overwritten the following files:\n" <>
- (Enum.map(will_overwrite, &"- #{&1}\n") |> Enum.join("")) <>
- "Rerun with `--force` to overwrite them."
+ Enum.map_join(will_overwrite, &"- #{&1}\n") <> "Rerun with `--force` to overwrite them."
)
end
end
@@ -345,6 +352,4 @@ defp upload_filters(filters) when is_map(filters) do
enabled_filters
end
-
- defp upload_filters(_), do: []
end
diff --git a/lib/mix/tasks/pleroma/openapi_spec.ex b/lib/mix/tasks/pleroma/openapi_spec.ex
index 884f931f8..1ea468476 100644
--- a/lib/mix/tasks/pleroma/openapi_spec.ex
+++ b/lib/mix/tasks/pleroma/openapi_spec.ex
@@ -6,7 +6,70 @@ defmodule Mix.Tasks.Pleroma.OpenapiSpec do
def run([path]) do
# Load Pleroma application to get version info
Application.load(:pleroma)
- spec = Pleroma.Web.ApiSpec.spec(server_specific: false) |> Jason.encode!()
- File.write(path, spec)
+
+ spec_json = Pleroma.Web.ApiSpec.spec(server_specific: false) |> Jason.encode!()
+ # to get rid of the structs
+ spec_regened = spec_json |> Jason.decode!()
+
+ check_specs!(spec_regened)
+
+ File.write(path, spec_json)
+ end
+
+ defp check_specs!(spec) do
+ with :ok <- check_specs(spec) do
+ :ok
+ else
+ {_, errors} ->
+ IO.puts(IO.ANSI.format([:red, :bright, "Spec check failed, errors:"]))
+ Enum.map(errors, &IO.puts/1)
+
+ raise "Spec check failed"
+ end
+ end
+
+ def check_specs(spec) do
+ errors =
+ spec["paths"]
+ |> Enum.flat_map(fn {path, %{} = endpoints} ->
+ Enum.map(
+ endpoints,
+ fn {method, endpoint} ->
+ with :ok <- check_endpoint(spec, endpoint) do
+ :ok
+ else
+ error ->
+ "#{endpoint["operationId"]} (#{method} #{path}): #{error}"
+ end
+ end
+ )
+ |> Enum.reject(fn res -> res == :ok end)
+ end)
+
+ if errors == [] do
+ :ok
+ else
+ {:error, errors}
+ end
+ end
+
+ defp check_endpoint(spec, endpoint) do
+ valid_tags = available_tags(spec)
+
+ with {_, [_ | _] = tags} <- {:tags, endpoint["tags"]},
+ {_, []} <- {:unavailable, Enum.reject(tags, &(&1 in valid_tags))} do
+ :ok
+ else
+ {:tags, _} ->
+ "No tags specified"
+
+ {:unavailable, tags} ->
+ "Tags #{inspect(tags)} not available. Please add it in \"x-tagGroups\" in Pleroma.Web.ApiSpec"
+ end
+ end
+
+ defp available_tags(spec) do
+ spec["x-tagGroups"]
+ |> Enum.flat_map(fn %{"tags" => tags} -> tags end)
end
end
diff --git a/lib/mix/tasks/pleroma/search/meilisearch.ex b/lib/mix/tasks/pleroma/search/meilisearch.ex
new file mode 100644
index 000000000..8379a0c25
--- /dev/null
+++ b/lib/mix/tasks/pleroma/search/meilisearch.ex
@@ -0,0 +1,145 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
+ require Pleroma.Constants
+
+ import Mix.Pleroma
+ import Ecto.Query
+
+ import Pleroma.Search.Meilisearch,
+ only: [meili_post: 2, meili_put: 2, meili_get: 1, meili_delete: 1]
+
+ def run(["index"]) do
+ start_pleroma()
+ Pleroma.HTML.compile_scrubbers()
+
+ meili_version =
+ (
+ {:ok, result} = meili_get("/version")
+
+ result["pkgVersion"]
+ )
+
+ # The ranking rule syntax was changed but nothing about that is mentioned in the changelog
+ if not Version.match?(meili_version, ">= 0.25.0") do
+      raise "Meilisearch <0.25.0 not supported"
+ end
+
+ {:ok, _} =
+ meili_post(
+ "/indexes/objects/settings/ranking-rules",
+ [
+ "published:desc",
+ "words",
+ "exactness",
+ "proximity",
+ "typo",
+ "attribute",
+ "sort"
+ ]
+ )
+
+ {:ok, _} =
+ meili_post(
+ "/indexes/objects/settings/searchable-attributes",
+ [
+ "content"
+ ]
+ )
+
+ IO.puts("Created indices. Starting to insert posts.")
+
+ chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])
+
+ Pleroma.Repo.transaction(
+ fn ->
+ query =
+ from(Pleroma.Object,
+ # Only index public and unlisted posts which are notes and have some text
+ where:
+ fragment("data->>'type' = 'Note'") and
+ (fragment("data->'to' \\? ?", ^Pleroma.Constants.as_public()) or
+ fragment("data->'cc' \\? ?", ^Pleroma.Constants.as_public())),
+ order_by: [desc: fragment("data->'published'")]
+ )
+
+ count = query |> Pleroma.Repo.aggregate(:count, :data)
+ IO.puts("Entries to index: #{count}")
+
+ Pleroma.Repo.stream(
+ query,
+ timeout: :infinity
+ )
+ |> Stream.map(&Pleroma.Search.Meilisearch.object_to_search_data/1)
+ |> Stream.filter(fn o -> not is_nil(o) end)
+ |> Stream.chunk_every(chunk_size)
+ |> Stream.transform(0, fn objects, acc ->
+ new_acc = acc + Enum.count(objects)
+
+ # Reset to the beginning of the line and rewrite it
+ IO.write("\r")
+ IO.write("Indexed #{new_acc} entries")
+
+ {[objects], new_acc}
+ end)
+ |> Stream.each(fn objects ->
+ result =
+ meili_put(
+ "/indexes/objects/documents",
+ objects
+ )
+
+ with {:ok, res} <- result do
+ if not Map.has_key?(res, "uid") do
+ IO.puts("\nFailed to index: #{inspect(result)}")
+ end
+ else
+ e -> IO.puts("\nFailed to index due to network error: #{inspect(e)}")
+ end
+ end)
+ |> Stream.run()
+ end,
+ timeout: :infinity
+ )
+
+ IO.write("\n")
+ end
+
+ def run(["clear"]) do
+ start_pleroma()
+
+ meili_delete("/indexes/objects/documents")
+ end
+
+ def run(["show-keys", master_key]) do
+ start_pleroma()
+
+ endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])
+
+ {:ok, result} =
+ Pleroma.HTTP.get(
+ Path.join(endpoint, "/keys"),
+ [{"Authorization", "Bearer #{master_key}"}]
+ )
+
+ decoded = Jason.decode!(result.body)
+
+ if decoded["results"] do
+ Enum.each(decoded["results"], fn %{"description" => desc, "key" => key} ->
+ IO.puts("#{desc}: #{key}")
+ end)
+ else
+ IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
+ end
+ end
+
+ def run(["stats"]) do
+ start_pleroma()
+
+ {:ok, result} = meili_get("/indexes/objects/stats")
+ IO.puts("Number of entries: #{result["numberOfDocuments"]}")
+ IO.puts("Indexing? #{result["isIndexing"]}")
+ end
+end
diff --git a/lib/mix/tasks/pleroma/user.ex b/lib/mix/tasks/pleroma/user.ex
index 96d4eb90b..929fa1717 100644
--- a/lib/mix/tasks/pleroma/user.ex
+++ b/lib/mix/tasks/pleroma/user.ex
@@ -112,9 +112,10 @@ def run(["reset_password", nickname]) do
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
shell_info("Generated password reset token for #{user.nickname}")
- IO.puts("URL: #{Pleroma.Web.Router.Helpers.reset_password_url(Pleroma.Web.Endpoint,
- :reset,
- token.token)}")
+ url =
+ Pleroma.Web.Router.Helpers.reset_password_url(Pleroma.Web.Endpoint, :reset, token.token)
+
+ IO.puts("URL: #{url}")
else
_ ->
shell_error("No local user #{nickname}")
@@ -421,6 +422,38 @@ def run(["list"]) do
|> Stream.run()
end
+ def run(["fix_follow_state", local_user, remote_user]) do
+ start_pleroma()
+
+ with {:local, %User{} = local} <- {:local, User.get_by_nickname(local_user)},
+ {:remote, %User{} = remote} <- {:remote, User.get_by_nickname(remote_user)},
+ {:follow_data, %{data: %{"state" => request_state}}} <-
+ {:follow_data, Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(local, remote)} do
+ calculated_state = User.following?(local, remote)
+
+ shell_info(
+ "Request state is #{request_state}, vs calculated state of following=#{calculated_state}"
+ )
+
+ if calculated_state == false && request_state == "accept" do
+ shell_info("Discrepancy found, fixing")
+ Pleroma.Web.CommonAPI.reject_follow_request(local, remote)
+ shell_info("Relationship fixed")
+ else
+ shell_info("No discrepancy found")
+ end
+ else
+ {:local, _} ->
+ shell_error("No local user #{local_user}")
+
+ {:remote, _} ->
+ shell_error("No remote user #{remote_user}")
+
+ {:follow_data, _} ->
+ shell_error("No follow data for #{local_user} and #{remote_user}")
+ end
+ end
+
defp set_moderator(user, value) do
{:ok, user} =
user
diff --git a/lib/phoenix/transports/web_socket/raw.ex b/lib/phoenix/transports/web_socket/raw.ex
deleted file mode 100644
index 8cf9c32a2..000000000
--- a/lib/phoenix/transports/web_socket/raw.ex
+++ /dev/null
@@ -1,94 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Phoenix.Transports.WebSocket.Raw do
- import Plug.Conn,
- only: [
- fetch_query_params: 1,
- send_resp: 3
- ]
-
- alias Phoenix.Socket.Transport
-
- def default_config do
- [
- timeout: 60_000,
- transport_log: false,
- cowboy: Phoenix.Endpoint.CowboyWebSocket
- ]
- end
-
- def init(%Plug.Conn{method: "GET"} = conn, {endpoint, handler, transport}) do
- {_, opts} = handler.__transport__(transport)
-
- conn =
- conn
- |> fetch_query_params
- |> Transport.transport_log(opts[:transport_log])
- |> Transport.force_ssl(handler, endpoint, opts)
- |> Transport.check_origin(handler, endpoint, opts)
-
- case conn do
- %{halted: false} = conn ->
- case handler.connect(%{
- endpoint: endpoint,
- transport: transport,
- options: [serializer: nil],
- params: conn.params
- }) do
- {:ok, socket} ->
- {:ok, conn, {__MODULE__, {socket, opts}}}
-
- :error ->
- send_resp(conn, :forbidden, "")
- {:error, conn}
- end
-
- _ ->
- {:error, conn}
- end
- end
-
- def init(conn, _) do
- send_resp(conn, :bad_request, "")
- {:error, conn}
- end
-
- def ws_init({socket, config}) do
- Process.flag(:trap_exit, true)
- {:ok, %{socket: socket}, config[:timeout]}
- end
-
- def ws_handle(op, data, state) do
- state.socket.handler
- |> apply(:handle, [op, data, state])
- |> case do
- {op, data} ->
- {:reply, {op, data}, state}
-
- {op, data, state} ->
- {:reply, {op, data}, state}
-
- %{} = state ->
- {:ok, state}
-
- _ ->
- {:ok, state}
- end
- end
-
- def ws_info({_, _} = tuple, state) do
- {:reply, tuple, state}
- end
-
- def ws_info(_tuple, state), do: {:ok, state}
-
- def ws_close(state) do
- ws_handle(:closed, :normal, state)
- end
-
- def ws_terminate(reason, state) do
- ws_handle(:closed, reason, state)
- end
-end
diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex
index 12c1a3b2e..8a512dc57 100644
--- a/lib/pleroma/activity.ex
+++ b/lib/pleroma/activity.ex
@@ -53,7 +53,7 @@ defmodule Pleroma.Activity do
#
# ```
# |> join(:inner, [activity], o in Object,
- # on: fragment("(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
+ # on: fragment("(?->>'id') = associated_object_id((?))",
-#     o.data, activity.data, activity.data))
+#     o.data, activity.data))
# |> preload([activity, object], [object: object])
# ```
@@ -69,9 +69,8 @@ def with_joined_object(query, join_type \\ :inner) do
join(query, join_type, [activity], o in Object,
on:
fragment(
- "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
+ "(?->>'id') = associated_object_id(?)",
o.data,
- activity.data,
activity.data
),
as: :object
@@ -362,12 +361,14 @@ def following_requests_for_actor(%User{ap_id: ap_id}) do
end
def restrict_deactivated_users(query) do
- deactivated_users_query = from(u in User.Query.build(%{deactivated: true}), select: u.ap_id)
-
- from(activity in query, where: activity.actor not in subquery(deactivated_users_query))
+ query
+ |> join(:inner, [activity], user in User,
+ as: :user,
+ on: activity.actor == user.ap_id and user.is_active == true
+ )
end
- defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search
+ defdelegate search(user, query, options \\ []), to: Pleroma.Search.DatabaseSearch
def direct_conversation_id(activity, for_user) do
alias Pleroma.Conversation.Participation
diff --git a/lib/pleroma/activity/html.ex b/lib/pleroma/activity/html.ex
index 071a89c8d..ba284b4d5 100644
--- a/lib/pleroma/activity/html.ex
+++ b/lib/pleroma/activity/html.ex
@@ -8,6 +8,40 @@ defmodule Pleroma.Activity.HTML do
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ # We store a list of cache keys related to an activity in a
+ # separate cache, scrubber_management_cache. It has the same
+ # size as scrubber_cache (see application.ex). Every time we add
+ # a cache to scrubber_cache, we update scrubber_management_cache.
+ #
+ # The most recent write of a certain key in the management cache
+ # is the same as the most recent write of any record related to that
+ # key in the main cache.
+ # Assuming LRW ( https://hexdocs.pm/cachex/Cachex.Policy.LRW.html ),
+ # this means when the management cache is evicted by cachex, all
+ # related records in the main cache will also have been evicted.
+
+ defp get_cache_keys_for(activity_id) do
+ with {:ok, list} when is_list(list) <- @cachex.get(:scrubber_management_cache, activity_id) do
+ list
+ else
+ _ -> []
+ end
+ end
+
+ def add_cache_key_for(activity_id, additional_key) do
+ current = get_cache_keys_for(activity_id)
+
+ unless additional_key in current do
+ @cachex.put(:scrubber_management_cache, activity_id, [additional_key | current])
+ end
+ end
+
+ def invalidate_cache_for(activity_id) do
+ keys = get_cache_keys_for(activity_id)
+ Enum.map(keys, &@cachex.del(:scrubber_cache, &1))
+ @cachex.del(:scrubber_management_cache, activity_id)
+ end
+
def get_cached_scrubbed_html_for_activity(
content,
scrubbers,
@@ -19,6 +53,8 @@ def get_cached_scrubbed_html_for_activity(
@cachex.fetch!(:scrubber_cache, key, fn _key ->
object = Object.normalize(activity, fetch: false)
+
+ add_cache_key_for(activity.id, key)
HTML.ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
end)
end
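The tracking scheme above can be exercised from the caller's side. A minimal sketch, assuming an already-persisted activity; the wrapper module and function name are illustrative, only invalidate_cache_for/1 comes from the code above:

```elixir
defmodule MyApp.ScrubberCacheExample do
  alias Pleroma.Activity

  # invalidate_cache_for/1 deletes every :scrubber_cache key recorded for the
  # activity in :scrubber_management_cache, then removes the tracking entry
  # itself, so the next render goes through the fetch callback again.
  def refresh_rendered_html(%Activity{id: id}) do
    Pleroma.Activity.HTML.invalidate_cache_for(id)
  end
end
```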
diff --git a/lib/pleroma/activity/ir/topics.ex b/lib/pleroma/activity/ir/topics.ex
index 56c52e9d1..8249cbe27 100644
--- a/lib/pleroma/activity/ir/topics.ex
+++ b/lib/pleroma/activity/ir/topics.ex
@@ -13,6 +13,14 @@ def get_activity_topics(activity) do
|> List.flatten()
end
+ defp generate_topics(%{data: %{"type" => "ChatMessage"}}, %{data: %{"type" => "Delete"}}) do
+ ["user", "user:pleroma_chat"]
+ end
+
+ defp generate_topics(%{data: %{"type" => "ChatMessage"}}, %{data: %{"type" => "Create"}}) do
+ []
+ end
+
defp generate_topics(%{data: %{"type" => "Answer"}}, _) do
[]
end
@@ -21,7 +29,7 @@ defp generate_topics(object, activity) do
["user", "list"] ++ visibility_tags(object, activity)
end
- defp visibility_tags(object, activity) do
+ defp visibility_tags(object, %{data: %{"type" => type}} = activity) when type != "Announce" do
case Visibility.get_visibility(activity) do
"public" ->
if activity.local do
@@ -31,6 +39,10 @@ defp visibility_tags(object, activity) do
end
|> item_creation_tags(object, activity)
+ "local" ->
+ ["public:local"]
+ |> item_creation_tags(object, activity)
+
"direct" ->
["direct"]
@@ -39,6 +51,10 @@ defp visibility_tags(object, activity) do
end
end
+ defp visibility_tags(_object, _activity) do
+ []
+ end
+
defp item_creation_tags(tags, object, %{data: %{"type" => "Create"}} = activity) do
tags ++
remote_topics(activity) ++ hashtags_to_topics(object) ++ attachment_topics(object, activity)
@@ -63,7 +79,18 @@ defp remote_topics(_), do: []
defp attachment_topics(%{data: %{"attachment" => []}}, _act), do: []
- defp attachment_topics(_object, %{local: true}), do: ["public:media", "public:local:media"]
+ defp attachment_topics(_object, %{local: true} = activity) do
+ case Visibility.get_visibility(activity) do
+ "public" ->
+ ["public:media", "public:local:media"]
+
+ "local" ->
+ ["public:local:media"]
+
+ _ ->
+ []
+ end
+ end
defp attachment_topics(_object, %{actor: actor}) when is_binary(actor),
do: ["public:media", "public:remote:media:" <> URI.parse(actor).host]
diff --git a/lib/pleroma/activity/queries.ex b/lib/pleroma/activity/queries.ex
index a898b2ea7..d770b9ff3 100644
--- a/lib/pleroma/activity/queries.ex
+++ b/lib/pleroma/activity/queries.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Activity.Queries do
import Ecto.Query, only: [from: 2, where: 3]
- @type query :: Ecto.Queryable.t() | Activity.t()
+ @type query :: Ecto.Queryable.t() | Pleroma.Activity.t()
alias Pleroma.Activity
alias Pleroma.User
@@ -52,8 +52,7 @@ def by_object_id(query, object_ids) when is_list(object_ids) do
activity in query,
where:
fragment(
- "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
- activity.data,
+ "associated_object_id((?)) = ANY(?)",
activity.data,
^object_ids
)
@@ -64,8 +63,7 @@ def by_object_id(query, object_id) when is_binary(object_id) do
from(activity in query,
where:
fragment(
- "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
- activity.data,
+ "associated_object_id((?)) = ?",
activity.data,
^object_id
)
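Both fragments now assume an `associated_object_id/1` SQL function exists in the database. A sketch of a migration that could provide it — the module name and SQL body are a guess based on the fragments above, not the project's actual migration; the body simply mirrors the COALESCE expression that was inlined before:

```elixir
defmodule Pleroma.Repo.Migrations.AddAssociatedObjectIdFunction do
  use Ecto.Migration

  def up do
    # Same lookup the removed COALESCE fragment performed inline.
    execute("""
    CREATE OR REPLACE FUNCTION associated_object_id(data jsonb) RETURNS text AS $$
      SELECT COALESCE(data->'object'->>'id', data->>'object')
    $$ LANGUAGE SQL IMMUTABLE;
    """)
  end

  def down do
    execute("DROP FUNCTION IF EXISTS associated_object_id(jsonb);")
  end
end
```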
diff --git a/lib/pleroma/announcement.ex b/lib/pleroma/announcement.ex
index d97c5e728..5a3c710e8 100644
--- a/lib/pleroma/announcement.ex
+++ b/lib/pleroma/announcement.ex
@@ -23,19 +23,21 @@ defmodule Pleroma.Announcement do
timestamps(type: :utc_datetime)
end
- def change(struct, params \\ %{}) do
- struct
- |> cast(validate_params(struct, params), [:data, :starts_at, :ends_at, :rendered])
+ @doc "Generates changeset for %Pleroma.Announcement{}"
+ @spec changeset(%__MODULE__{}, map()) :: %Ecto.Changeset{}
+ def changeset(announcement \\ %__MODULE__{}, params \\ %{data: %{}}) do
+ announcement
+ |> cast(validate_params(announcement, params), [:data, :starts_at, :ends_at, :rendered])
|> validate_required([:data])
end
- defp validate_params(struct, params) do
+ defp validate_params(announcement, params) do
base_data =
%{
"content" => "",
"all_day" => false
}
- |> Map.merge((struct && struct.data) || %{})
+ |> Map.merge((announcement && announcement.data) || %{})
merged_data =
Map.merge(base_data, params.data)
@@ -61,13 +63,13 @@ def add_rendered_properties(params) do
end
def add(params) do
- changeset = change(%__MODULE__{}, params)
+ changeset = changeset(%__MODULE__{}, params)
Repo.insert(changeset)
end
def update(announcement, params) do
- changeset = change(announcement, params)
+ changeset = changeset(announcement, params)
Repo.update(changeset)
end
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index d808bc732..d266d1836 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -14,7 +14,6 @@ defmodule Pleroma.Application do
@name Mix.Project.config()[:name]
@version Mix.Project.config()[:version]
@repository Mix.Project.config()[:source_url]
- @mix_env Mix.env()
def name, do: @name
def version, do: @version
@@ -54,7 +53,6 @@ def start(_type, _args) do
Config.DeprecationWarnings.warn()
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
Pleroma.ApplicationRequirements.verify!()
- setup_instrumenters()
load_custom_modules()
Pleroma.Docs.JSON.compile()
limiters_setup()
@@ -91,13 +89,15 @@ def start(_type, _args) do
# Define workers and child supervisors to be supervised
children =
[
+ Pleroma.PromEx,
Pleroma.Repo,
Config.TransferTask,
Pleroma.Emoji,
- Pleroma.Web.Plugs.RateLimiter.Supervisor
+ Pleroma.Web.Plugs.RateLimiter.Supervisor,
+ {Task.Supervisor, name: Pleroma.TaskSupervisor}
] ++
cachex_children() ++
- http_children(adapter, @mix_env) ++
+ http_children(adapter) ++
[
Pleroma.Stats,
Pleroma.JobQueueMonitor,
@@ -105,36 +105,22 @@ def start(_type, _args) do
{Oban, Config.get(Oban)},
Pleroma.Web.Endpoint
] ++
- task_children(@mix_env) ++
- dont_run_in_test(@mix_env) ++
+ task_children() ++
+ streamer_registry() ++
+ background_migrators() ++
shout_child(shout_enabled?()) ++
- [Pleroma.Gopher.Server]
+ [Pleroma.Gopher.Server] ++
+ [Pleroma.Search.Healthcheck]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
- opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
- result = Supervisor.start_link(children, opts)
+ # If we have a lot of caches, default max_restarts can cause test
+ # resets to fail.
+ # Go for the default 3 unless we're in test
+ max_restarts = Application.get_env(:pleroma, __MODULE__)[:max_restarts]
- set_postgres_server_version()
-
- result
- end
-
- defp set_postgres_server_version do
- version =
- with %{rows: [[version]]} <- Ecto.Adapters.SQL.query!(Pleroma.Repo, "show server_version"),
- {num, _} <- Float.parse(version) do
- num
- else
- e ->
- Logger.warn(
- "Could not get the postgres version: #{inspect(e)}.\nSetting the default value of 9.6"
- )
-
- 9.6
- end
-
- :persistent_term.put({Pleroma.Repo, :postgres_version}, version)
+ opts = [strategy: :one_for_one, name: Pleroma.Supervisor, max_restarts: max_restarts]
+ Supervisor.start_link(children, opts)
end
def load_custom_modules do
@@ -148,7 +134,7 @@ def load_custom_modules do
raise "Invalid custom modules"
{:ok, modules, _warnings} ->
- if @mix_env != :test do
+ if Application.get_env(:pleroma, __MODULE__)[:load_custom_modules] do
Enum.each(modules, fn mod ->
Logger.info("Custom module loaded: #{inspect(mod)}")
end)
@@ -159,29 +145,6 @@ def load_custom_modules do
end
end
- defp setup_instrumenters do
- require Prometheus.Registry
-
- if Application.get_env(:prometheus, Pleroma.Repo.Instrumenter) do
- :ok =
- :telemetry.attach(
- "prometheus-ecto",
- [:pleroma, :repo, :query],
- &Pleroma.Repo.Instrumenter.handle_event/4,
- %{}
- )
-
- Pleroma.Repo.Instrumenter.setup()
- end
-
- Pleroma.Web.Endpoint.MetricsExporter.setup()
- Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
-
- # Note: disabled until prometheus-phx is integrated into prometheus-phoenix:
- # Pleroma.Web.Endpoint.Instrumenter.setup()
- PrometheusPhx.setup()
- end
-
defp cachex_children do
[
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
@@ -189,15 +152,19 @@ defp cachex_children do
build_cachex("object", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
build_cachex("rich_media", default_ttl: :timer.minutes(120), limit: 5000),
build_cachex("scrubber", limit: 2500),
+ build_cachex("scrubber_management", limit: 2500),
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
+ build_cachex("failed_media_helper_url", default_ttl: :timer.minutes(15), limit: 2_500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("chat_message_id_idempotency_key",
expiration: chat_message_id_idempotency_key_expiration(),
limit: 500_000
- )
+ ),
+ build_cachex("rel_me", limit: 2500),
+ build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000)
]
end
@@ -223,23 +190,30 @@ def build_cachex(type, opts),
defp shout_enabled?, do: Config.get([:shout, :enabled])
- defp dont_run_in_test(env) when env in [:test, :benchmark], do: []
-
- defp dont_run_in_test(_) do
- [
- {Registry,
- [
- name: Pleroma.Web.Streamer.registry(),
- keys: :duplicate,
- partitions: System.schedulers_online()
- ]}
- ] ++ background_migrators()
+ defp streamer_registry do
+ if Application.get_env(:pleroma, __MODULE__)[:streamer_registry] do
+ [
+ {Registry,
+ [
+ name: Pleroma.Web.Streamer.registry(),
+ keys: :duplicate,
+ partitions: System.schedulers_online()
+ ]}
+ ]
+ else
+ []
+ end
end
defp background_migrators do
- [
- Pleroma.Migrators.HashtagsTableMigrator
- ]
+ if Application.get_env(:pleroma, __MODULE__)[:background_migrators] do
+ [
+ Pleroma.Migrators.HashtagsTableMigrator,
+ Pleroma.Migrators.ContextObjectsDeletionMigrator
+ ]
+ else
+ []
+ end
end
defp shout_child(true) do
@@ -251,37 +225,43 @@ defp shout_child(true) do
defp shout_child(_), do: []
- defp task_children(:test) do
- [
+ defp task_children do
+ children = [
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
}
]
- end
- defp task_children(_) do
- [
- %{
- id: :web_push_init,
- start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
- restart: :temporary
- },
- %{
- id: :internal_fetch_init,
- start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
- restart: :temporary
- }
- ]
+ if Application.get_env(:pleroma, __MODULE__)[:internal_fetch] do
+ children ++
+ [
+ %{
+ id: :internal_fetch_init,
+ start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
+ restart: :temporary
+ }
+ ]
+ else
+ children
+ end
end
# start hackney and gun pools in tests
- defp http_children(_, :test) do
- http_children(Tesla.Adapter.Hackney, nil) ++ http_children(Tesla.Adapter.Gun, nil)
+ defp http_children(adapter) do
+ if Application.get_env(:pleroma, __MODULE__)[:test_http_pools] do
+ http_children_hackney() ++ http_children_gun()
+ else
+ cond do
+ match?(Tesla.Adapter.Hackney, adapter) -> http_children_hackney()
+ match?(Tesla.Adapter.Gun, adapter) -> http_children_gun()
+ true -> []
+ end
+ end
end
- defp http_children(Tesla.Adapter.Hackney, _) do
+ defp http_children_hackney do
pools = [:federation, :media]
pools =
@@ -297,18 +277,20 @@ defp http_children(Tesla.Adapter.Hackney, _) do
end
end
- defp http_children(Tesla.Adapter.Gun, _) do
+ defp http_children_gun do
Pleroma.Gun.ConnectionPool.children() ++
[{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
end
- defp http_children(_, _), do: []
-
@spec limiters_setup() :: :ok
def limiters_setup do
config = Config.get(ConcurrentLimiter, [])
- [Pleroma.Web.RichMedia.Helpers, Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy]
+ [
+ Pleroma.Web.RichMedia.Helpers,
+ Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy,
+ Pleroma.Search
+ ]
|> Enum.each(fn module ->
mod_config = Keyword.get(config, module, [])
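The `Mix.env()` branching in this file is replaced by runtime lookups against `Application.get_env(:pleroma, Pleroma.Application)`. A sketch of what the matching test configuration could look like; the key names are taken from the get_env calls above, the values are purely illustrative:

```elixir
# config/test.exs (illustrative values only)
import Config

config :pleroma, Pleroma.Application,
  background_migrators: false,
  internal_fetch: false,
  load_custom_modules: false,
  max_restarts: 100,
  streamer_registry: false,
  test_http_pools: true
```

A normal (dev/prod) configuration would presumably flip these flags on, set test_http_pools to false, and leave max_restarts at the supervisor default of 3, matching the comment in start/2.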
diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application_requirements.ex
index 44b1c1705..8c0df64fc 100644
--- a/lib/pleroma/application_requirements.ex
+++ b/lib/pleroma/application_requirements.ex
@@ -7,7 +7,10 @@ defmodule Pleroma.ApplicationRequirements do
The module represents the collection of validations to run before starting the server.
"""
- defmodule VerifyError, do: defexception([:message])
+ defmodule VerifyError do
+ defexception([:message])
+ @type t :: %__MODULE__{}
+ end
alias Pleroma.Config
alias Pleroma.Helpers.MediaHelper
@@ -25,6 +28,7 @@ def verify! do
|> check_welcome_message_config!()
|> check_rum!()
|> check_repo_pool_size!()
+ |> check_mrfs()
|> handle_result()
end
@@ -34,7 +38,7 @@ defp handle_result({:error, message}), do: raise(VerifyError, message: message)
defp check_welcome_message_config!(:ok) do
if Pleroma.Config.get([:welcome, :email, :enabled], false) and
not Pleroma.Emails.Mailer.enabled?() do
- Logger.warn("""
+ Logger.warning("""
To send welcome emails, you need to enable the mailer.
Welcome emails will NOT be sent with the current config.
@@ -53,7 +57,7 @@ defp check_welcome_message_config!(result), do: result
def check_confirmation_accounts!(:ok) do
if Pleroma.Config.get([:instance, :account_activation_required]) &&
not Pleroma.Emails.Mailer.enabled?() do
- Logger.warn("""
+ Logger.warning("""
Account activation is required, but the mailer is disabled.
Users will NOT be able to confirm their accounts with this config.
Either disable account activation or enable the mailer.
@@ -168,8 +172,6 @@ defp check_system_commands!(:ok) do
check_filter(Pleroma.Upload.Filter.Exiftool.ReadDescription, "exiftool"),
check_filter(Pleroma.Upload.Filter.Mogrify, "mogrify"),
check_filter(Pleroma.Upload.Filter.Mogrifun, "mogrify"),
- check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "mogrify"),
- check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "convert"),
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "ffprobe")
]
@@ -195,8 +197,6 @@ defp check_system_commands!(:ok) do
end
end
- defp check_system_commands!(result), do: result
-
defp check_repo_pool_size!(:ok) do
if Pleroma.Config.get([Pleroma.Repo, :pool_size], 10) != 10 and
not Pleroma.Config.get([:dangerzone, :override_repo_pool_size], false) do
@@ -235,4 +235,25 @@ defp check_filter(filter, command_required) do
true
end
end
+
+ defp check_mrfs(:ok) do
+ mrfs = Config.get!([:mrf, :policies])
+
+ missing_mrfs =
+ Enum.reduce(mrfs, [], fn x, acc ->
+ if Code.ensure_compiled(x) do
+ acc
+ else
+ acc ++ [x]
+ end
+ end)
+
+ if Enum.empty?(missing_mrfs) do
+ :ok
+ else
+ {:error, "The following MRF modules are configured but missing: #{inspect(missing_mrfs)}"}
+ end
+ end
+
+ defp check_mrfs(result), do: result
end
diff --git a/lib/pleroma/bbs/authenticator.ex b/lib/pleroma/bbs/authenticator.ex
deleted file mode 100644
index 0f7543ff5..000000000
--- a/lib/pleroma/bbs/authenticator.ex
+++ /dev/null
@@ -1,20 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.BBS.Authenticator do
- use Sshd.PasswordAuthenticator
- alias Pleroma.User
- alias Pleroma.Web.Plugs.AuthenticationPlug
-
- def authenticate(username, password) do
- username = to_string(username)
- password = to_string(password)
-
- with %User{} = user <- User.get_by_nickname(username) do
- AuthenticationPlug.checkpw(password, user.password_hash)
- else
- _e -> false
- end
- end
-end
diff --git a/lib/pleroma/bbs/handler.ex b/lib/pleroma/bbs/handler.ex
deleted file mode 100644
index a3b623bdf..000000000
--- a/lib/pleroma/bbs/handler.ex
+++ /dev/null
@@ -1,149 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.BBS.Handler do
- use Sshd.ShellHandler
- alias Pleroma.Activity
- alias Pleroma.HTML
- alias Pleroma.Web.ActivityPub.ActivityPub
- alias Pleroma.Web.CommonAPI
-
- def on_shell(username, _pubkey, _ip, _port) do
- :ok = IO.puts("Welcome to #{Pleroma.Config.get([:instance, :name])}!")
- user = Pleroma.User.get_cached_by_nickname(to_string(username))
- Logger.debug("#{inspect(user)}")
- loop(run_state(user: user))
- end
-
- def on_connect(username, ip, port, method) do
- Logger.debug(fn ->
- """
- Incoming SSH shell #{inspect(self())} requested for #{username} from #{inspect(ip)}:#{inspect(port)} using #{inspect(method)}
- """
- end)
- end
-
- def on_disconnect(username, ip, port) do
- Logger.debug(fn ->
- "Disconnecting SSH shell for #{username} from #{inspect(ip)}:#{inspect(port)}"
- end)
- end
-
- defp loop(state) do
- self_pid = self()
- counter = state.counter
- prefix = state.prefix
- user = state.user
-
- input = spawn(fn -> io_get(self_pid, prefix, counter, user.nickname) end)
- wait_input(state, input)
- end
-
- def puts_activity(activity) do
- status = Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{activity: activity})
- IO.puts("-- #{status.id} by #{status.account.display_name} (#{status.account.acct})")
- IO.puts(HTML.strip_tags(status.content))
- IO.puts("")
- end
-
- def handle_command(state, "help") do
- IO.puts("Available commands:")
- IO.puts("help - This help")
- IO.puts("home - Show the home timeline")
- IO.puts("p - Post the given text")
- IO.puts("r - Reply to the post with the given id")
- IO.puts("quit - Quit")
-
- state
- end
-
- def handle_command(%{user: user} = state, "r " <> text) do
- text = String.trim(text)
- [activity_id, rest] = String.split(text, " ", parts: 2)
-
- with %Activity{} <- Activity.get_by_id(activity_id),
- {:ok, _activity} <-
- CommonAPI.post(user, %{status: rest, in_reply_to_status_id: activity_id}) do
- IO.puts("Replied!")
- else
- _e -> IO.puts("Could not reply...")
- end
-
- state
- end
-
- def handle_command(%{user: user} = state, "p " <> text) do
- text = String.trim(text)
-
- with {:ok, _activity} <- CommonAPI.post(user, %{status: text}) do
- IO.puts("Posted!")
- else
- _e -> IO.puts("Could not post...")
- end
-
- state
- end
-
- def handle_command(state, "home") do
- user = state.user
-
- params =
- %{}
- |> Map.put(:type, ["Create"])
- |> Map.put(:blocking_user, user)
- |> Map.put(:muting_user, user)
- |> Map.put(:user, user)
-
- activities =
- [user.ap_id | Pleroma.User.following(user)]
- |> ActivityPub.fetch_activities(params)
-
- Enum.each(activities, fn activity ->
- puts_activity(activity)
- end)
-
- state
- end
-
- def handle_command(state, command) do
- IO.puts("Unknown command '#{command}'")
- state
- end
-
- defp wait_input(state, input) do
- receive do
- {:input, ^input, "quit\n"} ->
- IO.puts("Exiting...")
-
- {:input, ^input, code} when is_binary(code) ->
- code = String.trim(code)
-
- state = handle_command(state, code)
-
- loop(%{state | counter: state.counter + 1})
-
- {:error, :interrupted} ->
- IO.puts("Caught Ctrl+C...")
- loop(%{state | counter: state.counter + 1})
-
- {:input, ^input, msg} ->
- :ok = Logger.warn("received unknown message: #{inspect(msg)}")
- loop(%{state | counter: state.counter + 1})
- end
- end
-
- defp run_state(opts) do
- %{prefix: "pleroma", counter: 1, user: opts[:user]}
- end
-
- defp io_get(pid, prefix, counter, username) do
- prompt = prompt(prefix, counter, username)
- send(pid, {:input, self(), IO.gets(:stdio, prompt)})
- end
-
- defp prompt(prefix, counter, username) do
- prompt = "#{username}@#{prefix}:#{counter}>"
- prompt <> " "
- end
-end
diff --git a/lib/pleroma/bookmark.ex b/lib/pleroma/bookmark.ex
index 187749e86..1a2a63b82 100644
--- a/lib/pleroma/bookmark.ex
+++ b/lib/pleroma/bookmark.ex
@@ -10,6 +10,7 @@ defmodule Pleroma.Bookmark do
alias Pleroma.Activity
alias Pleroma.Bookmark
+ alias Pleroma.BookmarkFolder
alias Pleroma.Repo
alias Pleroma.User
@@ -18,33 +19,46 @@ defmodule Pleroma.Bookmark do
schema "bookmarks" do
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)
+ belongs_to(:folder, BookmarkFolder, type: FlakeId.Ecto.CompatType)
timestamps()
end
- @spec create(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t()) ::
- {:ok, Bookmark.t()} | {:error, Changeset.t()}
- def create(user_id, activity_id) do
+ @spec create(Ecto.UUID.t(), Ecto.UUID.t()) ::
+ {:ok, Bookmark.t()} | {:error, Ecto.Changeset.t()}
+ def create(user_id, activity_id, folder_id \\ nil) do
attrs = %{
user_id: user_id,
- activity_id: activity_id
+ activity_id: activity_id,
+ folder_id: folder_id
}
%Bookmark{}
- |> cast(attrs, [:user_id, :activity_id])
+ |> cast(attrs, [:user_id, :activity_id, :folder_id])
|> validate_required([:user_id, :activity_id])
|> unique_constraint(:activity_id, name: :bookmarks_user_id_activity_id_index)
- |> Repo.insert()
+ |> Repo.insert(
+ on_conflict: [set: [folder_id: folder_id]],
+ conflict_target: [:user_id, :activity_id]
+ )
end
- @spec for_user_query(FlakeId.Ecto.CompatType.t()) :: Ecto.Query.t()
- def for_user_query(user_id) do
+ @spec for_user_query(Ecto.UUID.t()) :: Ecto.Query.t()
+ def for_user_query(user_id, folder_id \\ nil) do
Bookmark
|> where(user_id: ^user_id)
+ |> maybe_filter_by_folder(folder_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> preload([b, a], activity: a)
end
+ defp maybe_filter_by_folder(query, nil), do: query
+
+ defp maybe_filter_by_folder(query, folder_id) do
+ query
+ |> where(folder_id: ^folder_id)
+ end
+
def get(user_id, activity_id) do
Bookmark
|> where(user_id: ^user_id)
@@ -52,8 +66,8 @@ def get(user_id, activity_id) do
|> Repo.one()
end
- @spec destroy(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t()) ::
- {:ok, Bookmark.t()} | {:error, Changeset.t()}
+ @spec destroy(Ecto.UUID.t(), Ecto.UUID.t()) ::
+ {:ok, Bookmark.t()} | {:error, Ecto.Changeset.t()}
def destroy(user_id, activity_id) do
from(b in Bookmark,
where: b.user_id == ^user_id,
@@ -62,4 +76,11 @@ def destroy(user_id, activity_id) do
|> Repo.one()
|> Repo.delete()
end
+
+ def set_folder(bookmark, folder_id) do
+ bookmark
+ |> cast(%{folder_id: folder_id}, [:folder_id])
+ |> validate_required([:folder_id])
+ |> Repo.update()
+ end
end
diff --git a/lib/pleroma/bookmark_folder.ex b/lib/pleroma/bookmark_folder.ex
new file mode 100644
index 000000000..14d37e197
--- /dev/null
+++ b/lib/pleroma/bookmark_folder.ex
@@ -0,0 +1,115 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.BookmarkFolder do
+ use Ecto.Schema
+
+ import Ecto.Changeset
+ import Ecto.Query
+
+ alias Pleroma.BookmarkFolder
+ alias Pleroma.Emoji
+ alias Pleroma.Repo
+ alias Pleroma.User
+
+ @type t :: %__MODULE__{}
+ @primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
+
+ schema "bookmark_folders" do
+ field(:name, :string)
+ field(:emoji, :string)
+
+ belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
+
+ timestamps()
+ end
+
+ def get_by_id(id), do: Repo.get_by(BookmarkFolder, id: id)
+
+ def create(user_id, name, emoji \\ nil) do
+ %BookmarkFolder{}
+ |> cast(
+ %{
+ user_id: user_id,
+ name: name,
+ emoji: emoji
+ },
+ [:user_id, :name, :emoji]
+ )
+ |> validate_required([:user_id, :name])
+ |> fix_emoji()
+ |> validate_emoji()
+ |> unique_constraint([:user_id, :name])
+ |> Repo.insert()
+ end
+
+ def update(folder_id, name, emoji \\ nil) do
+ get_by_id(folder_id)
+ |> cast(
+ %{
+ name: name,
+ emoji: emoji
+ },
+ [:name, :emoji]
+ )
+ |> fix_emoji()
+ |> validate_emoji()
+ |> unique_constraint([:user_id, :name])
+ |> Repo.update()
+ end
+
+ defp fix_emoji(changeset) do
+ with {:emoji_field, emoji} when is_binary(emoji) <-
+ {:emoji_field, get_field(changeset, :emoji)},
+ {:fixed_emoji, emoji} <-
+ {:fixed_emoji,
+ emoji
+ |> Pleroma.Emoji.fully_qualify_emoji()
+ |> Pleroma.Emoji.maybe_quote()} do
+ put_change(changeset, :emoji, emoji)
+ else
+ {:emoji_field, _} -> changeset
+ end
+ end
+
+ defp validate_emoji(changeset) do
+ validate_change(changeset, :emoji, fn
+ :emoji, nil ->
+ []
+
+ :emoji, emoji ->
+ if Emoji.unicode?(emoji) or valid_local_custom_emoji?(emoji) do
+ []
+ else
+ [emoji: "Invalid emoji"]
+ end
+ end)
+ end
+
+ defp valid_local_custom_emoji?(emoji) do
+ with %{file: _path} <- Emoji.get(emoji) do
+ true
+ else
+ _ -> false
+ end
+ end
+
+ def delete(folder_id) do
+ BookmarkFolder
+ |> Repo.get_by(id: folder_id)
+ |> Repo.delete()
+ end
+
+ def for_user(user_id) do
+ BookmarkFolder
+ |> where(user_id: ^user_id)
+ |> Repo.all()
+ end
+
+ def belongs_to_user?(folder_id, user_id) do
+ BookmarkFolder
+ |> where(id: ^folder_id, user_id: ^user_id)
+ |> Repo.exists?()
+ end
+end
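Taken together with the `Bookmark` changes above, the new schema supports a simple "file this status under a folder" flow. A minimal sketch, assuming `user` and `activity` records already exist; the wrapper module is illustrative:

```elixir
defmodule MyApp.BookmarkFolderExample do
  alias Pleroma.Bookmark
  alias Pleroma.BookmarkFolder
  alias Pleroma.Repo

  def file_under_reading_list(user, activity) do
    {:ok, folder} = BookmarkFolder.create(user.id, "Reading list")

    # Re-bookmarking an already bookmarked activity only moves it,
    # thanks to the on_conflict upsert in Bookmark.create/3.
    {:ok, _bookmark} = Bookmark.create(user.id, activity.id, folder.id)

    # Listing is folder-aware via the optional second argument.
    user.id
    |> Bookmark.for_user_query(folder.id)
    |> Repo.all()
  end
end
```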
diff --git a/lib/pleroma/caching.ex b/lib/pleroma/caching.ex
index eb0588708..796a465af 100644
--- a/lib/pleroma/caching.ex
+++ b/lib/pleroma/caching.ex
@@ -8,10 +8,13 @@ defmodule Pleroma.Caching do
@callback put(Cachex.cache(), any(), any(), Keyword.t()) :: {Cachex.status(), boolean()}
@callback put(Cachex.cache(), any(), any()) :: {Cachex.status(), boolean()}
@callback fetch!(Cachex.cache(), any(), function() | nil) :: any()
+ @callback fetch(Cachex.cache(), any(), function() | nil) ::
+ {atom(), any()} | {atom(), any(), any()}
# @callback del(Cachex.cache(), any(), Keyword.t()) :: {Cachex.status(), boolean()}
@callback del(Cachex.cache(), any()) :: {Cachex.status(), boolean()}
@callback stream!(Cachex.cache(), any()) :: Enumerable.t()
@callback expire_at(Cachex.cache(), binary(), number()) :: {Cachex.status(), boolean()}
+ @callback expire(Cachex.cache(), binary(), number()) :: {Cachex.status(), boolean()}
@callback exists?(Cachex.cache(), any()) :: {Cachex.status(), boolean()}
@callback execute!(Cachex.cache(), function()) :: any()
@callback get_and_update(Cachex.cache(), any(), function()) ::
diff --git a/lib/pleroma/captcha/kocaptcha.ex b/lib/pleroma/captcha/kocaptcha.ex
index e786e28b9..c4987d4fd 100644
--- a/lib/pleroma/captcha/kocaptcha.ex
+++ b/lib/pleroma/captcha/kocaptcha.ex
@@ -29,7 +29,7 @@ def new do
@impl Service
def validate(_token, captcha, answer_data) do
- # Here the token is unsed, because the unencrypted captcha answer is just passed to method
+ # Here the token is unused, because the unencrypted captcha answer is just passed to method
if not is_nil(captcha) and
:crypto.hash(:md5, captcha) |> Base.encode16() == String.upcase(answer_data),
do: :ok,
diff --git a/lib/pleroma/chat.ex b/lib/pleroma/chat.ex
index fe32ec08c..5c4dbc1ff 100644
--- a/lib/pleroma/chat.ex
+++ b/lib/pleroma/chat.ex
@@ -42,7 +42,7 @@ def changeset(struct, params) do
|> unique_constraint(:user_id, name: :chats_user_id_recipient_index)
end
- @spec get_by_user_and_id(User.t(), FlakeId.Ecto.CompatType.t()) ::
+ @spec get_by_user_and_id(User.t(), Ecto.UUID.t()) ::
{:ok, t()} | {:error, :not_found}
def get_by_user_and_id(%User{id: user_id}, id) do
from(c in __MODULE__,
@@ -52,17 +52,17 @@ def get_by_user_and_id(%User{id: user_id}, id) do
|> Repo.find_resource()
end
- @spec get_by_id(FlakeId.Ecto.CompatType.t()) :: t() | nil
+ @spec get_by_id(Ecto.UUID.t()) :: t() | nil
def get_by_id(id) do
Repo.get(__MODULE__, id)
end
- @spec get(FlakeId.Ecto.CompatType.t(), String.t()) :: t() | nil
+ @spec get(Ecto.UUID.t(), String.t()) :: t() | nil
def get(user_id, recipient) do
Repo.get_by(__MODULE__, user_id: user_id, recipient: recipient)
end
- @spec get_or_create(FlakeId.Ecto.CompatType.t(), String.t()) ::
+ @spec get_or_create(Ecto.UUID.t(), String.t()) ::
{:ok, t()} | {:error, Ecto.Changeset.t()}
def get_or_create(user_id, recipient) do
%__MODULE__{}
@@ -75,7 +75,7 @@ def get_or_create(user_id, recipient) do
)
end
- @spec bump_or_create(FlakeId.Ecto.CompatType.t(), String.t()) ::
+ @spec bump_or_create(Ecto.UUID.t(), String.t()) ::
{:ok, t()} | {:error, Ecto.Changeset.t()}
def bump_or_create(user_id, recipient) do
%__MODULE__{}
@@ -87,7 +87,7 @@ def bump_or_create(user_id, recipient) do
)
end
- @spec for_user_query(FlakeId.Ecto.CompatType.t()) :: Ecto.Query.t()
+ @spec for_user_query(Ecto.UUID.t()) :: Ecto.Query.t()
def for_user_query(user_id) do
from(c in Chat,
where: c.user_id == ^user_id,
diff --git a/lib/pleroma/config/deprecation_warnings.ex b/lib/pleroma/config/deprecation_warnings.ex
index 599f1d3cf..58d164dc7 100644
--- a/lib/pleroma/config/deprecation_warnings.ex
+++ b/lib/pleroma/config/deprecation_warnings.ex
@@ -24,7 +24,7 @@ def check_exiftool_filter do
filters = Config.get([Pleroma.Upload]) |> Keyword.get(:filters, [])
if Pleroma.Upload.Filter.Exiftool in filters do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using Exiftool as a filter instead of Exiftool.StripLocation. This should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@@ -63,7 +63,7 @@ def check_simple_policy_tuples do
|> Enum.any?(fn {_, v} -> Enum.any?(v, &is_binary/1) end)
if has_strings do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using strings in the SimplePolicy configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@@ -121,7 +121,7 @@ def check_quarantined_instances_tuples do
has_strings = Config.get([:instance, :quarantined_instances]) |> Enum.any?(&is_binary/1)
if has_strings do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using strings in the quarantined_instances configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@@ -158,7 +158,7 @@ def check_transparency_exclusions_tuples do
has_strings = Config.get([:mrf, :transparency_exclusions]) |> Enum.any?(&is_binary/1)
if has_strings do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using strings in the transparency_exclusions configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@@ -172,7 +172,7 @@ def check_transparency_exclusions_tuples do
```
config :pleroma, :mrf,
- transparency_exclusions: [{"instance.tld", "Reason to exlude transparency"}]
+ transparency_exclusions: [{"instance.tld", "Reason to exclude transparency"}]
```
""")
@@ -193,7 +193,7 @@ def check_transparency_exclusions_tuples do
def check_hellthread_threshold do
if Config.get([:mrf_hellthread, :threshold]) do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
You are using the old configuration mechanism for the hellthread filter. Please check config.md.
""")
@@ -213,7 +213,7 @@ def warn do
check_gun_pool_options(),
check_activity_expiration_config(),
check_remote_ip_plug_name(),
- check_uploders_s3_public_endpoint(),
+ check_uploaders_s3_public_endpoint(),
check_old_chat_shoutbox(),
check_quarantined_instances_tuples(),
check_transparency_exclusions_tuples(),
@@ -256,7 +256,7 @@ def check_old_mrf_config do
move_namespace_and_warn(@mrf_config_map, warning_preface)
end
- @spec move_namespace_and_warn([config_map()], String.t()) :: :ok | nil
+ @spec move_namespace_and_warn([config_map()], String.t()) :: :ok | :error
def move_namespace_and_warn(config_map, warning_preface) do
warning =
Enum.reduce(config_map, "", fn
@@ -274,17 +274,17 @@ def move_namespace_and_warn(config_map, warning_preface) do
if warning == "" do
:ok
else
- Logger.warn(warning_preface <> warning)
+ Logger.warning(warning_preface <> warning)
:error
end
end
- @spec check_media_proxy_whitelist_config() :: :ok | nil
+ @spec check_media_proxy_whitelist_config() :: :ok | :error
def check_media_proxy_whitelist_config do
whitelist = Config.get([:media_proxy, :whitelist])
if Enum.any?(whitelist, &(not String.starts_with?(&1, "http"))) do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using old format (only domain) for MediaProxy whitelist option. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
""")
@@ -299,7 +299,7 @@ def check_gun_pool_options do
pool_config = Config.get(:connections_pool)
if timeout = pool_config[:await_up_timeout] do
- Logger.warn("""
+ Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`. Please change to `config :pleroma, :connections_pool, connect_timeout` to ensure compatibility with future releases.
""")
@@ -311,7 +311,7 @@ def check_gun_pool_options do
warning_preface = """
!!!DEPRECATION WARNING!!!
- Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
+ Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. The setting will not take effect until updated.
"""
updated_config =
@@ -331,7 +331,7 @@ def check_gun_pool_options do
"\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
end)
- Logger.warn(Enum.join([warning_preface | pool_warnings]))
+ Logger.warning(Enum.join([warning_preface | pool_warnings]))
Config.put(:pools, updated_config)
:error
@@ -340,7 +340,7 @@ def check_gun_pool_options do
end
end
- @spec check_activity_expiration_config() :: :ok | nil
+ @spec check_activity_expiration_config() :: :ok | :error
def check_activity_expiration_config do
warning_preface = """
!!!DEPRECATION WARNING!!!
@@ -356,7 +356,7 @@ def check_activity_expiration_config do
)
end
- @spec check_remote_ip_plug_name() :: :ok | nil
+ @spec check_remote_ip_plug_name() :: :ok | :error
def check_remote_ip_plug_name do
warning_preface = """
!!!DEPRECATION WARNING!!!
@@ -372,8 +372,8 @@ def check_remote_ip_plug_name do
)
end
- @spec check_uploders_s3_public_endpoint() :: :ok | nil
- def check_uploders_s3_public_endpoint do
+ @spec check_uploaders_s3_public_endpoint() :: :ok | :error
+ def check_uploaders_s3_public_endpoint do
s3_config = Pleroma.Config.get([Pleroma.Uploaders.S3])
use_old_config = Keyword.has_key?(s3_config, :public_endpoint)
@@ -393,7 +393,7 @@ def check_uploders_s3_public_endpoint do
end
end
- @spec check_old_chat_shoutbox() :: :ok | nil
+ @spec check_old_chat_shoutbox() :: :ok | :error
def check_old_chat_shoutbox do
instance_config = Pleroma.Config.get([:instance])
chat_config = Pleroma.Config.get([:chat]) || []
diff --git a/lib/pleroma/config/getting.ex b/lib/pleroma/config/getting.ex
index f9b66bba6..ec93fd02a 100644
--- a/lib/pleroma/config/getting.ex
+++ b/lib/pleroma/config/getting.ex
@@ -5,4 +5,11 @@
defmodule Pleroma.Config.Getting do
@callback get(any()) :: any()
@callback get(any(), any()) :: any()
+
+ def get(key), do: get(key, nil)
+ def get(key, default), do: impl().get(key, default)
+
+ def impl do
+ Application.get_env(:pleroma, :config_impl, Pleroma.Config)
+ end
end
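With `impl/0` resolving through the `:config_impl` application env, any module implementing the behaviour can be swapped in at runtime. A minimal sketch using a hand-written stub; the stub module name is made up:

```elixir
defmodule MyApp.StaticConfig do
  @behaviour Pleroma.Config.Getting

  @impl true
  def get(key), do: get(key, nil)

  @impl true
  def get([:instance, :name], _default), do: "Test instance"
  def get(_key, default), do: default
end

# Point the behaviour at the stub, e.g. in config/test.exs or a test setup:
Application.put_env(:pleroma, :config_impl, MyApp.StaticConfig)

# Callers that go through the behaviour now hit the stub:
"Test instance" = Pleroma.Config.Getting.get([:instance, :name])
```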
diff --git a/lib/pleroma/config/loader.ex b/lib/pleroma/config/loader.ex
index 015be3d8e..bd85eccab 100644
--- a/lib/pleroma/config/loader.ex
+++ b/lib/pleroma/config/loader.ex
@@ -19,21 +19,10 @@ defmodule Pleroma.Config.Loader do
:tesla
]
- if Code.ensure_loaded?(Config.Reader) do
- @reader Config.Reader
-
- def read(path), do: @reader.read!(path)
- else
- # support for Elixir less than 1.9
- @reader Mix.Config
- def read(path) do
- path
- |> @reader.eval!()
- |> elem(0)
- end
- end
+ @reader Config.Reader
@spec read(Path.t()) :: keyword()
+ def read(path), do: @reader.read!(path)
@spec merge(keyword(), keyword()) :: keyword()
def merge(c1, c2), do: @reader.merge(c1, c2)
diff --git a/lib/pleroma/config/oban.ex b/lib/pleroma/config/oban.ex
index 483d2bb79..836f0c1a7 100644
--- a/lib/pleroma/config/oban.ex
+++ b/lib/pleroma/config/oban.ex
@@ -23,7 +23,7 @@ def warn do
You are using old workers in Oban crontab settings, which were removed.
Please, remove setting from crontab in your config file (prod.secret.exs): #{inspect(setting)}
"""
- |> Logger.warn()
+ |> Logger.warning()
List.delete(acc, setting)
else
diff --git a/lib/pleroma/config/release_runtime_provider.ex b/lib/pleroma/config/release_runtime_provider.ex
index 91e5f1a54..351639836 100644
--- a/lib/pleroma/config/release_runtime_provider.ex
+++ b/lib/pleroma/config/release_runtime_provider.ex
@@ -20,6 +20,20 @@ def load(config, opts) do
with_runtime_config =
if File.exists?(config_path) do
+ #
+ %File.Stat{mode: mode} = File.stat!(config_path)
+
+ if Bitwise.band(mode, 0o007) > 0 do
+ raise "Configuration at #{config_path} has world-permissions, execute the following: chmod o= #{config_path}"
+ end
+
+ if Bitwise.band(mode, 0o020) > 0 do
+ raise "Configuration at #{config_path} has group-wise write permissions, execute the following: chmod g-w #{config_path}"
+ end
+
+ # Note: Elixir doesn't provide a getuid(2),
+ # so we cannot forbid group-read only when the config is owned by us
+
runtime_config = Config.Reader.read!(config_path)
with_defaults
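The two Bitwise.band/2 guards encode "no permissions for others" and "no group write". A quick illustration of how the masks behave against typical file modes (plain Elixir, runnable in iex):

```elixir
import Bitwise

# 0o640 (rw-r-----): both masks come back zero, so the config is accepted.
0 = band(0o640, 0o007)
0 = band(0o640, 0o020)

# 0o664 (rw-rw-r--): the "other" read bit and the group write bit are set,
# so the checks above would raise.
4 = band(0o664, 0o007)
16 = band(0o664, 0o020)
```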
diff --git a/lib/pleroma/config/transfer_task.ex b/lib/pleroma/config/transfer_task.ex
index 4199630af..91885347f 100644
--- a/lib/pleroma/config/transfer_task.ex
+++ b/lib/pleroma/config/transfer_task.ex
@@ -47,7 +47,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
{logger, other} =
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_with_default/1)
- |> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
+ |> Enum.split_with(fn {group, _, _, _} -> group in [:logger] end)
logger
|> Enum.sort()
@@ -55,8 +55,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
started_applications = Application.started_applications()
- # TODO: some problem with prometheus after restart!
- reject = [nil, :prometheus, :postgrex]
+ reject = [nil, :postgrex]
reject =
if restart_pleroma? do
@@ -104,11 +103,6 @@ defp merge_with_default(%{group: group, key: key, value: value} = setting) do
end
# change logger configuration in runtime, without restart
- defp configure({:quack, key, _, merged}) do
- Logger.configure_backend(Quack.Logger, [{key, merged}])
- :ok = update_env(:quack, key, merged)
- end
-
defp configure({_, :backends, _, merged}) do
# removing current backends
Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)
@@ -150,7 +144,7 @@ defp update({group, key, value, merged}) do
error_msg =
"updating env causes error, group: #{inspect(group)}, key: #{inspect(key)}, value: #{inspect(value)} error: #{inspect(error)}"
- Logger.warn(error_msg)
+ Logger.warning(error_msg)
nil
end
@@ -184,12 +178,12 @@ defp restart(started_applications, app, _) do
:ok = Application.start(app)
else
nil ->
- Logger.warn("#{app} is not started.")
+ Logger.warning("#{app} is not started.")
error ->
error
|> inspect()
- |> Logger.warn()
+ |> Logger.warning()
end
end
diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex
index 6befbbe19..e28fcb124 100644
--- a/lib/pleroma/config_db.ex
+++ b/lib/pleroma/config_db.ex
@@ -54,7 +54,7 @@ def get_by_group_and_key(group, key) do
@spec get_by_params(map()) :: ConfigDB.t() | nil
def get_by_params(%{group: _, key: _} = params), do: Repo.get_by(ConfigDB, params)
- @spec changeset(ConfigDB.t(), map()) :: Changeset.t()
+ @spec changeset(ConfigDB.t(), map()) :: Ecto.Changeset.t()
def changeset(config, params \\ %{}) do
config
|> cast(params, [:key, :group, :value])
@@ -138,7 +138,7 @@ defp deep_merge(_key, value1, value2) do
end
end
- @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
+ @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Ecto.Changeset.t()}
def update_or_create(params) do
params = Map.put(params, :value, to_elixir_types(params[:value]))
search_opts = Map.take(params, [:group, :key])
@@ -163,7 +163,6 @@ defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(
defp only_full_update?(%ConfigDB{group: group, key: key}) do
full_key_update = [
{:pleroma, :ecto_repos},
- {:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist},
@@ -176,7 +175,7 @@ defp only_full_update?(%ConfigDB{group: group, key: key}) do
end)
end
- @spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
+ @spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Ecto.Changeset.t()}
def delete(%ConfigDB{} = config), do: Repo.delete(config)
def delete(params) do
@@ -386,7 +385,7 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
@spec module_name?(String.t()) :: boolean()
def module_name?(string) do
- Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
+ Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Ueberauth|Swoosh)\./, string) or
string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"]
end
end
diff --git a/lib/pleroma/constants.ex b/lib/pleroma/constants.ex
index 7b63ab06e..3a5e35301 100644
--- a/lib/pleroma/constants.ex
+++ b/lib/pleroma/constants.ex
@@ -19,7 +19,8 @@ defmodule Pleroma.Constants do
"context_id",
"deleted_activity_id",
"pleroma_internal",
- "generator"
+ "generator",
+ "rules"
]
)
@@ -28,9 +29,82 @@ defmodule Pleroma.Constants do
~w(index.html robots.txt static static-fe finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc embed.js embed.css)
)
+ const(status_updatable_fields,
+ do: [
+ "source",
+ "tag",
+ "updated",
+ "emoji",
+ "content",
+ "summary",
+ "sensitive",
+ "attachment",
+ "generator"
+ ]
+ )
+
+ const(status_object_types,
+ do: [
+ "Note",
+ "Question",
+ "Audio",
+ "Video",
+ "Event",
+ "Article",
+ "Page"
+ ]
+ )
+
+ const(updatable_object_types,
+ do: [
+ "Note",
+ "Question",
+ "Audio",
+ "Video",
+ "Event",
+ "Article",
+ "Page"
+ ]
+ )
+
+ const(actor_types,
+ do: [
+ "Application",
+ "Group",
+ "Organization",
+ "Person",
+ "Service"
+ ]
+ )
+
+ const(allowed_user_actor_types,
+ do: [
+ "Person",
+ "Service",
+ "Group"
+ ]
+ )
+
# basic regex, just there to weed out potential mistakes
# https://datatracker.ietf.org/doc/html/rfc2045#section-5.1
const(mime_regex,
do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/
)
+
+ const(upload_object_types, do: ["Document", "Image"])
+
+ const(activity_json_canonical_mime_type,
+ do: "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
+ )
+
+ const(activity_json_mime_types,
+ do: [
+ "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
+ "application/activity+json"
+ ]
+ )
+
+ const(public_streams,
+ do: ["public", "public:local", "public:media", "public:local:media"]
+ )
end
diff --git a/lib/pleroma/conversation.ex b/lib/pleroma/conversation.ex
index 42028aa51..0be609a22 100644
--- a/lib/pleroma/conversation.ex
+++ b/lib/pleroma/conversation.ex
@@ -57,7 +57,7 @@ def maybe_create_recipientships(participation, activity) do
3. Bump all relevant participations to 'unread'
"""
def create_or_bump_for(activity, opts \\ []) do
- with true <- Pleroma.Web.ActivityPub.Visibility.is_direct?(activity),
+ with true <- Pleroma.Web.ActivityPub.Visibility.direct?(activity),
"Create" <- activity.data["type"],
%Object{} = object <- Object.normalize(activity, fetch: false),
true <- object.data["type"] in ["Note", "Question"],
diff --git a/lib/pleroma/data_migration.ex b/lib/pleroma/data_migration.ex
index 59d891d8d..be4bf6489 100644
--- a/lib/pleroma/data_migration.ex
+++ b/lib/pleroma/data_migration.ex
@@ -12,6 +12,8 @@ defmodule Pleroma.DataMigration do
import Ecto.Changeset
import Ecto.Query
+ @type t :: %__MODULE__{}
+
schema "data_migrations" do
field(:name, :string)
field(:state, State, default: :pending)
@@ -42,4 +44,5 @@ def get_by_name(name) do
end
def populate_hashtags_table, do: get_by_name("populate_hashtags_table")
+ def delete_context_objects, do: get_by_name("delete_context_objects")
end
diff --git a/lib/pleroma/docs/generator.ex b/lib/pleroma/docs/generator.ex
index 6508f1947..93b19484f 100644
--- a/lib/pleroma/docs/generator.ex
+++ b/lib/pleroma/docs/generator.ex
@@ -15,8 +15,10 @@ def list_behaviour_implementations(behaviour) do
:code.all_loaded()
|> Enum.filter(fn {module, _} ->
# This shouldn't be needed as all modules are expected to have module_info/1,
- # but in test enviroments some transient modules `:elixir_compiler_XX`
+ # but in test environments some transient modules `:elixir_compiler_XX`
# are loaded for some reason (where XX is a random integer).
+ Code.ensure_loaded(module)
+
if function_exported?(module, :module_info, 1) do
module.module_info(:attributes)
|> Keyword.get_values(:behaviour)
diff --git a/lib/pleroma/docs/json.ex b/lib/pleroma/docs/json.ex
index 05f46f39b..f69854935 100644
--- a/lib/pleroma/docs/json.ex
+++ b/lib/pleroma/docs/json.ex
@@ -18,7 +18,7 @@ def compile do
:persistent_term.put(@term, Pleroma.Docs.Generator.convert_to_strings(descriptions))
end
- @spec compiled_descriptions :: Map.t()
+ @spec compiled_descriptions :: map()
def compiled_descriptions do
:persistent_term.get(@term)
end
diff --git a/lib/pleroma/docs/translator.ex b/lib/pleroma/docs/translator.ex
new file mode 100644
index 000000000..13e33c87e
--- /dev/null
+++ b/lib/pleroma/docs/translator.ex
@@ -0,0 +1,10 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Docs.Translator do
+ require Pleroma.Docs.Translator.Compiler
+ require Pleroma.Web.Gettext
+
+ @before_compile Pleroma.Docs.Translator.Compiler
+end
diff --git a/lib/pleroma/docs/translator/compiler.ex b/lib/pleroma/docs/translator/compiler.ex
new file mode 100644
index 000000000..5d27d9fa2
--- /dev/null
+++ b/lib/pleroma/docs/translator/compiler.ex
@@ -0,0 +1,119 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Docs.Translator.Compiler do
+ @external_resource "config/description.exs"
+ @raw_config Pleroma.Config.Loader.read("config/description.exs")
+ @raw_descriptions @raw_config[:pleroma][:config_description]
+
+ defmacro __before_compile__(_env) do
+ strings =
+ __MODULE__.descriptions()
+ |> __MODULE__.extract_strings()
+
+ quote do
+ def placeholder do
+ unquote do
+ Enum.map(
+ strings,
+ fn {path, type, string} ->
+ ctxt = msgctxt_for(path, type)
+
+ quote do
+ Pleroma.Web.Gettext.dpgettext_noop(
+ "config_descriptions",
+ unquote(ctxt),
+ unquote(string)
+ )
+ end
+ end
+ )
+ end
+ end
+ end
+ end
+
+ def descriptions do
+ Pleroma.Web.ActivityPub.MRF.config_descriptions()
+ |> Enum.reduce(@raw_descriptions, fn description, acc -> [description | acc] end)
+ |> Pleroma.Docs.Generator.convert_to_strings()
+ end
+
+ def extract_strings(descriptions) do
+ descriptions
+ |> Enum.reduce(%{strings: [], path: []}, &process_item/2)
+ |> Map.get(:strings)
+ end
+
+ defp process_item(entity, acc) do
+ current_level =
+ acc
+ |> process_desc(entity)
+ |> process_label(entity)
+
+ process_children(entity, current_level)
+ end
+
+ defp process_desc(acc, %{description: desc} = item) do
+ %{
+ strings: [{acc.path ++ [key_for(item)], "description", desc} | acc.strings],
+ path: acc.path
+ }
+ end
+
+ defp process_desc(acc, _) do
+ acc
+ end
+
+ defp process_label(acc, %{label: label} = item) do
+ %{
+ strings: [{acc.path ++ [key_for(item)], "label", label} | acc.strings],
+ path: acc.path
+ }
+ end
+
+ defp process_label(acc, _) do
+ acc
+ end
+
+ defp process_children(%{children: children} = item, acc) do
+ current_level = Map.put(acc, :path, acc.path ++ [key_for(item)])
+
+ children
+ |> Enum.reduce(current_level, &process_item/2)
+ |> Map.put(:path, acc.path)
+ end
+
+ defp process_children(_, acc) do
+ acc
+ end
+
+ def msgctxt_for(path, type) do
+ "config #{type} at #{Enum.join(path, " > ")}"
+ end
+
+ defp convert_group({_, group}) do
+ group
+ end
+
+ defp convert_group(group) do
+ group
+ end
+
+ def key_for(%{group: group, key: key}) do
+ "#{convert_group(group)}-#{key}"
+ end
+
+ def key_for(%{group: group}) do
+ convert_group(group)
+ end
+
+ def key_for(%{key: key}) do
+ key
+ end
+
+ def key_for(_) do
+ nil
+ end
+end
diff --git a/lib/pleroma/ecto_enums.ex b/lib/pleroma/ecto_enums.ex
index a4890b489..b346b39d6 100644
--- a/lib/pleroma/ecto_enums.ex
+++ b/lib/pleroma/ecto_enums.ex
@@ -27,3 +27,11 @@
failed: 4,
manual: 5
)
+
+defenum(Pleroma.User.Backup.State,
+ pending: 1,
+ running: 2,
+ complete: 3,
+ failed: 4,
+ invalid: 5
+)
diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/bare_uri.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/bare_uri.ex
new file mode 100644
index 000000000..a1af8faa1
--- /dev/null
+++ b/lib/pleroma/ecto_type/activity_pub/object_validators/bare_uri.ex
@@ -0,0 +1,25 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.BareUri do
+ use Ecto.Type
+
+ def type, do: :string
+
+ def cast(uri) when is_binary(uri) do
+ parsed = URI.parse(uri)
+
+ if is_nil(parsed.scheme) do
+ :error
+ else
+ {:ok, uri}
+ end
+ end
+
+ def cast(_), do: :error
+
+ def dump(data), do: {:ok, data}
+
+ def load(data), do: {:ok, data}
+end
diff --git a/lib/pleroma/emoji-test.txt b/lib/pleroma/emoji-test.txt
index dd5493366..87d093d64 100644
--- a/lib/pleroma/emoji-test.txt
+++ b/lib/pleroma/emoji-test.txt
@@ -1,13 +1,13 @@
# emoji-test.txt
-# Date: 2021-08-26, 17:22:23 GMT
-# © 2021 Unicode®, Inc.
+# Date: 2022-08-12, 20:24:39 GMT
+# © 2022 Unicode®, Inc.
# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
-# For terms of use, see http://www.unicode.org/terms_of_use.html
+# For terms of use, see https://www.unicode.org/terms_of_use.html
#
# Emoji Keyboard/Display Test Data for UTS #51
-# Version: 14.0
+# Version: 15.0
#
-# For documentation and usage, see http://www.unicode.org/reports/tr51
+# For documentation and usage, see https://www.unicode.org/reports/tr51
#
# This file provides data for testing which emoji forms should be in keyboards and which should also be displayed/processed.
# Format: code points; status # emoji name
@@ -92,6 +92,7 @@
1F62C ; fully-qualified # 😬 E1.0 grimacing face
1F62E 200D 1F4A8 ; fully-qualified # 😮💨 E13.1 face exhaling
1F925 ; fully-qualified # 🤥 E3.0 lying face
+1FAE8 ; fully-qualified # 🫨 E15.0 shaking face
# subgroup: face-sleepy
1F60C ; fully-qualified # 😌 E0.6 relieved face
@@ -155,7 +156,7 @@
# subgroup: face-negative
1F624 ; fully-qualified # 😤 E0.6 face with steam from nose
-1F621 ; fully-qualified # 😡 E0.6 pouting face
+1F621 ; fully-qualified # 😡 E0.6 enraged face
1F620 ; fully-qualified # 😠 E0.6 angry face
1F92C ; fully-qualified # 🤬 E5.0 face with symbols on mouth
1F608 ; fully-qualified # 😈 E1.0 smiling face with horns
@@ -190,8 +191,7 @@
1F649 ; fully-qualified # 🙉 E0.6 hear-no-evil monkey
1F64A ; fully-qualified # 🙊 E0.6 speak-no-evil monkey
-# subgroup: emotion
-1F48B ; fully-qualified # 💋 E0.6 kiss mark
+# subgroup: heart
1F48C ; fully-qualified # 💌 E0.6 love letter
1F498 ; fully-qualified # 💘 E0.6 heart with arrow
1F49D ; fully-qualified # 💝 E0.6 heart with ribbon
@@ -210,14 +210,20 @@
2764 200D 1FA79 ; unqualified # ❤🩹 E13.1 mending heart
2764 FE0F ; fully-qualified # ❤️ E0.6 red heart
2764 ; unqualified # ❤ E0.6 red heart
+1FA77 ; fully-qualified # 🩷 E15.0 pink heart
1F9E1 ; fully-qualified # 🧡 E5.0 orange heart
1F49B ; fully-qualified # 💛 E0.6 yellow heart
1F49A ; fully-qualified # 💚 E0.6 green heart
1F499 ; fully-qualified # 💙 E0.6 blue heart
+1FA75 ; fully-qualified # 🩵 E15.0 light blue heart
1F49C ; fully-qualified # 💜 E0.6 purple heart
1F90E ; fully-qualified # 🤎 E12.0 brown heart
1F5A4 ; fully-qualified # 🖤 E3.0 black heart
+1FA76 ; fully-qualified # 🩶 E15.0 grey heart
1F90D ; fully-qualified # 🤍 E12.0 white heart
+
+# subgroup: emotion
+1F48B ; fully-qualified # 💋 E0.6 kiss mark
1F4AF ; fully-qualified # 💯 E0.6 hundred points
1F4A2 ; fully-qualified # 💢 E0.6 anger symbol
1F4A5 ; fully-qualified # 💥 E0.6 collision
@@ -226,21 +232,20 @@
1F4A8 ; fully-qualified # 💨 E0.6 dashing away
1F573 FE0F ; fully-qualified # 🕳️ E0.7 hole
1F573 ; unqualified # 🕳 E0.7 hole
-1F4A3 ; fully-qualified # 💣 E0.6 bomb
1F4AC ; fully-qualified # 💬 E0.6 speech balloon
1F441 FE0F 200D 1F5E8 FE0F ; fully-qualified # 👁️🗨️ E2.0 eye in speech bubble
1F441 200D 1F5E8 FE0F ; unqualified # 👁🗨️ E2.0 eye in speech bubble
-1F441 FE0F 200D 1F5E8 ; unqualified # 👁️🗨 E2.0 eye in speech bubble
+1F441 FE0F 200D 1F5E8 ; minimally-qualified # 👁️🗨 E2.0 eye in speech bubble
1F441 200D 1F5E8 ; unqualified # 👁🗨 E2.0 eye in speech bubble
1F5E8 FE0F ; fully-qualified # 🗨️ E2.0 left speech bubble
1F5E8 ; unqualified # 🗨 E2.0 left speech bubble
1F5EF FE0F ; fully-qualified # 🗯️ E0.7 right anger bubble
1F5EF ; unqualified # 🗯 E0.7 right anger bubble
1F4AD ; fully-qualified # 💭 E1.0 thought balloon
-1F4A4 ; fully-qualified # 💤 E0.6 zzz
+1F4A4 ; fully-qualified # 💤 E0.6 ZZZ
-# Smileys & Emotion subtotal: 177
-# Smileys & Emotion subtotal: 177 w/o modifiers
+# Smileys & Emotion subtotal: 180
+# Smileys & Emotion subtotal: 180 w/o modifiers
# group: People & Body
@@ -300,6 +305,18 @@
1FAF4 1F3FD ; fully-qualified # 🫴🏽 E14.0 palm up hand: medium skin tone
1FAF4 1F3FE ; fully-qualified # 🫴🏾 E14.0 palm up hand: medium-dark skin tone
1FAF4 1F3FF ; fully-qualified # 🫴🏿 E14.0 palm up hand: dark skin tone
+1FAF7 ; fully-qualified # 🫷 E15.0 leftwards pushing hand
+1FAF7 1F3FB ; fully-qualified # 🫷🏻 E15.0 leftwards pushing hand: light skin tone
+1FAF7 1F3FC ; fully-qualified # 🫷🏼 E15.0 leftwards pushing hand: medium-light skin tone
+1FAF7 1F3FD ; fully-qualified # 🫷🏽 E15.0 leftwards pushing hand: medium skin tone
+1FAF7 1F3FE ; fully-qualified # 🫷🏾 E15.0 leftwards pushing hand: medium-dark skin tone
+1FAF7 1F3FF ; fully-qualified # 🫷🏿 E15.0 leftwards pushing hand: dark skin tone
+1FAF8 ; fully-qualified # 🫸 E15.0 rightwards pushing hand
+1FAF8 1F3FB ; fully-qualified # 🫸🏻 E15.0 rightwards pushing hand: light skin tone
+1FAF8 1F3FC ; fully-qualified # 🫸🏼 E15.0 rightwards pushing hand: medium-light skin tone
+1FAF8 1F3FD ; fully-qualified # 🫸🏽 E15.0 rightwards pushing hand: medium skin tone
+1FAF8 1F3FE ; fully-qualified # 🫸🏾 E15.0 rightwards pushing hand: medium-dark skin tone
+1FAF8 1F3FF ; fully-qualified # 🫸🏿 E15.0 rightwards pushing hand: dark skin tone
# subgroup: hand-fingers-partial
1F44C ; fully-qualified # 👌 E0.6 OK hand
@@ -473,11 +490,11 @@
1F932 1F3FE ; fully-qualified # 🤲🏾 E5.0 palms up together: medium-dark skin tone
1F932 1F3FF ; fully-qualified # 🤲🏿 E5.0 palms up together: dark skin tone
1F91D ; fully-qualified # 🤝 E3.0 handshake
-1F91D 1F3FB ; fully-qualified # 🤝🏻 E3.0 handshake: light skin tone
-1F91D 1F3FC ; fully-qualified # 🤝🏼 E3.0 handshake: medium-light skin tone
-1F91D 1F3FD ; fully-qualified # 🤝🏽 E3.0 handshake: medium skin tone
-1F91D 1F3FE ; fully-qualified # 🤝🏾 E3.0 handshake: medium-dark skin tone
-1F91D 1F3FF ; fully-qualified # 🤝🏿 E3.0 handshake: dark skin tone
+1F91D 1F3FB ; fully-qualified # 🤝🏻 E14.0 handshake: light skin tone
+1F91D 1F3FC ; fully-qualified # 🤝🏼 E14.0 handshake: medium-light skin tone
+1F91D 1F3FD ; fully-qualified # 🤝🏽 E14.0 handshake: medium skin tone
+1F91D 1F3FE ; fully-qualified # 🤝🏾 E14.0 handshake: medium-dark skin tone
+1F91D 1F3FF ; fully-qualified # 🤝🏿 E14.0 handshake: dark skin tone
1FAF1 1F3FB 200D 1FAF2 1F3FC ; fully-qualified # 🫱🏻🫲🏼 E14.0 handshake: light skin tone, medium-light skin tone
1FAF1 1F3FB 200D 1FAF2 1F3FD ; fully-qualified # 🫱🏻🫲🏽 E14.0 handshake: light skin tone, medium skin tone
1FAF1 1F3FB 200D 1FAF2 1F3FE ; fully-qualified # 🫱🏻🫲🏾 E14.0 handshake: light skin tone, medium-dark skin tone
@@ -1455,7 +1472,7 @@
1F575 1F3FF ; fully-qualified # 🕵🏿 E2.0 detective: dark skin tone
1F575 FE0F 200D 2642 FE0F ; fully-qualified # 🕵️♂️ E4.0 man detective
1F575 200D 2642 FE0F ; unqualified # 🕵♂️ E4.0 man detective
-1F575 FE0F 200D 2642 ; unqualified # 🕵️♂ E4.0 man detective
+1F575 FE0F 200D 2642 ; minimally-qualified # 🕵️♂ E4.0 man detective
1F575 200D 2642 ; unqualified # 🕵♂ E4.0 man detective
1F575 1F3FB 200D 2642 FE0F ; fully-qualified # 🕵🏻♂️ E4.0 man detective: light skin tone
1F575 1F3FB 200D 2642 ; minimally-qualified # 🕵🏻♂ E4.0 man detective: light skin tone
@@ -1469,7 +1486,7 @@
1F575 1F3FF 200D 2642 ; minimally-qualified # 🕵🏿♂ E4.0 man detective: dark skin tone
1F575 FE0F 200D 2640 FE0F ; fully-qualified # 🕵️♀️ E4.0 woman detective
1F575 200D 2640 FE0F ; unqualified # 🕵♀️ E4.0 woman detective
-1F575 FE0F 200D 2640 ; unqualified # 🕵️♀ E4.0 woman detective
+1F575 FE0F 200D 2640 ; minimally-qualified # 🕵️♀ E4.0 woman detective
1F575 200D 2640 ; unqualified # 🕵♀ E4.0 woman detective
1F575 1F3FB 200D 2640 FE0F ; fully-qualified # 🕵🏻♀️ E4.0 woman detective: light skin tone
1F575 1F3FB 200D 2640 ; minimally-qualified # 🕵🏻♀ E4.0 woman detective: light skin tone
@@ -2302,7 +2319,7 @@
1F3CC 1F3FF ; fully-qualified # 🏌🏿 E4.0 person golfing: dark skin tone
1F3CC FE0F 200D 2642 FE0F ; fully-qualified # 🏌️♂️ E4.0 man golfing
1F3CC 200D 2642 FE0F ; unqualified # 🏌♂️ E4.0 man golfing
-1F3CC FE0F 200D 2642 ; unqualified # 🏌️♂ E4.0 man golfing
+1F3CC FE0F 200D 2642 ; minimally-qualified # 🏌️♂ E4.0 man golfing
1F3CC 200D 2642 ; unqualified # 🏌♂ E4.0 man golfing
1F3CC 1F3FB 200D 2642 FE0F ; fully-qualified # 🏌🏻♂️ E4.0 man golfing: light skin tone
1F3CC 1F3FB 200D 2642 ; minimally-qualified # 🏌🏻♂ E4.0 man golfing: light skin tone
@@ -2316,7 +2333,7 @@
1F3CC 1F3FF 200D 2642 ; minimally-qualified # 🏌🏿♂ E4.0 man golfing: dark skin tone
1F3CC FE0F 200D 2640 FE0F ; fully-qualified # 🏌️♀️ E4.0 woman golfing
1F3CC 200D 2640 FE0F ; unqualified # 🏌♀️ E4.0 woman golfing
-1F3CC FE0F 200D 2640 ; unqualified # 🏌️♀ E4.0 woman golfing
+1F3CC FE0F 200D 2640 ; minimally-qualified # 🏌️♀ E4.0 woman golfing
1F3CC 200D 2640 ; unqualified # 🏌♀ E4.0 woman golfing
1F3CC 1F3FB 200D 2640 FE0F ; fully-qualified # 🏌🏻♀️ E4.0 woman golfing: light skin tone
1F3CC 1F3FB 200D 2640 ; minimally-qualified # 🏌🏻♀ E4.0 woman golfing: light skin tone
@@ -2427,7 +2444,7 @@
26F9 1F3FF ; fully-qualified # ⛹🏿 E2.0 person bouncing ball: dark skin tone
26F9 FE0F 200D 2642 FE0F ; fully-qualified # ⛹️♂️ E4.0 man bouncing ball
26F9 200D 2642 FE0F ; unqualified # ⛹♂️ E4.0 man bouncing ball
-26F9 FE0F 200D 2642 ; unqualified # ⛹️♂ E4.0 man bouncing ball
+26F9 FE0F 200D 2642 ; minimally-qualified # ⛹️♂ E4.0 man bouncing ball
26F9 200D 2642 ; unqualified # ⛹♂ E4.0 man bouncing ball
26F9 1F3FB 200D 2642 FE0F ; fully-qualified # ⛹🏻♂️ E4.0 man bouncing ball: light skin tone
26F9 1F3FB 200D 2642 ; minimally-qualified # ⛹🏻♂ E4.0 man bouncing ball: light skin tone
@@ -2441,7 +2458,7 @@
26F9 1F3FF 200D 2642 ; minimally-qualified # ⛹🏿♂ E4.0 man bouncing ball: dark skin tone
26F9 FE0F 200D 2640 FE0F ; fully-qualified # ⛹️♀️ E4.0 woman bouncing ball
26F9 200D 2640 FE0F ; unqualified # ⛹♀️ E4.0 woman bouncing ball
-26F9 FE0F 200D 2640 ; unqualified # ⛹️♀ E4.0 woman bouncing ball
+26F9 FE0F 200D 2640 ; minimally-qualified # ⛹️♀ E4.0 woman bouncing ball
26F9 200D 2640 ; unqualified # ⛹♀ E4.0 woman bouncing ball
26F9 1F3FB 200D 2640 FE0F ; fully-qualified # ⛹🏻♀️ E4.0 woman bouncing ball: light skin tone
26F9 1F3FB 200D 2640 ; minimally-qualified # ⛹🏻♀ E4.0 woman bouncing ball: light skin tone
@@ -2462,7 +2479,7 @@
1F3CB 1F3FF ; fully-qualified # 🏋🏿 E2.0 person lifting weights: dark skin tone
1F3CB FE0F 200D 2642 FE0F ; fully-qualified # 🏋️♂️ E4.0 man lifting weights
1F3CB 200D 2642 FE0F ; unqualified # 🏋♂️ E4.0 man lifting weights
-1F3CB FE0F 200D 2642 ; unqualified # 🏋️♂ E4.0 man lifting weights
+1F3CB FE0F 200D 2642 ; minimally-qualified # 🏋️♂ E4.0 man lifting weights
1F3CB 200D 2642 ; unqualified # 🏋♂ E4.0 man lifting weights
1F3CB 1F3FB 200D 2642 FE0F ; fully-qualified # 🏋🏻♂️ E4.0 man lifting weights: light skin tone
1F3CB 1F3FB 200D 2642 ; minimally-qualified # 🏋🏻♂ E4.0 man lifting weights: light skin tone
@@ -2476,7 +2493,7 @@
1F3CB 1F3FF 200D 2642 ; minimally-qualified # 🏋🏿♂ E4.0 man lifting weights: dark skin tone
1F3CB FE0F 200D 2640 FE0F ; fully-qualified # 🏋️♀️ E4.0 woman lifting weights
1F3CB 200D 2640 FE0F ; unqualified # 🏋♀️ E4.0 woman lifting weights
-1F3CB FE0F 200D 2640 ; unqualified # 🏋️♀ E4.0 woman lifting weights
+1F3CB FE0F 200D 2640 ; minimally-qualified # 🏋️♀ E4.0 woman lifting weights
1F3CB 200D 2640 ; unqualified # 🏋♀ E4.0 woman lifting weights
1F3CB 1F3FB 200D 2640 FE0F ; fully-qualified # 🏋🏻♀️ E4.0 woman lifting weights: light skin tone
1F3CB 1F3FB 200D 2640 ; minimally-qualified # 🏋🏻♀ E4.0 woman lifting weights: light skin tone
@@ -3262,8 +3279,8 @@
1FAC2 ; fully-qualified # 🫂 E13.0 people hugging
1F463 ; fully-qualified # 👣 E0.6 footprints
-# People & Body subtotal: 2986
-# People & Body subtotal: 506 w/o modifiers
+# People & Body subtotal: 2998
+# People & Body subtotal: 508 w/o modifiers
# group: Component
@@ -3306,6 +3323,8 @@
1F405 ; fully-qualified # 🐅 E1.0 tiger
1F406 ; fully-qualified # 🐆 E1.0 leopard
1F434 ; fully-qualified # 🐴 E0.6 horse face
+1FACE ; fully-qualified # 🫎 E15.0 moose
+1FACF ; fully-qualified # 🫏 E15.0 donkey
1F40E ; fully-qualified # 🐎 E0.6 horse
1F984 ; fully-qualified # 🦄 E1.0 unicorn
1F993 ; fully-qualified # 🦓 E5.0 zebra
@@ -3373,6 +3392,9 @@
1F9A9 ; fully-qualified # 🦩 E12.0 flamingo
1F99A ; fully-qualified # 🦚 E11.0 peacock
1F99C ; fully-qualified # 🦜 E11.0 parrot
+1FABD ; fully-qualified # 🪽 E15.0 wing
+1F426 200D 2B1B ; fully-qualified # 🐦⬛ E15.0 black bird
+1FABF ; fully-qualified # 🪿 E15.0 goose
# subgroup: animal-amphibian
1F438 ; fully-qualified # 🐸 E0.6 frog
@@ -3399,6 +3421,7 @@
1F419 ; fully-qualified # 🐙 E0.6 octopus
1F41A ; fully-qualified # 🐚 E0.6 spiral shell
1FAB8 ; fully-qualified # 🪸 E14.0 coral
+1FABC ; fully-qualified # 🪼 E15.0 jellyfish
# subgroup: animal-bug
1F40C ; fully-qualified # 🐌 E0.6 snail
@@ -3433,6 +3456,7 @@
1F33B ; fully-qualified # 🌻 E0.6 sunflower
1F33C ; fully-qualified # 🌼 E0.6 blossom
1F337 ; fully-qualified # 🌷 E0.6 tulip
+1FABB ; fully-qualified # 🪻 E15.0 hyacinth
# subgroup: plant-other
1F331 ; fully-qualified # 🌱 E0.6 seedling
@@ -3451,9 +3475,10 @@
1F343 ; fully-qualified # 🍃 E0.6 leaf fluttering in wind
1FAB9 ; fully-qualified # 🪹 E14.0 empty nest
1FABA ; fully-qualified # 🪺 E14.0 nest with eggs
+1F344 ; fully-qualified # 🍄 E0.6 mushroom
-# Animals & Nature subtotal: 151
-# Animals & Nature subtotal: 151 w/o modifiers
+# Animals & Nature subtotal: 159
+# Animals & Nature subtotal: 159 w/o modifiers
# group: Food & Drink
@@ -3492,10 +3517,11 @@
1F966 ; fully-qualified # 🥦 E5.0 broccoli
1F9C4 ; fully-qualified # 🧄 E12.0 garlic
1F9C5 ; fully-qualified # 🧅 E12.0 onion
-1F344 ; fully-qualified # 🍄 E0.6 mushroom
1F95C ; fully-qualified # 🥜 E3.0 peanuts
1FAD8 ; fully-qualified # 🫘 E14.0 beans
1F330 ; fully-qualified # 🌰 E0.6 chestnut
+1FADA ; fully-qualified # 🫚 E15.0 ginger root
+1FADB ; fully-qualified # 🫛 E15.0 pea pod
# subgroup: food-prepared
1F35E ; fully-qualified # 🍞 E0.6 bread
@@ -3607,8 +3633,8 @@
1FAD9 ; fully-qualified # 🫙 E14.0 jar
1F3FA ; fully-qualified # 🏺 E1.0 amphora
-# Food & Drink subtotal: 134
-# Food & Drink subtotal: 134 w/o modifiers
+# Food & Drink subtotal: 135
+# Food & Drink subtotal: 135 w/o modifiers
# group: Travel & Places
@@ -3974,11 +4000,10 @@
1F3AF ; fully-qualified # 🎯 E0.6 bullseye
1FA80 ; fully-qualified # 🪀 E12.0 yo-yo
1FA81 ; fully-qualified # 🪁 E12.0 kite
+1F52B ; fully-qualified # 🔫 E0.6 water pistol
1F3B1 ; fully-qualified # 🎱 E0.6 pool 8 ball
1F52E ; fully-qualified # 🔮 E0.6 crystal ball
1FA84 ; fully-qualified # 🪄 E13.0 magic wand
-1F9FF ; fully-qualified # 🧿 E11.0 nazar amulet
-1FAAC ; fully-qualified # 🪬 E14.0 hamsa
1F3AE ; fully-qualified # 🎮 E0.6 video game
1F579 FE0F ; fully-qualified # 🕹️ E0.7 joystick
1F579 ; unqualified # 🕹 E0.7 joystick
@@ -4013,8 +4038,8 @@
1F9F6 ; fully-qualified # 🧶 E11.0 yarn
1FAA2 ; fully-qualified # 🪢 E13.0 knot
-# Activities subtotal: 97
-# Activities subtotal: 97 w/o modifiers
+# Activities subtotal: 96
+# Activities subtotal: 96 w/o modifiers
# group: Objects
@@ -4040,6 +4065,7 @@
1FA73 ; fully-qualified # 🩳 E12.0 shorts
1F459 ; fully-qualified # 👙 E0.6 bikini
1F45A ; fully-qualified # 👚 E0.6 woman’s clothes
+1FAAD ; fully-qualified # 🪭 E15.0 folding hand fan
1F45B ; fully-qualified # 👛 E0.6 purse
1F45C ; fully-qualified # 👜 E0.6 handbag
1F45D ; fully-qualified # 👝 E0.6 clutch bag
@@ -4055,6 +4081,7 @@
1F461 ; fully-qualified # 👡 E0.6 woman’s sandal
1FA70 ; fully-qualified # 🩰 E12.0 ballet shoes
1F462 ; fully-qualified # 👢 E0.6 woman’s boot
+1FAAE ; fully-qualified # 🪮 E15.0 hair pick
1F451 ; fully-qualified # 👑 E0.6 crown
1F452 ; fully-qualified # 👒 E0.6 woman’s hat
1F3A9 ; fully-qualified # 🎩 E0.6 top hat
@@ -4103,6 +4130,8 @@
1FA95 ; fully-qualified # 🪕 E12.0 banjo
1F941 ; fully-qualified # 🥁 E3.0 drum
1FA98 ; fully-qualified # 🪘 E13.0 long drum
+1FA87 ; fully-qualified # 🪇 E15.0 maracas
+1FA88 ; fully-qualified # 🪈 E15.0 flute
# subgroup: phone
1F4F1 ; fully-qualified # 📱 E0.6 mobile phone
@@ -4275,7 +4304,7 @@
1F5E1 ; unqualified # 🗡 E0.7 dagger
2694 FE0F ; fully-qualified # ⚔️ E1.0 crossed swords
2694 ; unqualified # ⚔ E1.0 crossed swords
-1F52B ; fully-qualified # 🔫 E0.6 water pistol
+1F4A3 ; fully-qualified # 💣 E0.6 bomb
1FA83 ; fully-qualified # 🪃 E13.0 boomerang
1F3F9 ; fully-qualified # 🏹 E1.0 bow and arrow
1F6E1 FE0F ; fully-qualified # 🛡️ E0.7 shield
@@ -4354,12 +4383,14 @@
1FAA6 ; fully-qualified # 🪦 E13.0 headstone
26B1 FE0F ; fully-qualified # ⚱️ E1.0 funeral urn
26B1 ; unqualified # ⚱ E1.0 funeral urn
+1F9FF ; fully-qualified # 🧿 E11.0 nazar amulet
+1FAAC ; fully-qualified # 🪬 E14.0 hamsa
1F5FF ; fully-qualified # 🗿 E0.6 moai
1FAA7 ; fully-qualified # 🪧 E13.0 placard
1FAAA ; fully-qualified # 🪪 E14.0 identification card
-# Objects subtotal: 304
-# Objects subtotal: 304 w/o modifiers
+# Objects subtotal: 310
+# Objects subtotal: 310 w/o modifiers
# group: Symbols
@@ -4455,6 +4486,7 @@
262E ; unqualified # ☮ E1.0 peace symbol
1F54E ; fully-qualified # 🕎 E1.0 menorah
1F52F ; fully-qualified # 🔯 E0.6 dotted six-pointed star
+1FAAF ; fully-qualified # 🪯 E15.0 khanda
# subgroup: zodiac
2648 ; fully-qualified # ♈ E0.6 Aries
@@ -4503,6 +4535,7 @@
1F505 ; fully-qualified # 🔅 E1.0 dim button
1F506 ; fully-qualified # 🔆 E1.0 bright button
1F4F6 ; fully-qualified # 📶 E0.6 antenna bars
+1F6DC ; fully-qualified # 🛜 E15.0 wireless
1F4F3 ; fully-qualified # 📳 E0.6 vibration mode
1F4F4 ; fully-qualified # 📴 E0.6 mobile phone off
@@ -4693,8 +4726,8 @@
1F533 ; fully-qualified # 🔳 E0.6 white square button
1F532 ; fully-qualified # 🔲 E0.6 black square button
-# Symbols subtotal: 302
-# Symbols subtotal: 302 w/o modifiers
+# Symbols subtotal: 304
+# Symbols subtotal: 304 w/o modifiers
# group: Flags
@@ -4709,7 +4742,7 @@
1F3F3 200D 1F308 ; unqualified # 🏳🌈 E4.0 rainbow flag
1F3F3 FE0F 200D 26A7 FE0F ; fully-qualified # 🏳️⚧️ E13.0 transgender flag
1F3F3 200D 26A7 FE0F ; unqualified # 🏳⚧️ E13.0 transgender flag
-1F3F3 FE0F 200D 26A7 ; unqualified # 🏳️⚧ E13.0 transgender flag
+1F3F3 FE0F 200D 26A7 ; minimally-qualified # 🏳️⚧ E13.0 transgender flag
1F3F3 200D 26A7 ; unqualified # 🏳⚧ E13.0 transgender flag
1F3F4 200D 2620 FE0F ; fully-qualified # 🏴☠️ E11.0 pirate flag
1F3F4 200D 2620 ; minimally-qualified # 🏴☠ E11.0 pirate flag
@@ -4983,9 +5016,9 @@
# Flags subtotal: 275 w/o modifiers
# Status Counts
-# fully-qualified : 3624
-# minimally-qualified : 817
-# unqualified : 252
+# fully-qualified : 3655
+# minimally-qualified : 827
+# unqualified : 242
# component : 9
#EOF
diff --git a/lib/pleroma/emoji.ex b/lib/pleroma/emoji.ex
index 35f0da816..21bcb0111 100644
--- a/lib/pleroma/emoji.ex
+++ b/lib/pleroma/emoji.ex
@@ -9,6 +9,7 @@ defmodule Pleroma.Emoji do
"""
use GenServer
+ alias Pleroma.Emoji.Combinations
alias Pleroma.Emoji.Loader
require Logger
@@ -23,6 +24,8 @@ defmodule Pleroma.Emoji do
defstruct [:code, :file, :tags, :safe_code, :safe_file]
+ @type t :: %__MODULE__{}
+
@doc "Build emoji struct"
def build({code, file, tags}) do
%__MODULE__{
@@ -48,10 +51,12 @@ def reload do
end
@doc "Returns the path of the emoji `name`."
- @spec get(String.t()) :: String.t() | nil
+ @spec get(String.t()) :: Pleroma.Emoji.t() | nil
def get(name) do
+ name = maybe_strip_name(name)
+
case :ets.lookup(@ets, name) do
- [{_, path}] -> path
+ [{_, emoji}] -> emoji
_ -> nil
end
end
@@ -133,8 +138,72 @@ defp update_emojis(emojis) do
emojis = emojis ++ regional_indicators
for emoji <- emojis do
- def is_unicode_emoji?(unquote(emoji)), do: true
+ def unicode?(unquote(emoji)), do: true
end
- def is_unicode_emoji?(_), do: false
+ def unicode?(_), do: false
+
+ @emoji_regex ~r/:[A-Za-z0-9_-]+(@.+)?:/
+
+ def custom?(s) when is_binary(s), do: Regex.match?(@emoji_regex, s)
+
+ def custom?(_), do: false
+
+ def maybe_strip_name(name) when is_binary(name), do: String.trim(name, ":")
+
+ def maybe_strip_name(name), do: name
+
+ def maybe_quote(name) when is_binary(name) do
+ if unicode?(name) do
+ name
+ else
+ if String.starts_with?(name, ":") do
+ name
+ else
+ ":#{name}:"
+ end
+ end
+ end
+
+ def maybe_quote(name), do: name
+
+ def emoji_url(%{"type" => "EmojiReact", "content" => _, "tag" => []}), do: nil
+
+ def emoji_url(%{"type" => "EmojiReact", "content" => emoji, "tag" => tags}) do
+ emoji = maybe_strip_name(emoji)
+
+ tag =
+ tags
+ |> Enum.find(fn tag ->
+ tag["type"] == "Emoji" && !is_nil(tag["name"]) && tag["name"] == emoji
+ end)
+
+ if is_nil(tag) do
+ nil
+ else
+ tag
+ |> Map.get("icon")
+ |> Map.get("url")
+ end
+ end
+
+ def emoji_url(_), do: nil
+
+ def emoji_name_with_instance(name, url) do
+ url = url |> URI.parse() |> Map.get(:host)
+ "#{name}@#{url}"
+ end
+
+ emoji_qualification_map =
+ emojis
+ |> Enum.filter(&String.contains?(&1, "\uFE0F"))
+ |> Combinations.variate_emoji_qualification()
+
+ for {qualified, unqualified_list} <- emoji_qualification_map do
+ for unqualified <- unqualified_list do
+ def fully_qualify_emoji(unquote(unqualified)), do: unquote(qualified)
+ end
+ end
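+ # e.g. the generated clauses map the unqualified "2764" to the fully-qualified "2764 FE0F" (red heart)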
+
+ def fully_qualify_emoji(emoji), do: emoji
end
diff --git a/lib/pleroma/emoji/combinations.ex b/lib/pleroma/emoji/combinations.ex
new file mode 100644
index 000000000..981c73596
--- /dev/null
+++ b/lib/pleroma/emoji/combinations.ex
@@ -0,0 +1,45 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Emoji.Combinations do
+ # FE0F is the emoji variation sequence. It is used for fully-qualifying
+ # emoji, and that includes emoji combinations.
+ # This code generates combinations per emoji: for each FE0F, all possible
+ # combinations of the character being removed or staying will be generated.
+ # This is made as an attempt to find all partially-qualified and unqualified
+ # versions of a fully-qualified emoji.
+ # I have found *no cases* for which this would be a problem, after browsing
+ # the entire emoji list in emoji-test.txt. This is safe, and, sadly, most
+ # likely sane too.
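+ # For example, "2764 FE0F" (red heart) variates to "2764" and "2764 FE0F",
+ # covering both the unqualified and fully-qualified forms of the same emoji.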
+
+ defp qualification_combinations(codepoints) do
+ qualification_combinations([[]], codepoints)
+ end
+
+ defp qualification_combinations(acc, []), do: acc
+
+ defp qualification_combinations(acc, ["\uFE0F" | tail]) do
+ acc
+ |> Enum.flat_map(fn x -> [x, x ++ ["\uFE0F"]] end)
+ |> qualification_combinations(tail)
+ end
+
+ defp qualification_combinations(acc, [codepoint | tail]) do
+ acc
+ |> Enum.map(&Kernel.++(&1, [codepoint]))
+ |> qualification_combinations(tail)
+ end
+
+ def variate_emoji_qualification(emoji) when is_binary(emoji) do
+ emoji
+ |> String.codepoints()
+ |> qualification_combinations()
+ |> Enum.map(&List.to_string/1)
+ end
+
+ def variate_emoji_qualification(emoji) when is_list(emoji) do
+ emoji
+ |> Enum.map(fn emoji -> {emoji, variate_emoji_qualification(emoji)} end)
+ end
+end
diff --git a/lib/pleroma/emoji/loader.ex b/lib/pleroma/emoji/loader.ex
index 97d4b8f70..b6e544323 100644
--- a/lib/pleroma/emoji/loader.ex
+++ b/lib/pleroma/emoji/loader.ex
@@ -15,8 +15,6 @@ defmodule Pleroma.Emoji.Loader do
require Logger
- @mix_env Mix.env()
-
@type pattern :: Regex.t() | module() | String.t()
@type patterns :: pattern() | [pattern()]
@type group_patterns :: keyword(patterns())
@@ -59,7 +57,7 @@ def load do
Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")
if not Enum.empty?(files) do
- Logger.warn(
+ Logger.warning(
"Found files in the emoji folder. These will be ignored, please move them to a subdirectory\nFound files: #{Enum.join(files, ", ")}"
)
end
@@ -79,7 +77,7 @@ def load do
# for testing emoji.txt entries we do not want exposed in normal operation
test_emoji =
- if @mix_env == :test do
+ if Application.get_env(:pleroma, __MODULE__)[:test_emoji] do
load_from_file("test/config/emoji.txt", emoji_groups)
else
[]
diff --git a/lib/pleroma/emoji/pack.ex b/lib/pleroma/emoji/pack.ex
index a361ea200..afc341853 100644
--- a/lib/pleroma/emoji/pack.ex
+++ b/lib/pleroma/emoji/pack.ex
@@ -100,7 +100,7 @@ def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"}
{:ok, _emoji_files} =
:zip.unzip(
to_charlist(file.path),
- [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, tmp_dir}]
+ [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}]
)
{_, updated_pack} =
@@ -209,7 +209,9 @@ def list_remote(opts) do
with :ok <- validate_shareable_packs_available(uri) do
uri
- |> URI.merge("/api/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}")
+ |> URI.merge(
+ "/api/v1/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}"
+ )
|> http_get()
end
end
@@ -249,8 +251,12 @@ def download(name, url, as) do
uri = url |> String.trim() |> URI.parse()
with :ok <- validate_shareable_packs_available(uri),
+ {:ok, %{"files_count" => files_count}} <-
+ uri |> URI.merge("/api/v1/pleroma/emoji/pack?name=#{name}&page_size=0") |> http_get(),
{:ok, remote_pack} <-
- uri |> URI.merge("/api/pleroma/emoji/pack?name=#{name}") |> http_get(),
+ uri
+ |> URI.merge("/api/v1/pleroma/emoji/pack?name=#{name}&page_size=#{files_count}")
+ |> http_get(),
{:ok, %{sha: sha, url: url} = pack_info} <- fetch_pack_info(remote_pack, uri, name),
{:ok, archive} <- download_archive(url, sha),
pack <- copy_as(remote_pack, as || name),
@@ -285,6 +291,7 @@ def update_metadata(name, data) do
@spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
def load_pack(name) do
+ name = Path.basename(name)
pack_file = Path.join([emoji_path(), name, "pack.json"])
with {:ok, _} <- File.stat(pack_file),
@@ -591,7 +598,7 @@ defp fetch_pack_info(remote_pack, uri, name) do
{:ok,
%{
sha: sha,
- url: URI.merge(uri, "/api/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
+ url: URI.merge(uri, "/api/v1/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
}}
%{"fallback-src" => src, "fallback-src-sha256" => sha} when is_binary(src) ->
diff --git a/lib/pleroma/filter.ex b/lib/pleroma/filter.ex
index db88bc021..e827d3cbc 100644
--- a/lib/pleroma/filter.ex
+++ b/lib/pleroma/filter.ex
@@ -216,9 +216,6 @@ def compose_regex([_ | _] = filters, format) do
:re ->
~r/\b#{phrases}\b/i
-
- _ ->
- nil
end
end
diff --git a/lib/pleroma/following_relationship.ex b/lib/pleroma/following_relationship.ex
index e6449aa67..f38c2fce9 100644
--- a/lib/pleroma/following_relationship.ex
+++ b/lib/pleroma/following_relationship.ex
@@ -194,12 +194,13 @@ def move_following(origin, target) do
|> join(:inner, [r], f in assoc(r, :follower))
|> where(following_id: ^origin.id)
|> where([r, f], f.allow_following_move == true)
+ |> where([r, f], f.local == true)
|> limit(50)
|> preload([:follower])
|> Repo.all()
|> Enum.map(fn following_relationship ->
- Repo.delete(following_relationship)
Pleroma.Web.CommonAPI.follow(following_relationship.follower, target)
+ Pleroma.Web.CommonAPI.unfollow(following_relationship.follower, origin)
end)
|> case do
[] ->
@@ -240,13 +241,13 @@ def find(following_relationships, follower, following) do
end
@doc """
- For a query with joined activity,
- keeps rows where activity's actor is followed by user -or- is NOT domain-blocked by user.
+ For a query with joined activity's actor,
+ keeps rows where actor is followed by user -or- is NOT domain-blocked by user.
"""
def keep_following_or_not_domain_blocked(query, user) do
where(
query,
- [_, activity],
+ [_, user_actor: user_actor],
fragment(
# "(actor's domain NOT in domain_blocks) OR (actor IS in followed AP IDs)"
"""
@@ -254,9 +255,9 @@ def keep_following_or_not_domain_blocked(query, user) do
? = ANY(SELECT ap_id FROM users AS u INNER JOIN following_relationships AS fr
ON u.id = fr.following_id WHERE fr.follower_id = ? AND fr.state = ?)
""",
- activity.actor,
+ user_actor.ap_id,
^user.domain_blocks,
- activity.actor,
+ user_actor.ap_id,
^User.binary_id(user.id),
^accept_state_code()
)
diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex
index a46c3e381..11d5af2fb 100644
--- a/lib/pleroma/formatter.ex
+++ b/lib/pleroma/formatter.ex
@@ -124,7 +124,7 @@ def mentions_escape(text, options \\ []) do
end
def markdown_to_html(text) do
- Earmark.as_html!(text, %Earmark.Options{compact_output: true})
+ Earmark.as_html!(text, %Earmark.Options{compact_output: true, smartypants: false})
end
def html_escape({text, mentions, hashtags}, type) do
diff --git a/lib/pleroma/gopher/server.ex b/lib/pleroma/gopher/server.ex
index 0fde0adcf..54245c9fa 100644
--- a/lib/pleroma/gopher/server.ex
+++ b/lib/pleroma/gopher/server.ex
@@ -114,7 +114,7 @@ def response("/main/all") do
def response("/notices/" <> id) do
with %Activity{} = activity <- Activity.get_by_id(id),
- true <- Visibility.is_public?(activity) do
+ true <- Visibility.public?(activity) do
activities =
ActivityPub.fetch_activities_for_context(activity.data["context"])
|> render_activities
diff --git a/lib/pleroma/gun/conn.ex b/lib/pleroma/gun/conn.ex
index 7c5785def..804cd11c7 100644
--- a/lib/pleroma/gun/conn.ex
+++ b/lib/pleroma/gun/conn.ex
@@ -56,7 +56,7 @@ defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do
{:ok, conn, protocol}
else
error ->
- Logger.warn(
+ Logger.warning(
"Opening proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
@@ -90,7 +90,7 @@ defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
{:ok, conn, protocol}
else
error ->
- Logger.warn(
+ Logger.warning(
"Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
@@ -106,7 +106,7 @@ defp do_open(%URI{host: host, port: port} = uri, opts) do
{:ok, conn, protocol}
else
error ->
- Logger.warn(
+ Logger.warning(
"Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
diff --git a/lib/pleroma/gun/connection_pool/reclaimer.ex b/lib/pleroma/gun/connection_pool/reclaimer.ex
index efd5c9fb8..35e7f4b2e 100644
--- a/lib/pleroma/gun/connection_pool/reclaimer.ex
+++ b/lib/pleroma/gun/connection_pool/reclaimer.ex
@@ -9,7 +9,7 @@ defp registry, do: Pleroma.Gun.ConnectionPool
def start_monitor do
pid =
- case :gen_server.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
+ case GenServer.start_link(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
{:ok, pid} ->
pid
diff --git a/lib/pleroma/gun/connection_pool/worker_supervisor.ex b/lib/pleroma/gun/connection_pool/worker_supervisor.ex
index d26a70be3..eb83962d8 100644
--- a/lib/pleroma/gun/connection_pool/worker_supervisor.ex
+++ b/lib/pleroma/gun/connection_pool/worker_supervisor.ex
@@ -18,10 +18,12 @@ def init(_opts) do
)
end
- def start_worker(opts, retry \\ false) do
+ def start_worker(opts, last_attempt \\ false) do
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
- if retry or free_pool() == :error do
+ funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
+
+ if Enum.any?(funs, fn fun -> fun.() end) do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
else
diff --git a/lib/pleroma/helpers/media_helper.ex b/lib/pleroma/helpers/media_helper.ex
index 24c845fcd..e44114d9d 100644
--- a/lib/pleroma/helpers/media_helper.ex
+++ b/lib/pleroma/helpers/media_helper.ex
@@ -8,11 +8,14 @@ defmodule Pleroma.Helpers.MediaHelper do
"""
alias Pleroma.HTTP
+ alias Vix.Vips.Operation
require Logger
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+
def missing_dependencies do
- Enum.reduce([imagemagick: "convert", ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
+ Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
if Pleroma.Utils.command_available?(executable) do
acc
else
@@ -22,141 +25,63 @@ def missing_dependencies do
end
def image_resize(url, options) do
- with executable when is_binary(executable) <- System.find_executable("convert"),
- {:ok, args} <- prepare_image_resize_args(options),
- {:ok, env} <- HTTP.get(url, [], pool: :media),
- {:ok, fifo_path} <- mkfifo() do
- args = List.flatten([fifo_path, args])
- run_fifo(fifo_path, env, executable, args)
+ with {:ok, env} <- HTTP.get(url, [], pool: :media),
+ {:ok, resized} <-
+ Operation.thumbnail_buffer(env.body, options.max_width,
+ height: options.max_height,
+ size: :VIPS_SIZE_DOWN
+ ) do
+ if options[:format] == "png" do
+ Operation.pngsave_buffer(resized, Q: options[:quality])
+ else
+ Operation.jpegsave_buffer(resized, Q: options[:quality], interlace: true)
+ end
else
- nil -> {:error, {:convert, :command_not_found}}
{:error, _} = error -> error
end
end
- defp prepare_image_resize_args(
- %{max_width: max_width, max_height: max_height, format: "png"} = options
- ) do
- quality = options[:quality] || 85
- resize = Enum.join([max_width, "x", max_height, ">"])
-
- args = [
- "-resize",
- resize,
- "-quality",
- to_string(quality),
- "png:-"
- ]
-
- {:ok, args}
- end
-
- defp prepare_image_resize_args(%{max_width: max_width, max_height: max_height} = options) do
- quality = options[:quality] || 85
- resize = Enum.join([max_width, "x", max_height, ">"])
-
- args = [
- "-interlace",
- "Plane",
- "-resize",
- resize,
- "-quality",
- to_string(quality),
- "jpg:-"
- ]
-
- {:ok, args}
- end
-
- defp prepare_image_resize_args(_), do: {:error, :missing_options}
-
# Note: video thumbnail is intentionally not resized (always has original dimensions)
+ @spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
def video_framegrab(url) do
with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
+ false <- @cachex.exists?(:failed_media_helper_cache, url),
{:ok, env} <- HTTP.get(url, [], pool: :media),
- {:ok, fifo_path} <- mkfifo(),
- args = [
- "-y",
- "-i",
- fifo_path,
- "-vframes",
- "1",
- "-f",
- "mjpeg",
- "-loglevel",
- "error",
- "-"
- ] do
- run_fifo(fifo_path, env, executable, args)
+ {:ok, pid} <- StringIO.open(env.body) do
+ body_stream = IO.binstream(pid, 1)
+
+ task =
+ Task.async(fn ->
+ Exile.stream!(
+ [
+ executable,
+ "-i",
+ "pipe:0",
+ "-vframes",
+ "1",
+ "-f",
+ "mjpeg",
+ "pipe:1"
+ ],
+ input: body_stream,
+ ignore_epipe: true,
+ stderr: :disable
+ )
+ |> Enum.into(<<>>)
+ end)
+
+ case Task.yield(task, 5_000) do
+ nil ->
+ Task.shutdown(task)
+ @cachex.put(:failed_media_helper_cache, url, nil)
+ {:error, {:ffmpeg, :timeout}}
+
+ result ->
+ {:ok, result}
+ end
else
nil -> {:error, {:ffmpeg, :command_not_found}}
{:error, _} = error -> error
end
end
-
- defp run_fifo(fifo_path, env, executable, args) do
- pid =
- Port.open({:spawn_executable, executable}, [
- :use_stdio,
- :stream,
- :exit_status,
- :binary,
- args: args
- ])
-
- fifo = Port.open(to_charlist(fifo_path), [:eof, :binary, :stream, :out])
- fix = Pleroma.Helpers.QtFastStart.fix(env.body)
- true = Port.command(fifo, fix)
- :erlang.port_close(fifo)
- loop_recv(pid)
- after
- File.rm(fifo_path)
- end
-
- defp mkfifo do
- path = Path.join(System.tmp_dir!(), "pleroma-media-preview-pipe-#{Ecto.UUID.generate()}")
-
- case System.cmd("mkfifo", [path]) do
- {_, 0} ->
- spawn(fifo_guard(path))
- {:ok, path}
-
- {_, err} ->
- {:error, {:fifo_failed, err}}
- end
- end
-
- defp fifo_guard(path) do
- pid = self()
-
- fn ->
- ref = Process.monitor(pid)
-
- receive do
- {:DOWN, ^ref, :process, ^pid, _} ->
- File.rm(path)
- end
- end
- end
-
- defp loop_recv(pid) do
- loop_recv(pid, <<>>)
- end
-
- defp loop_recv(pid, acc) do
- receive do
- {^pid, {:data, data}} ->
- loop_recv(pid, acc <> data)
-
- {^pid, {:exit_status, 0}} ->
- {:ok, acc}
-
- {^pid, {:exit_status, status}} ->
- {:error, status}
- after
- 5000 ->
- :erlang.port_close(pid)
- {:error, :timeout}
- end
- end
end
diff --git a/lib/pleroma/helpers/qt_fast_start.ex b/lib/pleroma/helpers/qt_fast_start.ex
index 5711c7162..0b200b234 100644
--- a/lib/pleroma/helpers/qt_fast_start.ex
+++ b/lib/pleroma/helpers/qt_fast_start.ex
@@ -40,16 +40,21 @@ defp fix(
got_mdat,
acc
) do
- full_size = (size - 8) * 8
- <<data::bits-size(full_size), rest::bits>> = rest
+ try do
+ full_size = (size - 8) * 8
+ <<data::bits-size(full_size), rest::bits>> = rest
- acc = [
- {fourcc, pos, pos + size, size,
- <<size::integer-big-size(32), fourcc::bits-size(32), data::bits>>}
- | acc
- ]
+ acc = [
+ {fourcc, pos, pos + size, size,
+ <<size::integer-big-size(32), fourcc::bits-size(32), data::bits>>}
+ | acc
+ ]
- fix(rest, pos + size, got_moov || fourcc == "moov", got_mdat || fourcc == "mdat", acc)
+ fix(rest, pos + size, got_moov || fourcc == "moov", got_mdat || fourcc == "mdat", acc)
+ rescue
+ _ ->
+ :abort
+ end
end
defp fix(<<>>, _pos, _, _, acc) do
@@ -121,9 +126,15 @@ defp rewrite_entries(
<<head::integer-big-size(unquote(size)), rest::bits>>,
acc
) do
- rewrite_entries(unquote(size), offset, rest, [
- acc | <<head + offset::integer-big-size(unquote(size))>>
- ])
+ rewrite_entries(
+ unquote(size),
+ offset,
+ rest,
+ acc ++
+ [
+ <<head + offset::integer-big-size(unquote(size))>>
+ ]
+ )
end
end
diff --git a/lib/pleroma/html.ex b/lib/pleroma/html.ex
index 5bf735c4f..4de7cbb76 100644
--- a/lib/pleroma/html.ex
+++ b/lib/pleroma/html.ex
@@ -6,8 +6,6 @@ defmodule Pleroma.HTML do
# Scrubbers are compiled on boot so they can be configured in OTP releases
# @on_load :compile_scrubbers
- @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
-
def compile_scrubbers do
dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")
@@ -67,22 +65,9 @@ def ensure_scrubbed_html(
end
end
- def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
+ @spec extract_first_external_url_from_object(Pleroma.Object.t()) :: String.t() | nil
+ def extract_first_external_url_from_object(%{data: %{"content" => content}})
when is_binary(content) do
- unless object.data["fake"] do
- key = "URL|#{object.id}"
-
- @cachex.fetch!(:scrubber_cache, key, fn _key ->
- {:commit, {:ok, extract_first_external_url(content)}}
- end)
- else
- {:ok, extract_first_external_url(content)}
- end
- end
-
- def extract_first_external_url_from_object(_), do: {:error, :no_content}
-
- def extract_first_external_url(content) do
content
|> Floki.parse_fragment!()
|> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
@@ -90,4 +75,6 @@ def extract_first_external_url(content) do
|> Floki.attribute("href")
|> Enum.at(0)
end
+
+ def extract_first_external_url_from_object(_), do: nil
end
diff --git a/lib/pleroma/http.ex b/lib/pleroma/http.ex
index 2e82ceff2..eec61cf14 100644
--- a/lib/pleroma/http.ex
+++ b/lib/pleroma/http.ex
@@ -106,5 +106,16 @@ defp adapter_middlewares(Tesla.Adapter.Gun) do
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
end
- defp adapter_middlewares(_), do: []
+ defp adapter_middlewares({Tesla.Adapter.Finch, _}) do
+ [Tesla.Middleware.FollowRedirects]
+ end
+
+ defp adapter_middlewares(_) do
+ if Pleroma.Config.get(:env) == :test do
+ # Emulate redirects in test env, which are handled by adapters in other environments
+ [Tesla.Middleware.FollowRedirects]
+ else
+ []
+ end
+ end
end
diff --git a/lib/pleroma/http/adapter_helper.ex b/lib/pleroma/http/adapter_helper.ex
index 252a6aba5..dcb27a29d 100644
--- a/lib/pleroma/http/adapter_helper.ex
+++ b/lib/pleroma/http/adapter_helper.ex
@@ -15,8 +15,8 @@ defmodule Pleroma.HTTP.AdapterHelper do
require Logger
@type proxy ::
- {Connection.host(), pos_integer()}
- | {Connection.proxy_type(), Connection.host(), pos_integer()}
+ {host(), pos_integer()}
+ | {proxy_type(), host(), pos_integer()}
@callback options(keyword(), URI.t()) :: keyword()
@@ -70,15 +70,15 @@ def parse_proxy(proxy) when is_binary(proxy) do
{:ok, parse_host(host), port}
else
{_, _} ->
- Logger.warn("Parsing port failed #{inspect(proxy)}")
+ Logger.warning("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
:error ->
- Logger.warn("Parsing port failed #{inspect(proxy)}")
+ Logger.warning("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
_ ->
- Logger.warn("Parsing proxy failed #{inspect(proxy)}")
+ Logger.warning("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
@@ -88,7 +88,7 @@ def parse_proxy(proxy) when is_tuple(proxy) do
{:ok, type, parse_host(host), port}
else
_ ->
- Logger.warn("Parsing proxy failed #{inspect(proxy)}")
+ Logger.warning("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
diff --git a/lib/pleroma/http/request_builder.ex b/lib/pleroma/http/request_builder.ex
index f16fb3b35..0a028a64c 100644
--- a/lib/pleroma/http/request_builder.ex
+++ b/lib/pleroma/http/request_builder.ex
@@ -54,12 +54,12 @@ def opts(request, options), do: %{request | opts: options}
@doc """
Add optional parameters to the request
"""
- @spec add_param(Request.t(), atom(), atom(), any()) :: Request.t()
+ @spec add_param(Request.t(), atom(), atom() | String.t(), any()) :: Request.t()
def add_param(request, :query, :query, values), do: %{request | query: values}
def add_param(request, :body, :body, value), do: %{request | body: value}
- def add_param(request, :body, key, value) do
+ def add_param(request, :body, key, value) when is_binary(key) do
request
|> Map.put(:body, Multipart.new())
|> Map.update!(
diff --git a/lib/pleroma/http/web_push.ex b/lib/pleroma/http/web_push.ex
index ca399b6c8..888079c1e 100644
--- a/lib/pleroma/http/web_push.ex
+++ b/lib/pleroma/http/web_push.ex
@@ -6,7 +6,11 @@ defmodule Pleroma.HTTP.WebPush do
@moduledoc false
def post(url, payload, headers, options \\ []) do
- list_headers = Map.to_list(headers)
+ list_headers =
+ headers
+ |> Map.to_list()
+ |> Kernel.++([{"content-type", "octet-stream"}])
+
Pleroma.HTTP.post(url, payload, list_headers, options)
end
end
diff --git a/lib/pleroma/instances.ex b/lib/pleroma/instances.ex
index 782948f83..b6d83f591 100644
--- a/lib/pleroma/instances.ex
+++ b/lib/pleroma/instances.ex
@@ -7,16 +7,15 @@ defmodule Pleroma.Instances do
alias Pleroma.Instances.Instance
- def filter_reachable(urls_or_hosts), do: Instance.filter_reachable(urls_or_hosts)
+ defdelegate filter_reachable(urls_or_hosts), to: Instance
- def reachable?(url_or_host), do: Instance.reachable?(url_or_host)
+ defdelegate reachable?(url_or_host), to: Instance
- def set_reachable(url_or_host), do: Instance.set_reachable(url_or_host)
+ defdelegate set_reachable(url_or_host), to: Instance
- def set_unreachable(url_or_host, unreachable_since \\ nil),
- do: Instance.set_unreachable(url_or_host, unreachable_since)
+ defdelegate set_unreachable(url_or_host, unreachable_since \\ nil), to: Instance
- def get_consistently_unreachable, do: Instance.get_consistently_unreachable()
+ defdelegate get_consistently_unreachable, to: Instance
def set_consistently_unreachable(url_or_host),
do: set_unreachable(url_or_host, reachability_datetime_threshold())
diff --git a/lib/pleroma/instances/instance.ex b/lib/pleroma/instances/instance.ex
index a5529ad44..c497a4fb7 100644
--- a/lib/pleroma/instances/instance.ex
+++ b/lib/pleroma/instances/instance.ex
@@ -7,6 +7,7 @@ defmodule Pleroma.Instances.Instance do
alias Pleroma.Instances
alias Pleroma.Instances.Instance
+ alias Pleroma.Maps
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Workers.BackgroundWorker
@@ -24,6 +25,14 @@ defmodule Pleroma.Instances.Instance do
field(:favicon, :string)
field(:favicon_updated_at, :naive_datetime)
+ embeds_one :metadata, Pleroma.Instances.Metadata, primary_key: false do
+ field(:software_name, :string)
+ field(:software_version, :string)
+ field(:software_repository, :string)
+ end
+
+ field(:metadata_updated_at, :utc_datetime)
+
timestamps()
end
@@ -31,11 +40,17 @@ defmodule Pleroma.Instances.Instance do
def changeset(struct, params \\ %{}) do
struct
- |> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
+ |> cast(params, __schema__(:fields) -- [:metadata])
+ |> cast_embed(:metadata, with: &metadata_changeset/2)
|> validate_required([:host])
|> unique_constraint(:host)
end
+ def metadata_changeset(struct, params \\ %{}) do
+ struct
+ |> cast(params, [:software_name, :software_version, :software_repository])
+ end
+
def filter_reachable([]), do: %{}
def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do
@@ -82,13 +97,9 @@ def reachable?(url_or_host) when is_binary(url_or_host) do
def reachable?(url_or_host) when is_binary(url_or_host), do: true
def set_reachable(url_or_host) when is_binary(url_or_host) do
- with host <- host(url_or_host),
- %Instance{} = existing_record <- Repo.get_by(Instance, %{host: host}) do
- {:ok, _instance} =
- existing_record
- |> changeset(%{unreachable_since: nil})
- |> Repo.update()
- end
+ %Instance{host: host(url_or_host)}
+ |> changeset(%{unreachable_since: nil})
+ |> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
end
def set_reachable(_), do: {:error, nil}
@@ -162,7 +173,7 @@ def get_or_update_favicon(%URI{host: host} = instance_uri) do
end
rescue
e ->
- Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
+ Logger.warning("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
nil
end
@@ -190,7 +201,7 @@ defp scrape_favicon(%URI{} = instance_uri) do
end
rescue
e ->
- Logger.warn(
+ Logger.warning(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
)
@@ -198,6 +209,89 @@ defp scrape_favicon(%URI{} = instance_uri) do
end
end
+ def get_or_update_metadata(%URI{host: host} = instance_uri) do
+ existing_record = Repo.get_by(Instance, %{host: host})
+ now = NaiveDateTime.utc_now()
+
+ if existing_record && existing_record.metadata_updated_at &&
+ NaiveDateTime.diff(now, existing_record.metadata_updated_at) < 86_400 do
+ existing_record.metadata
+ else
+ metadata = scrape_metadata(instance_uri)
+
+ if existing_record do
+ existing_record
+ |> changeset(%{metadata: metadata, metadata_updated_at: now})
+ |> Repo.update()
+ else
+ %Instance{}
+ |> changeset(%{host: host, metadata: metadata, metadata_updated_at: now})
+ |> Repo.insert()
+ end
+
+ metadata
+ end
+ end
+
+ defp get_nodeinfo_uri(well_known) do
+ links = Map.get(well_known, "links", [])
+
+ nodeinfo21 =
+ Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.1"))["href"]
+
+ nodeinfo20 =
+ Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))["href"]
+
+ cond do
+ is_binary(nodeinfo21) -> {:ok, nodeinfo21}
+ is_binary(nodeinfo20) -> {:ok, nodeinfo20}
+ true -> {:error, :no_links}
+ end
+ end
+
+ defp scrape_metadata(%URI{} = instance_uri) do
+ try do
+ with {_, true} <- {:reachable, reachable?(instance_uri.host)},
+ {:ok, %Tesla.Env{body: well_known_body}} <-
+ instance_uri
+ |> URI.merge("/.well-known/nodeinfo")
+ |> to_string()
+ |> Pleroma.HTTP.get([{"accept", "application/json"}]),
+ {:ok, well_known_json} <- Jason.decode(well_known_body),
+ {:ok, nodeinfo_uri} <- get_nodeinfo_uri(well_known_json),
+ {:ok, %Tesla.Env{body: nodeinfo_body}} <-
+ Pleroma.HTTP.get(nodeinfo_uri, [{"accept", "application/json"}]),
+ {:ok, nodeinfo} <- Jason.decode(nodeinfo_body) do
+ # Can extract more metadata from NodeInfo but need to be careful about its size,
+ # can't just dump the entire thing
+ software = Map.get(nodeinfo, "software", %{})
+
+ %{
+ software_name: software["name"],
+ software_version: software["version"]
+ }
+ |> Maps.put_if_present(:software_repository, software["repository"])
+ else
+ {:reachable, false} ->
+ Logger.debug(
+ "Instance.scrape_metadata(\"#{to_string(instance_uri)}\") ignored unreachable host"
+ )
+
+ nil
+
+ _ ->
+ nil
+ end
+ rescue
+ e ->
+ Logger.warning(
+ "Instance.scrape_metadata(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
+ )
+
+ nil
+ end
+ end
+
@doc """
Deletes all users from an instance in a background task, thus also deleting
all of those users' activities and notifications.
diff --git a/lib/pleroma/maintenance.ex b/lib/pleroma/maintenance.ex
index eb5a6ef42..1e39b03e6 100644
--- a/lib/pleroma/maintenance.ex
+++ b/lib/pleroma/maintenance.ex
@@ -20,7 +20,7 @@ def vacuum(args) do
"full" ->
Logger.info("Running VACUUM FULL.")
- Logger.warn(
+ Logger.warning(
"Re-packing your entire database may take a while and will consume extra disk space during the process."
)
diff --git a/lib/pleroma/maps.ex b/lib/pleroma/maps.ex
index 6d586e53e..5020a8ff8 100644
--- a/lib/pleroma/maps.ex
+++ b/lib/pleroma/maps.ex
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
+# Copyright © 2017-2024 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Maps do
@@ -18,4 +18,17 @@ def safe_put_in(data, keys, value) when is_map(data) and is_list(keys) do
rescue
_ -> data
end
+
+ def filter_empty_values(data) do
+ # TODO: Change to Map.filter in Elixir 1.13+
+ data
+ |> Enum.filter(fn
+ {_k, nil} -> false
+ {_k, ""} -> false
+ {_k, []} -> false
+ {_k, %{} = v} -> Map.keys(v) != []
+ {_k, _v} -> true
+ end)
+ |> Map.new()
+ end
end
diff --git a/lib/pleroma/mfa.ex b/lib/pleroma/mfa.ex
index 01b730c76..ce30cd4ad 100644
--- a/lib/pleroma/mfa.ex
+++ b/lib/pleroma/mfa.ex
@@ -77,7 +77,7 @@ def generate_backup_codes(%User{} = user) do
{:ok, codes}
else
{:error, msg} ->
- %{error: msg}
+ {:error, msg}
end
end
diff --git a/lib/pleroma/mfa/totp.ex b/lib/pleroma/mfa/totp.ex
index 429c4b700..96fa8d71d 100644
--- a/lib/pleroma/mfa/totp.ex
+++ b/lib/pleroma/mfa/totp.ex
@@ -14,6 +14,7 @@ defmodule Pleroma.MFA.TOTP do
@doc """
https://github.com/google/google-authenticator/wiki/Key-Uri-Format
"""
+ @spec provisioning_uri(String.t(), String.t(), list()) :: String.t()
def provisioning_uri(secret, label, opts \\ []) do
query =
%{
@@ -27,7 +28,7 @@ def provisioning_uri(secret, label, opts \\ []) do
|> URI.encode_query()
%URI{scheme: "otpauth", host: "totp", path: "/" <> label, query: query}
- |> URI.to_string()
+ |> to_string()
end
defp default_period, do: Config.get(@config_ns ++ [:period])
diff --git a/lib/pleroma/migrators/context_objects_deletion_migrator.ex b/lib/pleroma/migrators/context_objects_deletion_migrator.ex
new file mode 100644
index 000000000..fb224795a
--- /dev/null
+++ b/lib/pleroma/migrators/context_objects_deletion_migrator.ex
@@ -0,0 +1,139 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Migrators.ContextObjectsDeletionMigrator do
+ defmodule State do
+ use Pleroma.Migrators.Support.BaseMigratorState
+
+ @impl Pleroma.Migrators.Support.BaseMigratorState
+ defdelegate data_migration(), to: Pleroma.DataMigration, as: :delete_context_objects
+ end
+
+ use Pleroma.Migrators.Support.BaseMigrator
+
+ alias Pleroma.Migrators.Support.BaseMigrator
+ alias Pleroma.Object
+
+ @doc "This migration removes objects created exclusively for contexts, containing only an `id` field."
+
+ @impl BaseMigrator
+ def feature_config_path, do: [:features, :delete_context_objects]
+
+ @impl BaseMigrator
+ def fault_rate_allowance, do: Config.get([:delete_context_objects, :fault_rate_allowance], 0)
+
+ @impl BaseMigrator
+ def perform do
+ data_migration_id = data_migration_id()
+ max_processed_id = get_stat(:max_processed_id, 0)
+
+ Logger.info("Deleting context objects from `objects` (from oid: #{max_processed_id})...")
+
+ query()
+ |> where([object], object.id > ^max_processed_id)
+ |> Repo.chunk_stream(100, :batches, timeout: :infinity)
+ |> Stream.each(fn objects ->
+ object_ids = Enum.map(objects, & &1.id)
+
+ results = Enum.map(object_ids, &delete_context_object(&1))
+
+ failed_ids =
+ results
+ |> Enum.filter(&(elem(&1, 0) == :error))
+ |> Enum.map(&elem(&1, 1))
+
+ chunk_affected_count =
+ results
+ |> Enum.filter(&(elem(&1, 0) == :ok))
+ |> length()
+
+ for failed_id <- failed_ids do
+ _ =
+ Repo.query(
+ "INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <>
+ "VALUES ($1, $2) ON CONFLICT DO NOTHING;",
+ [data_migration_id, failed_id]
+ )
+ end
+
+ _ =
+ Repo.query(
+ "DELETE FROM data_migration_failed_ids " <>
+ "WHERE data_migration_id = $1 AND record_id = ANY($2)",
+ [data_migration_id, object_ids -- failed_ids]
+ )
+
+ max_object_id = Enum.at(object_ids, -1)
+
+ put_stat(:max_processed_id, max_object_id)
+ increment_stat(:iteration_processed_count, length(object_ids))
+ increment_stat(:processed_count, length(object_ids))
+ increment_stat(:failed_count, length(failed_ids))
+ increment_stat(:affected_count, chunk_affected_count)
+ put_stat(:records_per_second, records_per_second())
+ persist_state()
+
+ # A quick and dirty approach to controlling the load this background migration imposes
+ sleep_interval = Config.get([:delete_context_objects, :sleep_interval_ms], 0)
+ Process.sleep(sleep_interval)
+ end)
+ |> Stream.run()
+ end
+
+ @impl BaseMigrator
+ def query do
+ # Context objects have no activity type, and only one field, `id`.
+ # Only context objects lack a type, so selecting on a missing type is safe here.
+ from(
+ object in Object,
+ where: fragment("(?)->'type' IS NULL", object.data),
+ select: %{
+ id: object.id
+ }
+ )
+ end
+
+ @spec delete_context_object(integer()) :: {:ok | :error, integer()}
+ defp delete_context_object(id) do
+ result =
+ %Object{id: id}
+ |> Repo.delete()
+ |> elem(0)
+
+ {result, id}
+ end
+
+ @impl BaseMigrator
+ def retry_failed do
+ data_migration_id = data_migration_id()
+
+ failed_objects_query()
+ |> Repo.chunk_stream(100, :one)
+ |> Stream.each(fn object ->
+ with {res, _} when res != :error <- delete_context_object(object.id) do
+ _ =
+ Repo.query(
+ "DELETE FROM data_migration_failed_ids " <>
+ "WHERE data_migration_id = $1 AND record_id = $2",
+ [data_migration_id, object.id]
+ )
+ end
+ end)
+ |> Stream.run()
+
+ put_stat(:failed_count, failures_count())
+ persist_state()
+
+ force_continue()
+ end
+
+ defp failed_objects_query do
+ from(o in Object)
+ |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"),
+ on: dmf.record_id == o.id
+ )
+ |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id())
+ |> order_by([o], asc: o.id)
+ end
+end
diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex
index fa1190b7d..bd4dd2f1d 100644
--- a/lib/pleroma/migrators/hashtags_table_migrator.ex
+++ b/lib/pleroma/migrators/hashtags_table_migrator.ex
@@ -100,7 +100,7 @@ def query do
|> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id))
end
- @spec transfer_object_hashtags(Map.t()) :: {:noop | :ok | :error, integer()}
+ @spec transfer_object_hashtags(map()) :: {:noop | :ok | :error, integer()}
defp transfer_object_hashtags(object) do
embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"]
hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags})
@@ -183,7 +183,7 @@ def delete_non_create_activities_hashtags do
DELETE FROM hashtags_objects WHERE object_id IN
(SELECT DISTINCT objects.id FROM objects
JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities
- ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') =
+ ON associated_object_id(activities) =
(objects.data->>'id')
AND activities.data->>'type' = 'Create'
WHERE activities.id IS NULL);
diff --git a/lib/pleroma/migrators/support/base_migrator.ex b/lib/pleroma/migrators/support/base_migrator.ex
index 3bcd59fd0..76a5d4590 100644
--- a/lib/pleroma/migrators/support/base_migrator.ex
+++ b/lib/pleroma/migrators/support/base_migrator.ex
@@ -73,7 +73,7 @@ def handle_continue(:init_state, _state) do
data_migration.state == :manual or data_migration.name in manual_migrations ->
message = "Data migration is in manual execution or manual fix mode."
update_status(:manual, message)
- Logger.warn("#{__MODULE__}: #{message}")
+ Logger.warning("#{__MODULE__}: #{message}")
data_migration.state == :complete ->
on_complete(data_migration)
@@ -109,7 +109,7 @@ def handle_info(:perform, state) do
Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`.
"""
- Logger.warn("#{__MODULE__}: #{message}")
+ Logger.warning("#{__MODULE__}: #{message}")
update_status(:manual, message)
on_complete(data_migration())
@@ -125,7 +125,7 @@ def handle_info(:perform, state) do
defp on_complete(data_migration) do
if data_migration.feature_lock || feature_state() == :disabled do
- Logger.warn(
+ Logger.warning(
"#{__MODULE__}: migration complete but feature is locked; consider enabling."
)
@@ -188,10 +188,11 @@ defp update_status(status, message \\ nil) do
end
defp fault_rate do
- with failures_count when is_integer(failures_count) <- failures_count() do
+ with failures_count when is_integer(failures_count) <- failures_count(),
+ true <- failures_count > 0 do
failures_count / Enum.max([get_stat(:affected_count, 0), 1])
else
- _ -> :error
+ _ -> 0
end
end
diff --git a/lib/pleroma/moderation_log.ex b/lib/pleroma/moderation_log.ex
index 7203423e2..5c3ca58b0 100644
--- a/lib/pleroma/moderation_log.ex
+++ b/lib/pleroma/moderation_log.ex
@@ -121,7 +121,7 @@ defp prepare_log_data(%{actor: actor, action: action} = attrs) do
defp prepare_log_data(attrs), do: attrs
- @spec insert_log(log_params()) :: {:ok, ModerationLog} | {:error, any}
+ @spec insert_log(log_params()) :: {:ok, ModerationLog.t()} | {:error, any}
def insert_log(%{actor: %User{}, subject: subjects, permission: permission} = attrs) do
data =
attrs
@@ -248,7 +248,8 @@ def insert_log(%{actor: %User{} = actor, action: "chat_message_delete", subject_
|> insert_log_entry_with_message()
end
- @spec insert_log_entry_with_message(ModerationLog) :: {:ok, ModerationLog} | {:error, any}
+ @spec insert_log_entry_with_message(ModerationLog.t()) ::
+ {:ok, ModerationLog.t()} | {:error, any}
defp insert_log_entry_with_message(entry) do
entry.data["message"]
|> put_in(get_log_entry_message(entry))
diff --git a/lib/pleroma/notification.ex b/lib/pleroma/notification.ex
index 52fd2656b..4f714b25f 100644
--- a/lib/pleroma/notification.ex
+++ b/lib/pleroma/notification.ex
@@ -73,6 +73,7 @@ def unread_notifications_count(%User{id: user_id}) do
pleroma:report
reblog
poll
+ status
}
def changeset(%Notification{} = notification, attrs) do
@@ -88,7 +89,7 @@ def last_read_query(user) do
where: q.seen == true,
select: type(q.id, :string),
limit: 1,
- order_by: [desc: :id]
+ order_by: fragment("? desc nulls last", q.id)
)
end
@@ -117,9 +118,8 @@ def for_user_query(user, opts \\ %{}) do
|> join(:left, [n, a], object in Object,
on:
fragment(
- "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
+ "(?->>'id') = associated_object_id(?)",
object.data,
- a.data,
a.data
)
)
@@ -138,7 +138,7 @@ defp exclude_blocked(query, user, opts) do
blocked_ap_ids = opts[:blocked_users_ap_ids] || User.blocked_users_ap_ids(user)
query
- |> where([n, a], a.actor not in ^blocked_ap_ids)
+ |> where([..., user_actor: user_actor], user_actor.ap_id not in ^blocked_ap_ids)
|> FollowingRelationship.keep_following_or_not_domain_blocked(user)
end
@@ -149,7 +149,7 @@ defp exclude_blockers(query, user) do
blocker_ap_ids = User.incoming_relationships_ungrouped_ap_ids(user, [:block])
query
- |> where([n, a], a.actor not in ^blocker_ap_ids)
+ |> where([..., user_actor: user_actor], user_actor.ap_id not in ^blocker_ap_ids)
end
end
@@ -162,7 +162,7 @@ defp exclude_notification_muted(query, user, opts) do
opts[:notification_muted_users_ap_ids] || User.notification_muted_users_ap_ids(user)
query
- |> where([n, a], a.actor not in ^notification_muted_ap_ids)
+ |> where([..., user_actor: user_actor], user_actor.ap_id not in ^notification_muted_ap_ids)
|> join(:left, [n, a], tm in ThreadMute,
on: tm.user_id == ^user.id and tm.context == fragment("?->>'context'", a.data),
as: :thread_mute
@@ -179,6 +179,7 @@ defp exclude_filtered(query, user) do
from([_n, a, o] in query,
where:
fragment("not(?->>'content' ~* ?)", o.data, ^regex) or
+ fragment("?->>'content' is null", o.data) or
fragment("?->>'actor' = ?", o.data, ^user.ap_id)
)
end
@@ -193,13 +194,11 @@ defp exclude_visibility(query, %{exclude_visibilities: visibility})
|> join(:left, [n, a], mutated_activity in Pleroma.Activity,
on:
fragment(
- "COALESCE((?->'object')->>'id', ?->>'object')",
- a.data,
+ "associated_object_id(?)",
a.data
) ==
fragment(
- "COALESCE((?->'object')->>'id', ?->>'object')",
- mutated_activity.data,
+ "associated_object_id(?)",
mutated_activity.data
) and
fragment("(?->>'type' = 'Like' or ?->>'type' = 'Announce')", a.data, a.data) and
@@ -282,15 +281,10 @@ def set_read_up_to(%{id: user_id} = user, id) do
select: n.id
)
- {:ok, %{ids: {_, notification_ids}}} =
- Multi.new()
- |> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
- |> Marker.multi_set_last_read_id(user, "notifications")
- |> Repo.transaction()
-
- for_user_query(user)
- |> where([n], n.id in ^notification_ids)
- |> Repo.all()
+ Multi.new()
+ |> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
+ |> Marker.multi_set_last_read_id(user, "notifications")
+ |> Repo.transaction()
end
@spec read_one(User.t(), String.t()) ::
@@ -301,10 +295,6 @@ def read_one(%User{} = user, notification_id) do
|> Multi.update(:update, changeset(notification, %{seen: true}))
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
- |> case do
- {:ok, %{update: notification}} -> {:ok, notification}
- {:error, :update, changeset, _} -> {:error, changeset}
- end
end
end
@@ -341,14 +331,6 @@ def destroy_multiple(%{id: user_id} = _user, ids) do
|> Repo.delete_all()
end
- def destroy_multiple_from_types(%{id: user_id}, types) do
- from(n in Notification,
- where: n.user_id == ^user_id,
- where: n.type in ^types
- )
- |> Repo.delete_all()
- end
-
def dismiss(%Pleroma.Activity{} = activity) do
Notification
|> where([n], n.activity_id == ^activity.id)
@@ -371,37 +353,38 @@ def dismiss(%{id: user_id} = _user, id) do
end
end
- @spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
- def create_notifications(activity, options \\ [])
+ @spec create_notifications(Activity.t()) :: {:ok, [Notification.t()] | []}
+ def create_notifications(activity)
- def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
+ def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
object = Object.normalize(activity, fetch: false)
if object && object.data["type"] == "Answer" do
{:ok, []}
else
- do_create_notifications(activity, options)
+ do_create_notifications(activity)
end
end
- def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
- when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag"] do
- do_create_notifications(activity, options)
+ def create_notifications(%Activity{data: %{"type" => type}} = activity)
+ when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag", "Update"] do
+ do_create_notifications(activity)
end
- def create_notifications(_, _), do: {:ok, []}
+ def create_notifications(_), do: {:ok, []}
- defp do_create_notifications(%Activity{} = activity, options) do
- do_send = Keyword.get(options, :do_send, true)
+ defp do_create_notifications(%Activity{} = activity) do
+ enabled_receivers = get_notified_from_activity(activity)
- {enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
- potential_receivers = enabled_receivers ++ disabled_receivers
+ enabled_subscribers = get_notified_subscribers_from_activity(activity)
notifications =
- Enum.map(potential_receivers, fn user ->
- do_send = do_send && user in enabled_receivers
- create_notification(activity, user, do_send: do_send)
- end)
+ (Enum.map(enabled_receivers, fn user ->
+ create_notification(activity, user)
+ end) ++
+ Enum.map(enabled_subscribers -- enabled_receivers, fn user ->
+ create_notification(activity, user, type: "status")
+ end))
|> Enum.reject(&is_nil/1)
{:ok, notifications}
@@ -439,6 +422,9 @@ defp type_from_activity(%{data: %{"type" => type}} = activity) do
activity
|> type_from_activity_object()
+ "Update" ->
+ "update"
+
t ->
raise "No notification type for activity type #{t}"
end
@@ -457,7 +443,6 @@ defp type_from_activity_object(%{data: %{"type" => "Create"}} = activity) do
# TODO move to sql, too.
def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
- do_send = Keyword.get(opts, :do_send, true)
type = Keyword.get(opts, :type, type_from_activity(activity))
unless skip?(activity, user, opts) do
@@ -472,11 +457,6 @@ def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
|> Marker.multi_set_last_read_id(user, "notifications")
|> Repo.transaction()
- if do_send do
- Streamer.stream(["user", "user:notification"], notification)
- Push.send(notification)
- end
-
notification
end
end
@@ -513,7 +493,16 @@ def create_poll_notifications(%Activity{} = activity) do
def get_notified_from_activity(activity, local_only \\ true)
def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, local_only)
- when type in ["Create", "Like", "Announce", "Follow", "Move", "EmojiReact", "Flag"] do
+ when type in [
+ "Create",
+ "Like",
+ "Announce",
+ "Follow",
+ "Move",
+ "EmojiReact",
+ "Flag",
+ "Update"
+ ] do
potential_receiver_ap_ids = get_potential_receiver_ap_ids(activity)
potential_receivers =
@@ -525,13 +514,28 @@ def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, lo
|> exclude_relationship_restricted_ap_ids(activity)
|> exclude_thread_muter_ap_ids(activity)
- notification_enabled_users =
- Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
- {notification_enabled_users, potential_receivers -- notification_enabled_users}
+ Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
end
- def get_notified_from_activity(_, _local_only), do: {[], []}
+ def get_notified_from_activity(_, _local_only), do: []
+
+ def get_notified_subscribers_from_activity(activity, local_only \\ true)
+
+ def get_notified_subscribers_from_activity(
+ %Activity{data: %{"type" => "Create"}} = activity,
+ local_only
+ ) do
+ notification_enabled_ap_ids =
+ []
+ |> Utils.maybe_notify_subscribers(activity)
+
+ potential_receivers =
+ User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
+
+ Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
+ end
+
+ def get_notified_subscribers_from_activity(_, _), do: []
# For some activities, only notify the author of the object
def get_potential_receiver_ap_ids(%{data: %{"type" => type, "object" => object_id}})
@@ -550,14 +554,30 @@ def get_potential_receiver_ap_ids(%{data: %{"type" => "Follow", "object" => obje
end
def get_potential_receiver_ap_ids(%{data: %{"type" => "Flag", "actor" => actor}}) do
- (User.all_superusers() |> Enum.map(fn user -> user.ap_id end)) -- [actor]
+ (User.all_users_with_privilege(:reports_manage_reports)
+ |> Enum.map(fn user -> user.ap_id end)) --
+ [actor]
+ end
+
+ # Update activity: notify all who repeated this
+ def get_potential_receiver_ap_ids(%{data: %{"type" => "Update", "actor" => actor}} = activity) do
+ with %Object{data: %{"id" => object_id}} <- Object.normalize(activity, fetch: false) do
+ repeaters =
+ Activity.Queries.by_type("Announce")
+ |> Activity.Queries.by_object_id(object_id)
+ |> Activity.with_joined_user_actor()
+ |> where([a, u], u.local)
+ |> select([a, u], u.ap_id)
+ |> Repo.all()
+
+ repeaters -- [actor]
+ end
end
def get_potential_receiver_ap_ids(activity) do
[]
|> Utils.maybe_notify_to_recipients(activity)
|> Utils.maybe_notify_mentioned_recipients(activity)
- |> Utils.maybe_notify_subscribers(activity)
|> Utils.maybe_notify_followers(activity)
|> Enum.uniq()
end
@@ -624,6 +644,7 @@ def skip?(activity, user, opts \\ [])
def skip?(%Activity{} = activity, %User{} = user, opts) do
[
:self,
+ :internal,
:invisible,
:block_from_strangers,
:recently_followed,
@@ -643,6 +664,12 @@ def skip?(:self, %Activity{} = activity, %User{} = user, opts) do
end
end
+ def skip?(:internal, %Activity{} = activity, _user, _opts) do
+ actor = activity.data["actor"]
+ user = User.get_cached_by_ap_id(actor)
+ User.internal?(user)
+ end
+
def skip?(:invisible, %Activity{} = activity, _user, _opts) do
actor = activity.data["actor"]
user = User.get_cached_by_ap_id(actor)
@@ -661,7 +688,7 @@ def skip?(
cond do
opts[:type] == "poll" -> false
user.ap_id == actor -> false
- !User.following?(follower, user) -> true
+ !User.following?(user, follower) -> true
true -> false
end
end
@@ -729,4 +756,12 @@ def mark_context_as_read(%User{id: id}, context) do
)
|> Repo.update_all(set: [seen: true])
end
+
+ @spec send(list(Notification.t())) :: :ok
+ def send(notifications) do
+ Enum.each(notifications, fn notification ->
+ Streamer.stream(["user", "user:notification"], notification)
+ Push.send(notification)
+ end)
+ end
end
diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex
index fe264b5e0..55b646b12 100644
--- a/lib/pleroma/object.ex
+++ b/lib/pleroma/object.ex
@@ -40,8 +40,7 @@ def with_joined_activity(query, activity_type \\ "Create", join_type \\ :inner)
join(query, join_type, [{object, object_position}], a in Activity,
on:
fragment(
- "COALESCE(?->'object'->>'id', ?->>'object') = (? ->> 'id') AND (?->>'type' = ?) ",
- a.data,
+ "associated_object_id(?) = (? ->> 'id') AND (?->>'type' = ?) ",
a.data,
object.data,
a.data,
@@ -145,7 +144,7 @@ defp warn_on_no_object_preloaded(ap_id) do
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
end
- def normalize(_, options \\ [fetch: false])
+ def normalize(_, options \\ [fetch: false, id_only: false])
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
# Use this whenever possible, especially when walking graphs in an O(N) loop!
@@ -173,10 +172,18 @@ def normalize(%Activity{data: %{"object" => ap_id}}, options) do
def normalize(%{"id" => ap_id}, options), do: normalize(ap_id, options)
def normalize(ap_id, options) when is_binary(ap_id) do
- if Keyword.get(options, :fetch) do
- Fetcher.fetch_object_from_id!(ap_id, options)
- else
- get_cached_by_ap_id(ap_id)
+ cond do
+ Keyword.get(options, :id_only) ->
+ ap_id
+
+ Keyword.get(options, :fetch) ->
+ case Fetcher.fetch_object_from_id(ap_id, options) do
+ {:ok, object} -> object
+ _ -> nil
+ end
+
+ true ->
+ get_cached_by_ap_id(ap_id)
end
end
@@ -208,10 +215,6 @@ def get_cached_by_ap_id(ap_id) do
end
end
- def context_mapping(context) do
- Object.change(%Object{}, %{data: %{"id" => context}})
- end
-
def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ DateTime.utc_now()) do
%ObjectTombstone{
id: id,
@@ -239,17 +242,17 @@ def delete(%Object{data: %{"id" => id}} = object) do
{:ok, _} <- invalid_object_cache(object) do
cleanup_attachments(
Config.get([:instance, :cleanup_attachments]),
- %{"object" => object}
+ object
)
{:ok, object, deleted_activity}
end
end
- @spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
+ @spec cleanup_attachments(boolean(), Object.t()) ::
{:ok, Oban.Job.t() | nil}
- def cleanup_attachments(true, %{"object" => _} = params) do
- AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
+ def cleanup_attachments(true, %Object{} = object) do
+ AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{"object" => object})
end
def cleanup_attachments(_, _), do: {:ok, nil}
@@ -328,6 +331,52 @@ def decrease_replies_count(ap_id) do
end
end
+ def increase_quotes_count(ap_id) do
+ Object
+ |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
+ |> update([o],
+ set: [
+ data:
+ fragment(
+ """
+ safe_jsonb_set(?, '{quotesCount}',
+ (coalesce((?->>'quotesCount')::int, 0) + 1)::varchar::jsonb, true)
+ """,
+ o.data,
+ o.data
+ )
+ ]
+ )
+ |> Repo.update_all([])
+ |> case do
+ {1, [object]} -> set_cache(object)
+ _ -> {:error, "Not found"}
+ end
+ end
+
+ def decrease_quotes_count(ap_id) do
+ Object
+ |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
+ |> update([o],
+ set: [
+ data:
+ fragment(
+ """
+ safe_jsonb_set(?, '{quotesCount}',
+ (greatest(0, (?->>'quotesCount')::int - 1))::varchar::jsonb, true)
+ """,
+ o.data,
+ o.data
+ )
+ ]
+ )
+ |> Repo.update_all([])
+ |> case do
+ {1, [object]} -> set_cache(object)
+ _ -> {:error, "Not found"}
+ end
+ end
+
def increase_vote_count(ap_id, name, actor) do
with %Object{} = object <- Object.normalize(ap_id, fetch: false),
"Question" <- object.data["type"] do
@@ -425,4 +474,30 @@ def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do
end
def object_data_hashtags(_), do: []
+
+ def get_emoji_reactions(object) do
+ reactions = object.data["reactions"]
+
+ if is_list(reactions) or is_map(reactions) do
+ reactions
+ |> Enum.map(fn
+ [_emoji, users, _maybe_url] = item when is_list(users) ->
+ item
+
+ [emoji, users] when is_list(users) ->
+ [emoji, users, nil]
+
+ # Handle the map form, which only occurs with the legacy two-value format.
+ {emoji, users} when is_list(users) ->
+ [emoji, users, nil]
+
+ _ ->
+ nil
+ end)
+ |> Enum.reject(&is_nil/1)
+ else
+ []
+ end
+ end
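+
+ # Illustrative sketch (hypothetical data): `get_emoji_reactions/1` normalizes every
+ # stored shape to the three-element list form:
+ #
+ #     %Object{data: %{"reactions" => [["👍", ["https://example.com/users/alice"]]]}}
+ #     |> get_emoji_reactions()
+ #     #=> [["👍", ["https://example.com/users/alice"], nil]]
+ #
+ # The legacy map form (%{"👍" => [...]}) and the already-migrated
+ # [emoji, users, url] form pass through the same normalization.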
end
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index deb3dc711..af5642af4 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -4,46 +4,34 @@
defmodule Pleroma.Object.Fetcher do
alias Pleroma.HTTP
+ alias Pleroma.Instances
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
- alias Pleroma.Repo
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor
+ alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.ObjectValidator
+ alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
require Logger
require Pleroma.Constants
- defp touch_changeset(changeset) do
- updated_at =
- NaiveDateTime.utc_now()
- |> NaiveDateTime.truncate(:second)
-
- Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
- end
-
- defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
- internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
-
- Map.merge(new_data, internal_fields)
- end
-
- defp maybe_reinject_internal_fields(_, new_data), do: new_data
-
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
- defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
+ defp reinject_object(%Object{data: %{}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
- with data <- maybe_reinject_internal_fields(object, new_data),
- {:ok, data, _} <- ObjectValidator.validate(data, %{}),
- changeset <- Object.change(object, %{data: data}),
- changeset <- touch_changeset(changeset),
- {:ok, object} <- Repo.insert_or_update(changeset),
- {:ok, object} <- Object.set_cache(object) do
- {:ok, object}
+ with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
+ {:ok, new_data} <- MRF.filter(new_data),
+ {:ok, new_object, _} <-
+ Object.Updater.do_update_and_invalidate_cache(
+ object,
+ new_data,
+ _touch_changeset? = true
+ ) do
+ {:ok, new_object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
@@ -51,20 +39,11 @@ defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data)
end
end
- defp reinject_object(%Object{} = object, new_data) do
- Logger.debug("Reinjecting object #{new_data["id"]}")
-
- with new_data <- Transmogrifier.fix_object(new_data),
- data <- maybe_reinject_internal_fields(object, new_data),
- changeset <- Object.change(object, %{data: data}),
- changeset <- touch_changeset(changeset),
- {:ok, object} <- Repo.insert_or_update(changeset),
- {:ok, object} <- Object.set_cache(object) do
+ defp reinject_object(_, new_data) do
+ with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
{:ok, object}
else
- e ->
- Logger.error("Error while processing object: #{inspect(e)}")
- {:error, e}
+ e -> e
end
end
@@ -93,20 +72,25 @@ def fetch_object_from_id(id, options \\ []) do
{:object, data, Object.normalize(activity, fetch: false)} do
{:ok, object}
else
- {:allowed_depth, false} ->
- {:error, "Max thread distance exceeded."}
+ {:allowed_depth, false} = e ->
+ log_fetch_error(id, e)
+ {:error, :allowed_depth}
- {:containment, _} ->
- {:error, "Object containment failed."}
+ {:containment, reason} = e ->
+ log_fetch_error(id, e)
+ {:error, reason}
- {:transmogrifier, {:error, {:reject, e}}} ->
- {:reject, e}
+ {:transmogrifier, {:error, {:reject, reason}}} = e ->
+ log_fetch_error(id, e)
+ {:reject, reason}
- {:transmogrifier, {:reject, e}} ->
- {:reject, e}
+ {:transmogrifier, {:reject, reason}} = e ->
+ log_fetch_error(id, e)
+ {:reject, reason}
- {:transmogrifier, _} = e ->
- {:error, e}
+ {:transmogrifier, reason} = e ->
+ log_fetch_error(id, e)
+ {:error, reason}
{:object, data, nil} ->
reinject_object(%Object{}, data)
@@ -117,14 +101,21 @@ def fetch_object_from_id(id, options \\ []) do
{:fetch_object, %Object{} = object} ->
{:ok, object}
- {:fetch, {:error, error}} ->
- {:error, error}
+ {:fetch, {:error, reason}} = e ->
+ log_fetch_error(id, e)
+ {:error, reason}
e ->
- e
+ log_fetch_error(id, e)
+ {:error, e}
end
end
+ defp log_fetch_error(id, error) do
+ Logger.metadata(object: id)
+ Logger.error("Object rejected while fetching #{id} #{inspect(error)}")
+ end
+
defp prepare_activity_params(data) do
%{
"type" => "Create",
@@ -138,26 +129,6 @@ defp prepare_activity_params(data) do
|> Maps.put_if_present("bcc", data["bcc"])
end
- def fetch_object_from_id!(id, options \\ []) do
- with {:ok, object} <- fetch_object_from_id(id, options) do
- object
- else
- {:error, %Tesla.Mock.Error{}} ->
- nil
-
- {:error, "Object has been deleted"} ->
- nil
-
- {:reject, reason} ->
- Logger.info("Rejected #{id} while fetching: #{inspect(reason)}")
- nil
-
- e ->
- Logger.error("Error while fetching #{id}: #{inspect(e)}")
- nil
- end
- end
-
defp make_signature(id, date) do
uri = URI.parse(id)
@@ -200,6 +171,10 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
{:ok, body} <- get_object(id),
{:ok, data} <- safe_json_decode(body),
:ok <- Containment.contain_origin_from_id(id, data) do
+ if not Instances.reachable?(id) do
+ Instances.set_reachable(id)
+ end
+
{:ok, data}
else
{:scheme, _} ->
@@ -244,8 +219,11 @@ defp get_object(id) do
{:error, {:content_type, nil}}
end
+ {:ok, %{status: code}} when code in [401, 403] ->
+ {:error, :forbidden}
+
{:ok, %{status: code}} when code in [404, 410] ->
- {:error, "Object has been deleted"}
+ {:error, :not_found}
{:error, e} ->
{:error, e}
diff --git a/lib/pleroma/object/updater.ex b/lib/pleroma/object/updater.ex
new file mode 100644
index 000000000..b1e4870ba
--- /dev/null
+++ b/lib/pleroma/object/updater.ex
@@ -0,0 +1,290 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Object.Updater do
+ require Pleroma.Constants
+
+ alias Pleroma.Object
+ alias Pleroma.Repo
+
+ def update_content_fields(orig_object_data, updated_object) do
+ Pleroma.Constants.status_updatable_fields()
+ |> Enum.reduce(
+ %{data: orig_object_data, updated: false},
+ fn field, %{data: data, updated: updated} ->
+ updated =
+ updated or
+ (field != "updated" and
+ Map.get(updated_object, field) != Map.get(orig_object_data, field))
+
+ data =
+ if Map.has_key?(updated_object, field) do
+ Map.put(data, field, updated_object[field])
+ else
+ Map.drop(data, [field])
+ end
+
+ %{data: data, updated: updated}
+ end
+ )
+ end
+
+ def maybe_history(object) do
+ with history <- Map.get(object, "formerRepresentations"),
+ true <- is_map(history),
+ "OrderedCollection" <- Map.get(history, "type"),
+ true <- is_list(Map.get(history, "orderedItems")),
+ true <- is_integer(Map.get(history, "totalItems")) do
+ history
+ else
+ _ -> nil
+ end
+ end
+
+ def history_for(object) do
+ with history when not is_nil(history) <- maybe_history(object) do
+ history
+ else
+ _ -> history_skeleton()
+ end
+ end
+
+ defp history_skeleton do
+ %{
+ "type" => "OrderedCollection",
+ "totalItems" => 0,
+ "orderedItems" => []
+ }
+ end
+
+ def maybe_update_history(
+ updated_object,
+ orig_object_data,
+ opts
+ ) do
+ updated = opts[:updated]
+ use_history_in_new_object? = opts[:use_history_in_new_object?]
+
+ if not updated do
+ %{updated_object: updated_object, used_history_in_new_object?: false}
+ else
+ # Put edit history
+ # Note that we may have got the edit history by first fetching the object
+ {new_history, used_history_in_new_object?} =
+ with true <- use_history_in_new_object?,
+ updated_history when not is_nil(updated_history) <- maybe_history(opts[:new_data]) do
+ {updated_history, true}
+ else
+ _ ->
+ history = history_for(orig_object_data)
+
+ latest_history_item =
+ orig_object_data
+ |> Map.drop(["id", "formerRepresentations"])
+
+ updated_history =
+ history
+ |> Map.put("orderedItems", [latest_history_item | history["orderedItems"]])
+ |> Map.put("totalItems", history["totalItems"] + 1)
+
+ {updated_history, false}
+ end
+
+ updated_object =
+ updated_object
+ |> Map.put("formerRepresentations", new_history)
+
+ %{updated_object: updated_object, used_history_in_new_object?: used_history_in_new_object?}
+ end
+ end
+
+ defp maybe_update_poll(to_be_updated, updated_object) do
+ choice_key = fn
+ %{"anyOf" => [_ | _]} -> "anyOf"
+ %{"oneOf" => [_ | _]} -> "oneOf"
+ _ -> nil
+ end
+
+ with true <- to_be_updated["type"] == "Question",
+ key when not is_nil(key) <- choice_key.(updated_object),
+ true <- key == choice_key.(to_be_updated),
+ orig_choices <- to_be_updated[key] |> Enum.map(&Map.drop(&1, ["replies"])),
+ new_choices <- updated_object[key] |> Enum.map(&Map.drop(&1, ["replies"])),
+ true <- orig_choices == new_choices do
+ # Choices are the same, but counts are different
+ to_be_updated
+ |> Map.put(key, updated_object[key])
+ else
+ # Choices (or vote type) have changed, do not allow this
+ _ -> to_be_updated
+ end
+ end
+
+ # This calculates the data to be sent as the object of an Update.
+ # new_data's formerRepresentations is ignored; a history rebuilt from original_data
+ # is added to the returned data instead.
+ def make_update_object_data(original_data, new_data, date) do
+ %{data: updated_data, updated: updated} =
+ original_data
+ |> update_content_fields(new_data)
+
+ if not updated do
+ updated_data
+ else
+ %{updated_object: updated_data} =
+ updated_data
+ |> maybe_update_history(original_data, updated: updated, use_history_in_new_object?: false)
+
+ updated_data
+ |> Map.put("updated", date)
+ end
+ end
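+
+ # A rough sketch of the intended behaviour (field values are illustrative):
+ #
+ #     original = %{"type" => "Note", "content" => "old", "published" => "2022-01-01T00:00:00Z"}
+ #     new = %{"type" => "Note", "content" => "new"}
+ #     make_update_object_data(original, new, "2022-01-02T00:00:00Z")
+ #     #=> %{"type" => "Note", "content" => "new", "updated" => "2022-01-02T00:00:00Z",
+ #     #     "formerRepresentations" => %{"type" => "OrderedCollection", "totalItems" => 1, ...}}
+ #
+ # If none of the updatable fields changed, the data is returned without an
+ # "updated" timestamp or edit history.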
+
+ # This calculates the data of the new Object from an Update.
+ # new_data's formerRepresentations is considered.
+ def make_new_object_data_from_update_object(original_data, new_data) do
+ update_is_reasonable =
+ with {_, updated} when not is_nil(updated) <- {:cur_updated, new_data["updated"]},
+ {_, {:ok, updated_time, _}} <- {:cur_updated, DateTime.from_iso8601(updated)},
+ {_, last_updated} when not is_nil(last_updated) <-
+ {:last_updated, original_data["updated"] || original_data["published"]},
+ {_, {:ok, last_updated_time, _}} <-
+ {:last_updated, DateTime.from_iso8601(last_updated)},
+ :gt <- DateTime.compare(updated_time, last_updated_time) do
+ :update_everything
+ else
+ # only allow poll updates
+ {:cur_updated, _} -> :no_content_update
+ :eq -> :no_content_update
+ # allow all updates
+ {:last_updated, _} -> :update_everything
+ # allow no updates
+ _ -> false
+ end
+
+ %{
+ updated_object: updated_data,
+ used_history_in_new_object?: used_history_in_new_object?,
+ updated: updated
+ } =
+ if update_is_reasonable == :update_everything do
+ %{data: updated_data, updated: updated} =
+ original_data
+ |> update_content_fields(new_data)
+
+ updated_data
+ |> maybe_update_history(original_data,
+ updated: updated,
+ use_history_in_new_object?: true,
+ new_data: new_data
+ )
+ |> Map.put(:updated, updated)
+ else
+ %{
+ updated_object: original_data,
+ used_history_in_new_object?: false,
+ updated: false
+ }
+ end
+
+ updated_data =
+ if update_is_reasonable != false do
+ updated_data
+ |> maybe_update_poll(new_data)
+ else
+ updated_data
+ end
+
+ %{
+ updated_data: updated_data,
+ updated: updated,
+ used_history_in_new_object?: used_history_in_new_object?
+ }
+ end
+
+ def for_each_history_item(%{"orderedItems" => items} = history, _object, fun) do
+ new_items =
+ Enum.map(items, fun)
+ |> Enum.reduce_while(
+ {:ok, []},
+ fn
+ {:ok, item}, {:ok, acc} -> {:cont, {:ok, acc ++ [item]}}
+ e, _acc -> {:halt, e}
+ end
+ )
+
+ case new_items do
+ {:ok, items} -> {:ok, Map.put(history, "orderedItems", items)}
+ e -> e
+ end
+ end
+
+ def for_each_history_item(history, _, _) do
+ {:ok, history}
+ end
+
+ def do_with_history(object, fun) do
+ with history <- object["formerRepresentations"],
+ object <- Map.drop(object, ["formerRepresentations"]),
+ {_, {:ok, object}} <- {:main_body, fun.(object)},
+ {_, {:ok, history}} <- {:history_items, for_each_history_item(history, object, fun)} do
+ object =
+ if history do
+ Map.put(object, "formerRepresentations", history)
+ else
+ object
+ end
+
+ {:ok, object}
+ else
+ {:main_body, e} -> e
+ {:history_items, e} -> e
+ end
+ end
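+
+ # Usage sketch: `do_with_history/2` applies `fun` to the object itself and to every
+ # entry in its "formerRepresentations", halting on the first error. For example, a
+ # (hypothetical) sanitizer could be run over the whole edit history with:
+ #
+ #     do_with_history(object_data, fn item -> {:ok, sanitize(item)} end)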
+
+ defp maybe_touch_changeset(changeset, true) do
+ updated_at =
+ NaiveDateTime.utc_now()
+ |> NaiveDateTime.truncate(:second)
+
+ Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
+ end
+
+ defp maybe_touch_changeset(changeset, _), do: changeset
+
+ def do_update_and_invalidate_cache(orig_object, updated_object, touch_changeset? \\ false) do
+ orig_object_ap_id = updated_object["id"]
+ orig_object_data = orig_object.data
+
+ %{
+ updated_data: updated_object_data,
+ updated: updated,
+ used_history_in_new_object?: used_history_in_new_object?
+ } = make_new_object_data_from_update_object(orig_object_data, updated_object)
+
+ changeset =
+ orig_object
+ |> Repo.preload(:hashtags)
+ |> Object.change(%{data: updated_object_data})
+ |> maybe_touch_changeset(touch_changeset?)
+
+ with {:ok, new_object} <- Repo.update(changeset),
+ {:ok, _} <- Object.invalid_object_cache(new_object),
+ {:ok, _} <- Object.set_cache(new_object),
+ # metadata/utils.ex keys this cache by the object id.
+ {:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
+ if used_history_in_new_object? do
+ with create_activity when not is_nil(create_activity) <-
+ Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
+ {:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
+ nil
+ else
+ _ -> nil
+ end
+ end
+
+ {:ok, new_object, updated}
+ end
+ end
+end
diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex
index f12ca2819..8db732cc9 100644
--- a/lib/pleroma/pagination.ex
+++ b/lib/pleroma/pagination.ex
@@ -61,15 +61,16 @@ def fetch_paginated(query, params, :offset, table_binding) do
|> Repo.all()
end
- @spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
- def paginate(query, options, method \\ :keyset, table_binding \\ nil)
-
- def paginate(list, options, _method, _table_binding) when is_list(list) do
+ @spec paginate_list(list(), keyword()) :: list()
+ def paginate_list(list, options) do
offset = options[:offset] || 0
limit = options[:limit] || 0
Enum.slice(list, offset, limit)
end
+ @spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
+ def paginate(query, options, method \\ :keyset, table_binding \\ nil)
+
def paginate(query, options, :keyset, table_binding) do
query
|> restrict(:min_id, options, table_binding)
diff --git a/lib/pleroma/password/pbkdf2.ex b/lib/pleroma/password/pbkdf2.ex
index 92e9e1952..9c6d2e381 100644
--- a/lib/pleroma/password/pbkdf2.ex
+++ b/lib/pleroma/password/pbkdf2.ex
@@ -28,7 +28,7 @@ def verify_pass(password, hash) do
iterations = String.to_integer(iterations)
- digest = String.to_atom(digest)
+ digest = String.to_existing_atom(digest)
binary_hash =
KeyGenerator.generate(password, salt, digest: digest, iterations: iterations, length: 64)
diff --git a/lib/pleroma/prom_ex.ex b/lib/pleroma/prom_ex.ex
new file mode 100644
index 000000000..6608708b7
--- /dev/null
+++ b/lib/pleroma/prom_ex.ex
@@ -0,0 +1,49 @@
+defmodule Pleroma.PromEx do
+ use PromEx, otp_app: :pleroma
+
+ alias PromEx.Plugins
+
+ @impl true
+ def plugins do
+ [
+ # PromEx built in plugins
+ Plugins.Application,
+ Plugins.Beam,
+ {Plugins.Phoenix, router: Pleroma.Web.Router, endpoint: Pleroma.Web.Endpoint},
+ Plugins.Ecto,
+ Plugins.Oban
+ # Plugins.PhoenixLiveView,
+ # Plugins.Absinthe,
+ # Plugins.Broadway,
+
+ # Add your own PromEx metrics plugins
+ # Pleroma.Users.PromExPlugin
+ ]
+ end
+
+ @impl true
+ def dashboard_assigns do
+ [
+ datasource_id: Pleroma.Config.get([Pleroma.PromEx, :datasource]),
+ default_selected_interval: "30s"
+ ]
+ end
+
+ @impl true
+ def dashboards do
+ [
+ # PromEx built in Grafana dashboards
+ {:prom_ex, "application.json"},
+ {:prom_ex, "beam.json"},
+ {:prom_ex, "phoenix.json"},
+ {:prom_ex, "ecto.json"},
+ {:prom_ex, "oban.json"}
+ # {:prom_ex, "phoenix_live_view.json"},
+ # {:prom_ex, "absinthe.json"},
+ # {:prom_ex, "broadway.json"},
+
+ # Add your dashboard definitions here with the format: {:otp_app, "path_in_priv"}
+ # {:pleroma, "/grafana_dashboards/user_metrics.json"}
+ ]
+ end
+end
diff --git a/lib/pleroma/release_tasks.ex b/lib/pleroma/release_tasks.ex
index f9e8d1948..bcfcd1243 100644
--- a/lib/pleroma/release_tasks.ex
+++ b/lib/pleroma/release_tasks.ex
@@ -55,12 +55,6 @@ def create do
{:error, term} when is_binary(term) ->
IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")
-
- {:error, term} ->
- IO.puts(
- :stderr,
- "The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
- )
end
end
end
diff --git a/lib/pleroma/repo.ex b/lib/pleroma/repo.ex
index 515b0c1ff..a50a59b3b 100644
--- a/lib/pleroma/repo.ex
+++ b/lib/pleroma/repo.ex
@@ -11,8 +11,6 @@ defmodule Pleroma.Repo do
import Ecto.Query
require Logger
- defmodule Instrumenter, do: use(Prometheus.EctoInstrumenter)
-
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
diff --git a/lib/pleroma/report_note.ex b/lib/pleroma/report_note.ex
index f2ad76fa8..f59e5451b 100644
--- a/lib/pleroma/report_note.ex
+++ b/lib/pleroma/report_note.ex
@@ -23,8 +23,8 @@ defmodule Pleroma.ReportNote do
timestamps()
end
- @spec create(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t(), String.t()) ::
- {:ok, ReportNote.t()} | {:error, Changeset.t()}
+ @spec create(Ecto.UUID.t(), Ecto.UUID.t(), String.t()) ::
+ {:ok, ReportNote.t()} | {:error, Ecto.Changeset.t()}
def create(user_id, activity_id, content) do
attrs = %{
user_id: user_id,
@@ -38,8 +38,8 @@ def create(user_id, activity_id, content) do
|> Repo.insert()
end
- @spec destroy(FlakeId.Ecto.CompatType.t()) ::
- {:ok, ReportNote.t()} | {:error, Changeset.t()}
+ @spec destroy(Ecto.UUID.t()) ::
+ {:ok, ReportNote.t()} | {:error, Ecto.Changeset.t()}
def destroy(id) do
from(r in ReportNote, where: r.id == ^id)
|> Repo.one()
diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex
index 2248c2713..4d13e51fc 100644
--- a/lib/pleroma/reverse_proxy.ex
+++ b/lib/pleroma/reverse_proxy.ex
@@ -8,7 +8,7 @@ defmodule Pleroma.ReverseProxy do
~w(if-unmodified-since if-none-match) ++ @range_headers
@resp_cache_headers ~w(etag date last-modified)
@keep_resp_headers @resp_cache_headers ++
- ~w(content-length content-type content-disposition content-encoding) ++
+ ~w(content-type content-disposition content-encoding) ++
~w(content-range accept-ranges vary)
@default_cache_control_header "public, max-age=1209600"
@valid_resp_codes [200, 206, 304]
@@ -81,16 +81,16 @@ def default_cache_control_header, do: @default_cache_control_header
import Plug.Conn
@type option() ::
- {:max_read_duration, :timer.time() | :infinity}
+ {:max_read_duration, non_neg_integer() | :infinity}
| {:max_body_length, non_neg_integer() | :infinity}
- | {:failed_request_ttl, :timer.time() | :infinity}
- | {:http, []}
+ | {:failed_request_ttl, non_neg_integer() | :infinity}
+ | {:http, keyword()}
| {:req_headers, [{String.t(), String.t()}]}
| {:resp_headers, [{String.t(), String.t()}]}
- | {:inline_content_types, boolean() | [String.t()]}
+ | {:inline_content_types, boolean() | list(String.t())}
| {:redirect_on_failure, boolean()}
- @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t()
+ @spec call(Plug.Conn.t(), String.t(), list(option())) :: Plug.Conn.t()
def call(_conn, _url, _opts \\ [])
def call(conn = %{method: method}, url, opts) when method in @methods do
@@ -192,7 +192,7 @@ defp response(conn, client, url, status, headers, opts) do
halt(conn)
{:error, error, conn} ->
- Logger.warn(
+ Logger.warning(
"#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
)
@@ -388,8 +388,6 @@ defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and
defp body_size_constraint(_, _), do: :ok
- defp check_read_duration(nil = _duration, max), do: check_read_duration(@max_read_duration, max)
-
defp check_read_duration(duration, max)
when is_integer(duration) and is_integer(max) and max > 0 do
if duration > max do
@@ -407,10 +405,6 @@ defp increase_read_duration({previous_duration, started})
{:ok, previous_duration + duration}
end
- defp increase_read_duration(_) do
- {:ok, :no_duration_limit, :no_duration_limit}
- end
-
defp client, do: Pleroma.ReverseProxy.Client.Wrapper
defp track_failed_url(url, error, opts) do
diff --git a/lib/pleroma/rule.ex b/lib/pleroma/rule.ex
new file mode 100644
index 000000000..3ba413214
--- /dev/null
+++ b/lib/pleroma/rule.ex
@@ -0,0 +1,68 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Rule do
+ use Ecto.Schema
+
+ import Ecto.Changeset
+ import Ecto.Query
+
+ alias Pleroma.Repo
+ alias Pleroma.Rule
+
+ schema "rules" do
+ field(:priority, :integer, default: 0)
+ field(:text, :string)
+ field(:hint, :string)
+
+ timestamps()
+ end
+
+ def changeset(%Rule{} = rule, params \\ %{}) do
+ rule
+ |> cast(params, [:priority, :text, :hint])
+ |> validate_required([:text])
+ end
+
+ def query do
+ Rule
+ |> order_by(asc: :priority)
+ |> order_by(asc: :id)
+ end
+
+ def get(ids) when is_list(ids) do
+ from(r in __MODULE__, where: r.id in ^ids)
+ |> Repo.all()
+ end
+
+ def get(id), do: Repo.get(__MODULE__, id)
+
+ def exists?(id) do
+ from(r in __MODULE__, where: r.id == ^id)
+ |> Repo.exists?()
+ end
+
+ def create(params) do
+ {:ok, rule} =
+ %Rule{}
+ |> changeset(params)
+ |> Repo.insert()
+
+ rule
+ end
+
+ def update(params, id) do
+ {:ok, rule} =
+ get(id)
+ |> changeset(params)
+ |> Repo.update()
+
+ rule
+ end
+
+ def delete(id) do
+ get(id)
+ |> Repo.delete()
+ end
+end
diff --git a/lib/pleroma/scheduled_activity.ex b/lib/pleroma/scheduled_activity.ex
index a7be58512..63c6cb45b 100644
--- a/lib/pleroma/scheduled_activity.ex
+++ b/lib/pleroma/scheduled_activity.ex
@@ -6,7 +6,6 @@ defmodule Pleroma.ScheduledActivity do
use Ecto.Schema
alias Ecto.Multi
- alias Pleroma.Config
alias Pleroma.Repo
alias Pleroma.ScheduledActivity
alias Pleroma.User
@@ -20,6 +19,8 @@ defmodule Pleroma.ScheduledActivity do
@min_offset :timer.minutes(5)
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
schema "scheduled_activities" do
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
field(:scheduled_at, :naive_datetime)
@@ -40,7 +41,11 @@ defp with_media_attachments(
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
)
when is_list(media_ids) do
- media_attachments = Utils.attachments_from_ids(%{media_ids: media_ids})
+ media_attachments =
+ Utils.attachments_from_ids(
+ %{media_ids: media_ids},
+ User.get_cached_by_id(changeset.data.user_id)
+ )
params =
params
@@ -83,7 +88,7 @@ def exceeds_daily_user_limit?(user_id, scheduled_at) do
|> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
|> select([sa], count(sa.id))
|> Repo.one()
- |> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
+ |> Kernel.>=(@config_impl.get([ScheduledActivity, :daily_user_limit]))
end
def exceeds_total_user_limit?(user_id) do
@@ -91,7 +96,7 @@ def exceeds_total_user_limit?(user_id) do
|> where(user_id: ^user_id)
|> select([sa], count(sa.id))
|> Repo.one()
- |> Kernel.>=(Config.get([ScheduledActivity, :total_user_limit]))
+ |> Kernel.>=(@config_impl.get([ScheduledActivity, :total_user_limit]))
end
def far_enough?(scheduled_at) when is_binary(scheduled_at) do
@@ -119,7 +124,7 @@ def new(%User{} = user, attrs) do
def create(%User{} = user, attrs) do
Multi.new()
|> Multi.insert(:scheduled_activity, new(user, attrs))
- |> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
+ |> maybe_add_jobs(@config_impl.get([ScheduledActivity, :enabled]))
|> Repo.transaction()
|> transaction_response
end
diff --git a/lib/pleroma/search.ex b/lib/pleroma/search.ex
new file mode 100644
index 000000000..fd0218cb8
--- /dev/null
+++ b/lib/pleroma/search.ex
@@ -0,0 +1,21 @@
+defmodule Pleroma.Search do
+ alias Pleroma.Workers.SearchIndexingWorker
+
+ def add_to_index(%Pleroma.Activity{id: activity_id}) do
+ SearchIndexingWorker.enqueue("add_to_index", %{"activity" => activity_id})
+ end
+
+ def remove_from_index(%Pleroma.Object{id: object_id}) do
+ SearchIndexingWorker.enqueue("remove_from_index", %{"object" => object_id})
+ end
+
+ def search(query, options) do
+ search_module = Pleroma.Config.get([Pleroma.Search, :module])
+ search_module.search(options[:for_user], query, options)
+ end
+
+ def healthcheck_endpoints do
+ search_module = Pleroma.Config.get([Pleroma.Search, :module])
+ search_module.healthcheck_endpoints
+ end
+end
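+
+# Minimal usage sketch (the backend module comes from config; values below are examples):
+#
+#     config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
+#
+#     Pleroma.Search.search("federation", for_user: user, limit: 20)
+#     #=> [%Pleroma.Activity{}, ...]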
diff --git a/lib/pleroma/activity/search.ex b/lib/pleroma/search/database_search.ex
similarity index 76%
rename from lib/pleroma/activity/search.ex
rename to lib/pleroma/search/database_search.ex
index 694dc5709..11e99e7f1 100644
--- a/lib/pleroma/activity/search.ex
+++ b/lib/pleroma/search/database_search.ex
@@ -1,9 +1,10 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
+# Copyright © 2017-2021 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
-defmodule Pleroma.Activity.Search do
+defmodule Pleroma.Search.DatabaseSearch do
alias Pleroma.Activity
+ alias Pleroma.Config
alias Pleroma.Object.Fetcher
alias Pleroma.Pagination
alias Pleroma.User
@@ -13,25 +14,21 @@ defmodule Pleroma.Activity.Search do
import Ecto.Query
+ @behaviour Pleroma.Search.SearchBackend
+
+ @impl true
def search(user, search_query, options \\ []) do
- index_type = if Pleroma.Config.get([:database, :rum_enabled]), do: :rum, else: :gin
+ index_type = if Config.get([:database, :rum_enabled]), do: :rum, else: :gin
limit = Enum.min([Keyword.get(options, :limit), 40])
offset = Keyword.get(options, :offset, 0)
author = Keyword.get(options, :author)
- search_function =
- if :persistent_term.get({Pleroma.Repo, :postgres_version}) >= 11 do
- :websearch
- else
- :plain
- end
-
try do
Activity
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
- |> restrict_public()
- |> query_with(index_type, search_query, search_function)
+ |> restrict_public(user)
+ |> query_with(index_type, search_query, :websearch)
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
|> maybe_restrict_blocked(user)
@@ -45,6 +42,15 @@ def search(user, search_query, options \\ []) do
end
end
+ @impl true
+ def add_to_index(_activity), do: :ok
+
+ @impl true
+ def remove_from_index(_object), do: :ok
+
+ @impl true
+ def healthcheck_endpoints, do: nil
+
def maybe_restrict_author(query, %User{} = author) do
Activity.Queries.by_author(query, author)
end
@@ -57,7 +63,19 @@ def maybe_restrict_blocked(query, %User{} = user) do
def maybe_restrict_blocked(query, _), do: query
- defp restrict_public(q) do
+ defp restrict_public(q, user) when not is_nil(user) do
+ intended_recipients = [
+ Pleroma.Constants.as_public(),
+ Pleroma.Web.ActivityPub.Utils.as_local_public()
+ ]
+
+ from([a, o] in q,
+ where: fragment("?->>'type' = 'Create'", a.data),
+ where: fragment("? && ?", ^intended_recipients, a.recipients)
+ )
+ end
+
+ defp restrict_public(q, _user) do
from([a, o] in q,
where: fragment("?->>'type' = 'Create'", a.data),
where: ^Pleroma.Constants.as_public() in a.recipients
@@ -124,8 +142,8 @@ defp query_with(q, :rum, search_query, :websearch) do
)
end
- defp maybe_restrict_local(q, user) do
- limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
+ def maybe_restrict_local(q, user) do
+ limit = Config.get([:instance, :limit_to_local_content], :unauthenticated)
case {limit, user} do
{:all, _} -> restrict_local(q)
@@ -137,7 +155,7 @@ defp maybe_restrict_local(q, user) do
defp restrict_local(q), do: where(q, local: true)
- defp maybe_fetch(activities, user, search_query) do
+ def maybe_fetch(activities, user, search_query) do
with true <- Regex.match?(~r/https?:/, search_query),
{:ok, object} <- Fetcher.fetch_object_from_id(search_query),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
diff --git a/lib/pleroma/search/healthcheck.ex b/lib/pleroma/search/healthcheck.ex
new file mode 100644
index 000000000..e562c8478
--- /dev/null
+++ b/lib/pleroma/search/healthcheck.ex
@@ -0,0 +1,86 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+defmodule Pleroma.Search.Healthcheck do
+ @moduledoc """
+ Monitors the health of the search backend and pauses or resumes processing of
+ search-indexing jobs based on its availability.
+ """
+ use GenServer
+ require Logger
+
+ @queue :search_indexing
+ @tick :timer.seconds(5)
+ @timeout :timer.seconds(2)
+
+ def start_link(_) do
+ GenServer.start_link(__MODULE__, [], name: __MODULE__)
+ end
+
+ @impl true
+ def init(_) do
+ state = %{healthy: false}
+ {:ok, state, {:continue, :start}}
+ end
+
+ @impl true
+ def handle_continue(:start, state) do
+ tick()
+ {:noreply, state}
+ end
+
+ @impl true
+ def handle_info(:check, state) do
+ urls = Pleroma.Search.healthcheck_endpoints()
+
+ new_state =
+ if check(urls) do
+ Oban.resume_queue(queue: @queue)
+ Map.put(state, :healthy, true)
+ else
+ Oban.pause_queue(queue: @queue)
+ Map.put(state, :healthy, false)
+ end
+
+ maybe_log_state_change(state, new_state)
+
+ tick()
+ {:noreply, new_state}
+ end
+
+ @impl true
+ def handle_call(:state, _from, state) do
+ {:reply, state, state, :hibernate}
+ end
+
+ def state, do: GenServer.call(__MODULE__, :state)
+
+ def check([]), do: true
+
+ def check(urls) when is_list(urls) do
+ Enum.all?(
+ urls,
+ fn url ->
+ case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
+ {:ok, %{status: 200}} -> true
+ _ -> false
+ end
+ end
+ )
+ end
+
+ def check(_), do: true
+
+ defp tick do
+ Process.send_after(self(), :check, @tick)
+ end
+
+ defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
+ Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
+ end
+
+ defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
+ Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
+ end
+
+ defp maybe_log_state_change(_, _), do: :ok
+end
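+
+# Behaviour sketch: every @tick the GenServer polls Pleroma.Search.healthcheck_endpoints()
+# and pauses or resumes the :search_indexing Oban queue accordingly. The current view can
+# be inspected with:
+#
+#     Pleroma.Search.Healthcheck.state()
+#     #=> %{healthy: true}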
diff --git a/lib/pleroma/search/meilisearch.ex b/lib/pleroma/search/meilisearch.ex
new file mode 100644
index 000000000..08c2f3d86
--- /dev/null
+++ b/lib/pleroma/search/meilisearch.ex
@@ -0,0 +1,192 @@
+defmodule Pleroma.Search.Meilisearch do
+ require Logger
+ require Pleroma.Constants
+
+ alias Pleroma.Activity
+ alias Pleroma.Config.Getting, as: Config
+
+ import Pleroma.Search.DatabaseSearch
+ import Ecto.Query
+
+ @behaviour Pleroma.Search.SearchBackend
+
+ defp meili_headers do
+ private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])
+
+ [{"Content-Type", "application/json"}] ++
+ if is_nil(private_key), do: [], else: [{"Authorization", "Bearer #{private_key}"}]
+ end
+
+ def meili_get(path) do
+ endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
+
+ result =
+ Pleroma.HTTP.get(
+ Path.join(endpoint, path),
+ meili_headers()
+ )
+
+ with {:ok, res} <- result do
+ {:ok, Jason.decode!(res.body)}
+ end
+ end
+
+ def meili_post(path, params) do
+ endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
+
+ result =
+ Pleroma.HTTP.post(
+ Path.join(endpoint, path),
+ Jason.encode!(params),
+ meili_headers()
+ )
+
+ with {:ok, res} <- result do
+ {:ok, Jason.decode!(res.body)}
+ end
+ end
+
+ def meili_put(path, params) do
+ endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
+
+ result =
+ Pleroma.HTTP.request(
+ :put,
+ Path.join(endpoint, path),
+ Jason.encode!(params),
+ meili_headers(),
+ []
+ )
+
+ with {:ok, res} <- result do
+ {:ok, Jason.decode!(res.body)}
+ end
+ end
+
+ def meili_delete(path) do
+ endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
+
+ with {:ok, _} <-
+ Pleroma.HTTP.request(
+ :delete,
+ Path.join(endpoint, path),
+ "",
+ meili_headers(),
+ []
+ ) do
+ :ok
+ else
+ _ -> {:error, "Could not remove from index"}
+ end
+ end
+
+ @impl true
+ def search(user, query, options \\ []) do
+ limit = Enum.min([Keyword.get(options, :limit), 40])
+ offset = Keyword.get(options, :offset, 0)
+ author = Keyword.get(options, :author)
+
+ res =
+ meili_post(
+ "/indexes/objects/search",
+ %{q: query, offset: offset, limit: limit}
+ )
+
+ with {:ok, result} <- res do
+ hits = result["hits"] |> Enum.map(& &1["ap"])
+
+ try do
+ hits
+ |> Activity.create_by_object_ap_id()
+ |> Activity.with_preloaded_object()
+ |> Activity.restrict_deactivated_users()
+ |> maybe_restrict_local(user)
+ |> maybe_restrict_author(author)
+ |> maybe_restrict_blocked(user)
+ |> maybe_fetch(user, query)
+ |> order_by([object: obj], desc: obj.data["published"])
+ |> Pleroma.Repo.all()
+ rescue
+ _ -> maybe_fetch([], user, query)
+ end
+ end
+ end
+
+ def object_to_search_data(object) do
+ # Only index public or unlisted Notes
+ if not is_nil(object) and object.data["type"] == "Note" and
+ not is_nil(object.data["content"]) and
+ (Pleroma.Constants.as_public() in object.data["to"] or
+ Pleroma.Constants.as_public() in object.data["cc"]) and
+ object.data["content"] not in ["", "."] do
+ data = object.data
+
+ content_str =
+ case data["content"] do
+ [nil | rest] -> to_string(rest)
+ str -> str
+ end
+
+ content =
+ with {:ok, scrubbed} <-
+ FastSanitize.Sanitizer.scrub(content_str, Pleroma.HTML.Scrubber.SearchIndexing),
+ trimmed <- String.trim(scrubbed) do
+ trimmed
+ end
+
+ # Make sure we have a non-empty string
+ if content != "" do
+ {:ok, published, _} = DateTime.from_iso8601(data["published"])
+
+ %{
+ id: object.id,
+ content: content,
+ ap: data["id"],
+ published: published |> DateTime.to_unix()
+ }
+ end
+ end
+ end
+
+ @impl true
+ def add_to_index(activity) do
+ maybe_search_data = object_to_search_data(activity.object)
+
+ if activity.data["type"] == "Create" and maybe_search_data do
+ result =
+ meili_put(
+ "/indexes/objects/documents",
+ [maybe_search_data]
+ )
+
+ with {:ok, %{"status" => "enqueued"}} <- result do
+ # Added successfully
+ :ok
+ else
+ _ ->
+ # There was an error, report it
+ Logger.error("Failed to add activity #{activity.id} to index: #{inspect(result)}")
+ {:error, result}
+ end
+ else
+ # The post isn't something we can search, that's ok
+ :ok
+ end
+ end
+
+ @impl true
+ def remove_from_index(object) do
+ meili_delete("/indexes/objects/documents/#{object.id}")
+ end
+
+ @impl true
+ def healthcheck_endpoints do
+ endpoint =
+ Config.get([Pleroma.Search.Meilisearch, :url])
+ |> URI.parse()
+ |> Map.put(:path, "/health")
+ |> URI.to_string()
+
+ [endpoint]
+ end
+end
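+
+# Configuration sketch (illustrative values): the backend is selected and pointed at a
+# Meilisearch instance via config, e.g.
+#
+#     config :pleroma, Pleroma.Search, module: Pleroma.Search.Meilisearch
+#
+#     config :pleroma, Pleroma.Search.Meilisearch,
+#       url: "http://127.0.0.1:7700/",
+#       private_key: "private key"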
diff --git a/lib/pleroma/search/search_backend.ex b/lib/pleroma/search/search_backend.ex
new file mode 100644
index 000000000..13c887bc2
--- /dev/null
+++ b/lib/pleroma/search/search_backend.ex
@@ -0,0 +1,32 @@
+defmodule Pleroma.Search.SearchBackend do
+ @doc """
+ Search statuses with a query, restricting to only those the user should have access to.
+ """
+ @callback search(user :: Pleroma.User.t(), query :: String.t(), options :: [any()]) :: [
+ Pleroma.Activity.t()
+ ]
+
+ @doc """
+ Add the object associated with the activity to the search index.
+
+ The whole activity is passed to allow filtering on things such as scope.
+ """
+ @callback add_to_index(activity :: Pleroma.Activity.t()) :: :ok | {:error, any()}
+
+ @doc """
+ Remove the object from the index.
+
+ Only the object is passed, rather than the whole activity, since the object holds
+ the actual content and no filtering is needed when removing from the index.
+ """
+ @callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}
+
+ @doc """
+ Returns the healthcheck endpoints of the search backend infrastructure. These are
+ polled to decide whether processing of jobs in the Oban queue should be paused or
+ resumed.
+
+ A 200 response is considered healthy; any other response is treated as unhealthy.
+ """
+ @callback healthcheck_endpoints :: list() | nil
+end
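+
+# A no-op backend satisfying the behaviour, as a sketch of the minimum contract
+# (DatabaseSearch above is the real reference implementation; the module name is
+# hypothetical):
+#
+#     defmodule MyApp.Search.Null do
+#       @behaviour Pleroma.Search.SearchBackend
+#
+#       @impl true
+#       def search(_user, _query, _options), do: []
+#
+#       @impl true
+#       def add_to_index(_activity), do: :ok
+#
+#       @impl true
+#       def remove_from_index(_object), do: :ok
+#
+#       @impl true
+#       def healthcheck_endpoints, do: nil
+#     end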
diff --git a/lib/pleroma/signature.ex b/lib/pleroma/signature.ex
index dbe6fd209..8fd422a6e 100644
--- a/lib/pleroma/signature.ex
+++ b/lib/pleroma/signature.ex
@@ -10,17 +10,14 @@ defmodule Pleroma.Signature do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
+ @known_suffixes ["/publickey", "/main-key"]
+
def key_id_to_actor_id(key_id) do
uri =
- URI.parse(key_id)
+ key_id
+ |> URI.parse()
|> Map.put(:fragment, nil)
-
- uri =
- if not is_nil(uri.path) and String.ends_with?(uri.path, "/publickey") do
- Map.put(uri, :path, String.replace(uri.path, "/publickey", ""))
- else
- uri
- end
+ |> remove_suffix(@known_suffixes)
maybe_ap_id = URI.to_string(uri)
@@ -30,12 +27,22 @@ def key_id_to_actor_id(key_id) do
_ ->
case Pleroma.Web.WebFinger.finger(maybe_ap_id) do
- %{"ap_id" => ap_id} -> {:ok, ap_id}
+ {:ok, %{"ap_id" => ap_id}} -> {:ok, ap_id}
_ -> {:error, maybe_ap_id}
end
end
end
+ defp remove_suffix(uri, [test | rest]) do
+ if not is_nil(uri.path) and String.ends_with?(uri.path, test) do
+ Map.put(uri, :path, String.replace(uri.path, test, ""))
+ else
+ remove_suffix(uri, rest)
+ end
+ end
+
+ defp remove_suffix(uri, []), do: uri
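+
+ # Sketch of the suffix/fragment stripping above (URLs are hypothetical):
+ #
+ #   "https://example.com/users/alice#main-key"  -> "https://example.com/users/alice"
+ #   "https://example.com/users/alice/main-key"  -> "https://example.com/users/alice"
+ #   "https://example.com/users/alice/publickey" -> "https://example.com/users/alice"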
+
def fetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
@@ -59,9 +66,8 @@ def refetch_public_key(conn) do
end
end
- def sign(%User{} = user, headers) do
- with {:ok, %{keys: keys}} <- User.ensure_keys_present(user),
- {:ok, private_key, _} <- Keys.keys_from_pem(keys) do
+ def sign(%User{keys: keys} = user, headers) do
+ with {:ok, private_key, _} <- Keys.keys_from_pem(keys) do
HTTPSignatures.sign(private_key, user.ap_id <> "#main-key", headers)
end
end
diff --git a/lib/pleroma/telemetry/logger.ex b/lib/pleroma/telemetry/logger.ex
index 384c70fbc..9998d8185 100644
--- a/lib/pleroma/telemetry/logger.ex
+++ b/lib/pleroma/telemetry/logger.ex
@@ -59,7 +59,7 @@ def handle_event(
_,
_
) do
- Logger.error(fn ->
+ Logger.debug(fn ->
"Connection pool had to refuse opening a connection to #{key} due to connection limit exhaustion"
end)
end
@@ -70,7 +70,7 @@ def handle_event(
%{key: key},
_
) do
- Logger.warn(fn ->
+ Logger.warning(fn ->
"Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{inspect(reason)}"
end)
end
@@ -81,7 +81,7 @@ def handle_event(
%{key: key, protocol: :http},
_
) do
- Logger.info(fn ->
+ Logger.debug(fn ->
"Pool worker for #{key}: #{length(clients)} clients are using an HTTP1 connection at the same time, head-of-line blocking might occur."
end)
end
diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex
index 8a01cf613..2c6b23c39 100644
--- a/lib/pleroma/upload.ex
+++ b/lib/pleroma/upload.ex
@@ -34,8 +34,8 @@ defmodule Pleroma.Upload do
"""
alias Ecto.UUID
- alias Pleroma.Config
alias Pleroma.Maps
+ alias Pleroma.Web.ActivityPub.Utils
require Logger
@type source ::
@@ -51,6 +51,7 @@ defmodule Pleroma.Upload do
| {:size_limit, nil | non_neg_integer()}
| {:uploader, module()}
| {:filters, [module()]}
+ | {:actor, String.t()}
@type t :: %__MODULE__{
id: String.t(),
@@ -75,6 +76,8 @@ defmodule Pleroma.Upload do
:path
]
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
defp get_description(upload) do
case {upload.description, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
{description, _} when is_binary(description) -> description
@@ -84,7 +87,7 @@ defp get_description(upload) do
end
end
- @spec store(source, options :: [option()]) :: {:ok, Map.t()} | {:error, any()}
+ @spec store(source, options :: [option()]) :: {:ok, map()} | {:error, any()}
@doc "Store a file. If using a `Plug.Upload{}` as the source, be sure to use `Majic.Plug` to ensure its content_type and filename is correct."
def store(upload, opts \\ []) do
opts = get_opts(opts)
@@ -99,6 +102,7 @@ def store(upload, opts \\ []) do
{:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do
{:ok,
%{
+ "id" => Utils.generate_object_id(),
"type" => opts.activity_type,
"mediaType" => upload.content_type,
"url" => [
@@ -172,7 +176,7 @@ defp prepare_upload(%Plug.Upload{} = file, opts) do
defp prepare_upload(%{img: "data:image/" <> image_data}, opts) do
parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
data = Base.decode64!(parsed["data"], ignore: :whitespace)
- hash = Base.encode16(:crypto.hash(:sha256, data), lower: true)
+ hash = Base.encode16(:crypto.hash(:sha256, data), case: :upper)
with :ok <- check_binary_size(data, opts.size_limit),
tmp_path <- tempfile_for_image(data),
@@ -246,18 +250,18 @@ defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
def base_url do
- uploader = Config.get([Pleroma.Upload, :uploader])
- upload_base_url = Config.get([Pleroma.Upload, :base_url])
- public_endpoint = Config.get([uploader, :public_endpoint])
+ uploader = @config_impl.get([Pleroma.Upload, :uploader])
+ upload_base_url = @config_impl.get([Pleroma.Upload, :base_url])
+ public_endpoint = @config_impl.get([uploader, :public_endpoint])
case uploader do
Pleroma.Uploaders.Local ->
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
Pleroma.Uploaders.S3 ->
- bucket = Config.get([Pleroma.Uploaders.S3, :bucket])
- truncated_namespace = Config.get([Pleroma.Uploaders.S3, :truncated_namespace])
- namespace = Config.get([Pleroma.Uploaders.S3, :bucket_namespace])
+ bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])
+ truncated_namespace = @config_impl.get([Pleroma.Uploaders.S3, :truncated_namespace])
+ namespace = @config_impl.get([Pleroma.Uploaders.S3, :bucket_namespace])
bucket_with_namespace =
cond do
diff --git a/lib/pleroma/upload/filter.ex b/lib/pleroma/upload/filter.ex
index 717f06621..809bc6e70 100644
--- a/lib/pleroma/upload/filter.ex
+++ b/lib/pleroma/upload/filter.ex
@@ -38,9 +38,9 @@ def filter([filter | rest], upload) do
{:ok, :noop} ->
filter(rest, upload)
- error ->
- Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(error)}")
- error
+ {:error, e} ->
+ Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(e)}")
+ {:error, e}
end
end
end
diff --git a/lib/pleroma/upload/filter/analyze_metadata.ex b/lib/pleroma/upload/filter/analyze_metadata.ex
index 9a76a998b..7ee643277 100644
--- a/lib/pleroma/upload/filter/analyze_metadata.ex
+++ b/lib/pleroma/upload/filter/analyze_metadata.ex
@@ -8,27 +8,28 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
"""
require Logger
+ alias Vix.Vips.Image
+ alias Vix.Vips.Operation
+
@behaviour Pleroma.Upload.Filter
@spec filter(Pleroma.Upload.t()) ::
{:ok, :filtered, Pleroma.Upload.t()} | {:ok, :noop} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) do
try do
- image =
- file
- |> Mogrify.open()
- |> Mogrify.verbose()
+ {:ok, image} = Image.new_from_file(file)
+ {width, height} = {Image.width(image), Image.height(image)}
upload =
upload
- |> Map.put(:width, image.width)
- |> Map.put(:height, image.height)
- |> Map.put(:blurhash, get_blurhash(file))
+ |> Map.put(:width, width)
+ |> Map.put(:height, height)
+ |> Map.put(:blurhash, get_blurhash(image))
{:ok, :filtered, upload}
rescue
e in ErlangError ->
- Logger.warn("#{__MODULE__}: #{inspect(e)}")
+ Logger.warning("#{__MODULE__}: #{inspect(e)}")
{:ok, :noop}
end
end
@@ -45,7 +46,7 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "video" <> _} = upload)
{:ok, :filtered, upload}
rescue
e in ErlangError ->
- Logger.warn("#{__MODULE__}: #{inspect(e)}")
+ Logger.warning("#{__MODULE__}: #{inspect(e)}")
{:ok, :noop}
end
end
@@ -53,7 +54,7 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "video" <> _} = upload)
def filter(_), do: {:ok, :noop}
defp get_blurhash(file) do
- with {:ok, blurhash} <- :eblurhash.magick(file) do
+ with {:ok, blurhash} <- vips_blurhash(file) do
blurhash
else
_ -> nil
@@ -77,7 +78,28 @@ defp media_dimensions(file) do
%{width: width, height: height}
else
nil -> {:error, {:ffprobe, :command_not_found}}
- {:error, _} = error -> error
+ error -> {:error, error}
+ end
+ end
+
+ defp vips_blurhash(%Vix.Vips.Image{} = image) do
+ with {:ok, resized_image} <- Operation.thumbnail_image(image, 100),
+ {height, width} <- {Image.height(resized_image), Image.width(resized_image)},
+ max <- max(height, width),
+ {x, y} <- {max(round(width * 5 / max), 1), max(round(height * 5 / max), 1)} do
+ {:ok, rgb} =
+ if Image.has_alpha?(resized_image) do
+ # remove alpha channel
+ resized_image
+ |> Operation.extract_band!(0, n: 3)
+ |> Image.write_to_binary()
+ else
+ Image.write_to_binary(resized_image)
+ end
+
+ Blurhash.encode(rgb, width, height, x, y)
+ else
+ _ -> nil
end
end
end
diff --git a/lib/pleroma/upload/filter/exiftool/read_description.ex b/lib/pleroma/upload/filter/exiftool/read_description.ex
index 03d698a81..8c1ed82f8 100644
--- a/lib/pleroma/upload/filter/exiftool/read_description.ex
+++ b/lib/pleroma/upload/filter/exiftool/read_description.ex
@@ -10,8 +10,6 @@ defmodule Pleroma.Upload.Filter.Exiftool.ReadDescription do
"""
@behaviour Pleroma.Upload.Filter
- @spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
-
def filter(%Pleroma.Upload{description: description})
when is_binary(description),
do: {:ok, :noop}
@@ -33,7 +31,10 @@ defp read_when_empty(current_description, _, _) when is_binary(current_descripti
defp read_when_empty(_, file, tag) do
try do
{tag_content, 0} =
- System.cmd("exiftool", ["-b", "-s3", tag, file], stderr_to_stdout: true, parallelism: true)
+ System.cmd("exiftool", ["-b", "-s3", tag, file],
+ stderr_to_stdout: false,
+ parallelism: true
+ )
tag_content = String.trim(tag_content)
diff --git a/lib/pleroma/upload/filter/exiftool/strip_location.ex b/lib/pleroma/upload/filter/exiftool/strip_location.ex
index 6100527d3..f2bcc4622 100644
--- a/lib/pleroma/upload/filter/exiftool/strip_location.ex
+++ b/lib/pleroma/upload/filter/exiftool/strip_location.ex
@@ -14,6 +14,7 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do
# Formats not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
+ def filter(%Pleroma.Upload{content_type: "image/svg" <> _}), do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do
diff --git a/lib/pleroma/upload/filter/only_media.ex b/lib/pleroma/upload/filter/only_media.ex
new file mode 100644
index 000000000..a9caeba67
--- /dev/null
+++ b/lib/pleroma/upload/filter/only_media.ex
@@ -0,0 +1,20 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Upload.Filter.OnlyMedia do
+ @behaviour Pleroma.Upload.Filter
+ alias Pleroma.Upload
+
+ def filter(%Upload{content_type: content_type}) do
+ [type, _subtype] = String.split(content_type, "/")
+
+ if type in ["image", "video", "audio"] do
+ {:ok, :noop}
+ else
+ {:error, "Disallowed content-type: #{content_type}"}
+ end
+ end
+
+ def filter(_), do: {:ok, :noop}
+end
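+
+# Enabled like any other upload filter (illustrative config):
+#
+#     config :pleroma, Pleroma.Upload,
+#       filters: [Pleroma.Upload.Filter.OnlyMedia]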
diff --git a/lib/pleroma/uploaders/s3.ex b/lib/pleroma/uploaders/s3.ex
index 19287c532..7b32bd8a5 100644
--- a/lib/pleroma/uploaders/s3.ex
+++ b/lib/pleroma/uploaders/s3.ex
@@ -6,7 +6,8 @@ defmodule Pleroma.Uploaders.S3 do
@behaviour Pleroma.Uploaders.Uploader
require Logger
- alias Pleroma.Config
+ @ex_aws_impl Application.compile_env(:pleroma, [__MODULE__, :ex_aws_impl], ExAws)
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
# The file name is re-encoded with S3's constraints here to comply with previous
# links with less strict filenames
@@ -22,7 +23,7 @@ def get_file(file) do
@impl true
def put_file(%Pleroma.Upload{} = upload) do
- config = Config.get([__MODULE__])
+ config = @config_impl.get([__MODULE__])
bucket = Keyword.get(config, :bucket)
streaming = Keyword.get(config, :streaming_enabled)
@@ -56,7 +57,7 @@ def put_file(%Pleroma.Upload{} = upload) do
])
end
- case ExAws.request(op) do
+ case @ex_aws_impl.request(op) do
{:ok, _} ->
{:ok, {:file, s3_name}}
@@ -69,9 +70,9 @@ def put_file(%Pleroma.Upload{} = upload) do
@impl true
def delete_file(file) do
[__MODULE__, :bucket]
- |> Config.get()
+ |> @config_impl.get()
|> ExAws.S3.delete_object(file)
- |> ExAws.request()
+ |> @ex_aws_impl.request()
|> case do
{:ok, %{status_code: 204}} -> :ok
error -> {:error, inspect(error)}
@@ -83,3 +84,7 @@ def strict_encode(name) do
String.replace(name, @regex, "-")
end
end
+
+defmodule Pleroma.Uploaders.S3.ExAwsAPI do
+ @callback request(op :: ExAws.Operation.t()) :: {:ok, ExAws.Operation.t()} | {:error, term()}
+end
diff --git a/lib/pleroma/uploaders/uploader.ex b/lib/pleroma/uploaders/uploader.ex
index 77f6f02dd..3396fe06a 100644
--- a/lib/pleroma/uploaders/uploader.ex
+++ b/lib/pleroma/uploaders/uploader.ex
@@ -5,8 +5,6 @@
defmodule Pleroma.Uploaders.Uploader do
import Pleroma.Web.Gettext
- @mix_env Mix.env()
-
@moduledoc """
Defines the contract to put and get an uploaded file to any backend.
"""
@@ -40,7 +38,7 @@ defmodule Pleroma.Uploaders.Uploader do
@callback delete_file(file :: String.t()) :: :ok | {:error, String.t()}
- @callback http_callback(Plug.Conn.t(), Map.t()) ::
+ @callback http_callback(Plug.Conn.t(), map()) ::
{:ok, Plug.Conn.t()}
| {:ok, Plug.Conn.t(), file_spec()}
| {:error, Plug.Conn.t(), String.t()}
@@ -75,10 +73,5 @@ defp handle_callback(uploader, upload) do
end
end
- defp callback_timeout do
- case @mix_env do
- :test -> 1_000
- _ -> 30_000
- end
- end
+ defp callback_timeout, do: Application.get_env(:pleroma, __MODULE__)[:timeout]
end
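
With the Mix.env() switch removed, callback_timeout/0 now reads the application environment, so the values previously hard-coded (30_000 ms normally, 1_000 ms under test) have to be supplied via config. A sketch keeping those same values:

    # config/config.exs
    config :pleroma, Pleroma.Uploaders.Uploader, timeout: 30_000

    # config/test.exs
    config :pleroma, Pleroma.Uploaders.Uploader, timeout: 1_000
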
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index 747a83e8d..6d6aa98b5 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -8,6 +8,7 @@ defmodule Pleroma.User do
import Ecto.Changeset
import Ecto.Query
import Ecto, only: [assoc: 2]
+ import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
alias Ecto.Multi
alias Pleroma.Activity
@@ -39,6 +40,7 @@ defmodule Pleroma.User do
alias Pleroma.Workers.BackgroundWorker
require Logger
+ require Pleroma.Constants
@type t :: %__MODULE__{}
@type account_status ::
@@ -124,7 +126,6 @@ defmodule Pleroma.User do
field(:domain_blocks, {:array, :string}, default: [])
field(:is_active, :boolean, default: true)
field(:no_rich_text, :boolean, default: false)
- field(:ap_enabled, :boolean, default: false)
field(:is_moderator, :boolean, default: false)
field(:is_admin, :boolean, default: false)
field(:show_role, :boolean, default: true)
@@ -326,7 +327,7 @@ def visible_for(%User{} = user, nil) do
end
def visible_for(%User{} = user, for_user) do
- if superuser?(for_user) do
+ if privileged?(for_user, :users_manage_activation_state) do
:visible
else
visible_account_status(user)
@@ -353,10 +354,45 @@ defp visible_account_status(user) do
end
end
- @spec superuser?(User.t()) :: boolean()
- def superuser?(%User{local: true, is_admin: true}), do: true
- def superuser?(%User{local: true, is_moderator: true}), do: true
- def superuser?(_), do: false
+ @spec privileged?(User.t(), atom()) :: boolean()
+ def privileged?(%User{is_admin: false, is_moderator: false}, _), do: false
+
+ def privileged?(
+ %User{local: true, is_admin: is_admin, is_moderator: is_moderator},
+ privilege_tag
+ ),
+ do:
+ privileged_for?(privilege_tag, is_admin, :admin_privileges) or
+ privileged_for?(privilege_tag, is_moderator, :moderator_privileges)
+
+ def privileged?(_, _), do: false
+
+ defp privileged_for?(privilege_tag, true, config_role_key),
+ do: privilege_tag in Config.get([:instance, config_role_key])
+
+ defp privileged_for?(_, _, _), do: false
+
+ @spec privileges(User.t()) :: [atom()]
+ def privileges(%User{local: false}) do
+ []
+ end
+
+ def privileges(%User{is_moderator: false, is_admin: false}) do
+ []
+ end
+
+ def privileges(%User{local: true, is_moderator: true, is_admin: true}) do
+ (Config.get([:instance, :moderator_privileges]) ++ Config.get([:instance, :admin_privileges]))
+ |> Enum.uniq()
+ end
+
+ def privileges(%User{local: true, is_moderator: true, is_admin: false}) do
+ Config.get([:instance, :moderator_privileges])
+ end
+
+ def privileges(%User{local: true, is_moderator: false, is_admin: true}) do
+ Config.get([:instance, :admin_privileges])
+ end
@spec invisible?(User.t()) :: boolean()
def invisible?(%User{invisible: true}), do: true
@@ -453,7 +489,6 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do
:nickname,
:public_key,
:avatar,
- :ap_enabled,
:banner,
:is_locked,
:last_refreshed_at,
@@ -546,7 +581,7 @@ def update_changeset(struct, params \\ %{}) do
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, min: 1, max: name_limit)
- |> validate_inclusion(:actor_type, ["Person", "Service"])
+ |> validate_inclusion(:actor_type, Pleroma.Constants.allowed_user_actor_types())
|> put_fields()
|> put_emoji()
|> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
@@ -562,9 +597,23 @@ def update_changeset(struct, params \\ %{}) do
defp put_fields(changeset) do
if raw_fields = get_change(changeset, :raw_fields) do
+ old_fields = changeset.data.raw_fields
+
raw_fields =
raw_fields
|> Enum.filter(fn %{"name" => n} -> n != "" end)
+ |> Enum.map(fn field ->
+ previous =
+ old_fields
+ |> Enum.find(fn %{"value" => value} -> field["value"] == value end)
+
+ if previous && Map.has_key?(previous, "verified_at") do
+ field
+ |> Map.put("verified_at", previous["verified_at"])
+ else
+ field
+ end
+ end)
fields =
raw_fields
@@ -611,7 +660,13 @@ defp put_change_if_present(changeset, map_field, value_function) do
{:ok, new_value} <- value_function.(value) do
put_change(changeset, map_field, new_value)
else
- _ -> changeset
+ {:error, :file_too_large} ->
+ Ecto.Changeset.validate_change(changeset, map_field, fn map_field, _value ->
+ [{map_field, "file is too large"}]
+ end)
+
+ _ ->
+ changeset
end
end
@@ -632,7 +687,7 @@ def update_as_admin_changeset(struct, params) do
|> validate_inclusion(:actor_type, ["Person", "Service"])
end
- @spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
+ @spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
def update_as_admin(user, params) do
params = Map.put(params, "password_confirmation", params["password"])
changeset = update_as_admin_changeset(user, params)
@@ -653,7 +708,7 @@ def password_update_changeset(struct, params) do
|> put_change(:password_reset_pending, false)
end
- @spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
+ @spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
def reset_password(%User{} = user, params) do
reset_password(user, user, params)
end
@@ -706,11 +761,12 @@ def register_changeset_ldap(struct, params = %{password: password})
])
|> validate_required([:name, :nickname])
|> unique_constraint(:nickname)
- |> validate_exclusion(:nickname, Config.get([User, :restricted_nicknames]))
+ |> validate_not_restricted_nickname(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> put_ap_id()
|> unique_constraint(:ap_id)
|> put_following_and_follower_and_featured_address()
+ |> put_private_key()
end
def register_changeset(struct, params \\ %{}, opts \\ []) do
@@ -754,17 +810,9 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
|> validate_confirmation(:password)
|> unique_constraint(:email)
|> validate_format(:email, @email_regex)
- |> validate_change(:email, fn :email, email ->
- valid? =
- Config.get([User, :email_blacklist])
- |> Enum.all?(fn blacklisted_domain ->
- !String.ends_with?(email, ["@" <> blacklisted_domain, "." <> blacklisted_domain])
- end)
-
- if valid?, do: [], else: [email: "Invalid email"]
- end)
+ |> validate_email_not_in_blacklisted_domain(:email)
|> unique_constraint(:nickname)
- |> validate_exclusion(:nickname, Config.get([User, :restricted_nicknames]))
+ |> validate_not_restricted_nickname(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, min: 1, max: name_limit)
@@ -776,6 +824,36 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
|> put_ap_id()
|> unique_constraint(:ap_id)
|> put_following_and_follower_and_featured_address()
+ |> put_private_key()
+ end
+
+ def validate_not_restricted_nickname(changeset, field) do
+ validate_change(changeset, field, fn _, value ->
+ valid? =
+ Config.get([User, :restricted_nicknames])
+ |> Enum.all?(fn restricted_nickname ->
+ String.downcase(value) != String.downcase(restricted_nickname)
+ end)
+
+ if valid?, do: [], else: [nickname: "Invalid nickname"]
+ end)
+ end
+
+ def validate_email_not_in_blacklisted_domain(changeset, field) do
+ validate_change(changeset, field, fn _, value ->
+ valid? =
+ Config.get([User, :email_blacklist])
+ |> Enum.all?(fn blacklisted_domain ->
+ blacklisted_domain_downcase = String.downcase(blacklisted_domain)
+
+ !String.ends_with?(String.downcase(value), [
+ "@" <> blacklisted_domain_downcase,
+ "." <> blacklisted_domain_downcase
+ ])
+ end)
+
+ if valid?, do: [], else: [email: "Invalid email"]
+ end)
end
def maybe_validate_required_email(changeset, true), do: changeset
@@ -825,6 +903,11 @@ defp put_following_and_follower_and_featured_address(changeset) do
|> put_change(:featured_address, featured)
end
+ defp put_private_key(changeset) do
+ {:ok, pem} = Keys.generate_rsa_pem()
+ put_change(changeset, :keys, pem)
+ end
+
defp autofollow_users(user) do
candidates = Config.get([:instance, :autofollowed_nicknames])
@@ -877,7 +960,7 @@ def post_register_action(%User{is_approved: true, is_confirmed: true} = user) do
end
end
- defp send_user_approval_email(user) do
+ defp send_user_approval_email(%User{email: email} = user) when is_binary(email) do
user
|> Pleroma.Emails.UserEmail.approval_pending_email()
|> Pleroma.Emails.Mailer.deliver_async()
@@ -885,6 +968,10 @@ defp send_user_approval_email(user) do
{:ok, :enqueued}
end
+ defp send_user_approval_email(_user) do
+ {:ok, :skipped}
+ end
+
defp send_admin_approval_emails(user) do
all_superusers()
|> Enum.filter(fn user -> not is_nil(user.email) end)
@@ -939,7 +1026,7 @@ def maybe_send_confirmation_email(%User{is_confirmed: false, email: email} = use
def maybe_send_confirmation_email(_), do: {:ok, :noop}
- @spec send_confirmation_email(Uset.t()) :: User.t()
+ @spec send_confirmation_email(User.t()) :: User.t()
def send_confirmation_email(%User{} = user) do
user
|> Pleroma.Emails.UserEmail.account_confirmation_email()
@@ -976,7 +1063,8 @@ def needs_update?(%User{local: false} = user) do
def needs_update?(_), do: true
- @spec maybe_direct_follow(User.t(), User.t()) :: {:ok, User.t()} | {:error, String.t()}
+ @spec maybe_direct_follow(User.t(), User.t()) ::
+ {:ok, User.t(), User.t()} | {:error, String.t()}
# "Locked" (self-locked) users demand explicit authorization of follow requests
def maybe_direct_follow(%User{} = follower, %User{local: true, is_locked: true} = followed) do
@@ -988,11 +1076,7 @@ def maybe_direct_follow(%User{} = follower, %User{local: true} = followed) do
end
def maybe_direct_follow(%User{} = follower, %User{} = followed) do
- if not ap_enabled?(followed) do
- follow(follower, followed)
- else
- {:ok, follower, followed}
- end
+ {:ok, follower, followed}
end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@@ -1129,24 +1213,14 @@ def update_and_set_cache(struct, params) do
|> update_and_set_cache()
end
- def update_and_set_cache(%{data: %Pleroma.User{} = user} = changeset) do
- was_superuser_before_update = User.superuser?(user)
-
+ def update_and_set_cache(changeset) do
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
+ if get_change(changeset, :raw_fields) do
+ BackgroundWorker.enqueue("verify_fields_links", %{"user_id" => user.id})
+ end
+
set_cache(user)
end
- |> maybe_remove_report_notifications(was_superuser_before_update)
- end
-
- defp maybe_remove_report_notifications({:ok, %Pleroma.User{} = user} = result, true) do
- if not User.superuser?(user),
- do: user |> Notification.destroy_multiple_from_types(["pleroma:report"])
-
- result
- end
-
- defp maybe_remove_report_notifications(result, _) do
- result
end
def get_user_friends_ap_ids(user) do
@@ -1330,6 +1404,40 @@ def get_friends_ids(%User{} = user, page \\ nil) do
|> Repo.all()
end
+ @spec get_familiar_followers_query(User.t(), User.t(), pos_integer() | nil) :: Ecto.Query.t()
+ def get_familiar_followers_query(%User{} = user, %User{} = current_user, nil) do
+ friends =
+ get_friends_query(current_user)
+ |> where([u], not u.hide_follows)
+ |> select([u], u.id)
+
+ User.Query.build(%{is_active: true})
+ |> where([u], u.id not in ^[user.id, current_user.id])
+ |> join(:inner, [u], r in FollowingRelationship,
+ as: :followers_relationships,
+ on: r.following_id == ^user.id and r.follower_id == u.id
+ )
+ |> where([followers_relationships: r], r.state == ^:follow_accept)
+ |> where([followers_relationships: r], r.follower_id in subquery(friends))
+ end
+
+ def get_familiar_followers_query(%User{} = user, %User{} = current_user, page) do
+ user
+ |> get_familiar_followers_query(current_user, nil)
+ |> User.Query.paginate(page, 20)
+ end
+
+ @spec get_familiar_followers_query(User.t(), User.t()) :: Ecto.Query.t()
+ def get_familiar_followers_query(%User{} = user, %User{} = current_user),
+ do: get_familiar_followers_query(user, current_user, nil)
+
+ @spec get_familiar_followers(User.t(), User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
+ def get_familiar_followers(%User{} = user, %User{} = current_user, page \\ nil) do
+ user
+ |> get_familiar_followers_query(current_user, page)
+ |> Repo.all()
+ end
+
def increase_note_count(%User{} = user) do
User
|> where(id: ^user.id)
@@ -1459,17 +1567,30 @@ def get_recipients_from_activity(%Activity{recipients: to, actor: actor}) do
{:ok, list(UserRelationship.t())} | {:error, String.t()}
def mute(%User{} = muter, %User{} = mutee, params \\ %{}) do
notifications? = Map.get(params, :notifications, true)
- expires_in = Map.get(params, :expires_in, 0)
+ duration = Map.get(params, :duration, 0)
- with {:ok, user_mute} <- UserRelationship.create_mute(muter, mutee),
+ expires_at =
+ if duration > 0 do
+ DateTime.utc_now()
+ |> DateTime.add(duration)
+ else
+ nil
+ end
+
+ with {:ok, user_mute} <- UserRelationship.create_mute(muter, mutee, expires_at),
{:ok, user_notification_mute} <-
- (notifications? && UserRelationship.create_notification_mute(muter, mutee)) ||
+ (notifications? &&
+ UserRelationship.create_notification_mute(
+ muter,
+ mutee,
+ expires_at
+ )) ||
{:ok, nil} do
- if expires_in > 0 do
+ if duration > 0 do
Pleroma.Workers.MuteExpireWorker.enqueue(
"unmute_user",
%{"muter_id" => muter.id, "mutee_id" => mutee.id},
- schedule_in: expires_in
+ scheduled_at: expires_at
)
end
@@ -1494,7 +1615,7 @@ def unmute(muter_id, mutee_id) do
unmute(muter, mutee)
else
{who, result} = error ->
- Logger.warn(
+ Logger.warning(
"User.unmute/2 failed. #{who}: #{result}, muter_id: #{muter_id}, mutee_id: #{mutee_id}"
)
@@ -1540,13 +1661,19 @@ def block(%User{} = blocker, %User{} = blocked) do
blocker
end
- # clear any requested follows as well
+ # clear any requested follows from both sides as well
blocked =
case CommonAPI.reject_follow_request(blocked, blocker) do
{:ok, %User{} = updated_blocked} -> updated_blocked
nil -> blocked
end
+ blocker =
+ case CommonAPI.reject_follow_request(blocker, blocked) do
+ {:ok, %User{} = updated_blocker} -> updated_blocker
+ nil -> blocker
+ end
+
unsubscribe(blocked, blocker)
unfollowing_blocked = Config.get([:activitypub, :unfollow_blocked], true)
@@ -1710,14 +1837,17 @@ def set_activation_async(user, status \\ true) do
BackgroundWorker.enqueue("user_activation", %{"user_id" => user.id, "status" => status})
end
- @spec set_activation([User.t()], boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
+ @spec set_activation([User.t()], boolean()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
def set_activation(users, status) when is_list(users) do
Repo.transaction(fn ->
- for user <- users, do: set_activation(user, status)
+ for user <- users do
+ {:ok, user} = set_activation(user, status)
+ user
+ end
end)
end
- @spec set_activation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
+ @spec set_activation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
def set_activation(%User{} = user, status) do
with {:ok, user} <- set_activation_status(user, status) do
user
@@ -1795,7 +1925,7 @@ def update_notification_settings(%User{} = user, settings) do
|> update_and_set_cache()
end
- @spec purge_user_changeset(User.t()) :: Changeset.t()
+ @spec purge_user_changeset(User.t()) :: Ecto.Changeset.t()
def purge_user_changeset(user) do
# "Right to be forgotten"
# https://gdpr.eu/right-to-be-forgotten/
@@ -1820,7 +1950,6 @@ def purge_user_changeset(user) do
confirmation_token: nil,
domain_blocks: [],
is_active: false,
- ap_enabled: false,
is_moderator: false,
is_admin: false,
mascot: nil,
@@ -1899,8 +2028,45 @@ def perform(:delete, %User{} = user) do
maybe_delete_from_db(user)
end
+ def perform(:verify_fields_links, user) do
+ profile_urls = [user.ap_id]
+
+ fields =
+ user.raw_fields
+ |> Enum.map(&verify_field_link(&1, profile_urls))
+
+ changeset =
+ user
+ |> update_changeset(%{raw_fields: fields})
+
+ with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
+ set_cache(user)
+ end
+ end
+
def perform(:set_activation_async, user, status), do: set_activation(user, status)
+ defp verify_field_link(field, profile_urls) do
+ verified_at =
+ with %{"value" => value} <- field,
+ {:verified_at, nil} <- {:verified_at, Map.get(field, "verified_at")},
+ %{scheme: scheme, userinfo: nil, host: host}
+ when not_empty_string(host) and scheme in ["http", "https"] <-
+ URI.parse(value),
+ {:not_idn, true} <- {:not_idn, to_string(:idna.encode(host)) == host},
+ "me" <- Pleroma.Web.RelMe.maybe_put_rel_me(value, profile_urls) do
+ CommonUtils.to_masto_date(NaiveDateTime.utc_now())
+ else
+ {:verified_at, value} when not_empty_string(value) ->
+ value
+
+ _ ->
+ nil
+ end
+
+ Map.put(field, "verified_at", verified_at)
+ end
+
@spec external_users_query() :: Ecto.Query.t()
def external_users_query do
User.Query.build(%{
@@ -2046,6 +2212,7 @@ defp create_service_actor(uri, nickname) do
follower_address: uri <> "/followers"
}
|> change
+ |> put_private_key()
|> unique_constraint(:nickname)
|> Repo.insert()
|> set_cache()
@@ -2064,7 +2231,7 @@ def public_key(%{public_key: public_key_pem}) when is_binary(public_key_pem) do
def public_key(_), do: {:error, "key not found"}
def get_public_key_for_ap_id(ap_id) do
- with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
+ with %User{} = user <- get_cached_by_ap_id(ap_id),
{:ok, public_key} <- public_key(user) do
{:ok, public_key}
else
@@ -2072,13 +2239,10 @@ def get_public_key_for_ap_id(ap_id) do
end
end
- def ap_enabled?(%User{local: true}), do: true
- def ap_enabled?(%User{ap_enabled: ap_enabled}), do: ap_enabled
- def ap_enabled?(_), do: false
-
@doc "Gets or fetch a user by uri or nickname."
@spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
- def get_or_fetch("http" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
+ def get_or_fetch("http://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
+ def get_or_fetch("https://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)
# wait a period of time and return newest version of the User structs
@@ -2183,7 +2347,7 @@ def full_nickname(%User{} = user) do
if String.contains?(user.nickname, "@") do
user.nickname
else
- %{host: host} = URI.parse(user.ap_id)
+ host = Pleroma.Web.WebFinger.host()
user.nickname <> "@" <> host
end
end
@@ -2206,6 +2370,11 @@ def all_superusers do
|> Repo.all()
end
+ @spec all_users_with_privilege(atom()) :: [User.t()]
+ def all_users_with_privilege(privilege) do
+ User.Query.build(%{is_privileged: privilege}) |> Repo.all()
+ end
+
def muting_reblogs?(%User{} = user, %User{} = target) do
UserRelationship.reblog_mute_exists?(user, target)
end
@@ -2284,7 +2453,7 @@ def touch_last_digest_emailed_at(user) do
updated_user
end
- @spec set_confirmation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
+ @spec set_confirmation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
def set_confirmation(%User{} = user, bool) do
user
|> confirmation_changeset(set_confirmation: bool)
@@ -2311,17 +2480,6 @@ def get_mascot(%{mascot: mascot}) when is_nil(mascot) do
}
end
- def ensure_keys_present(%{keys: keys} = user) when not is_nil(keys), do: {:ok, user}
-
- def ensure_keys_present(%User{} = user) do
- with {:ok, pem} <- Keys.generate_rsa_pem() do
- user
- |> cast(%{keys: pem}, [:keys])
- |> validate_required([:keys])
- |> update_and_set_cache()
- end
- end
-
def get_ap_ids_by_nicknames(nicknames) do
from(u in User,
where: u.nickname in ^nicknames,
@@ -2339,9 +2497,9 @@ defp put_password_hash(
defp put_password_hash(changeset), do: changeset
- def is_internal_user?(%User{nickname: nil}), do: true
- def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
- def is_internal_user?(_), do: false
+ def internal?(%User{nickname: nil}), do: true
+ def internal?(%User{local: true, nickname: "internal." <> _}), do: true
+ def internal?(_), do: false
# A hack because user delete activities have a fake id for whatever reason
# TODO: Get rid of this
@@ -2364,6 +2522,38 @@ def change_email(user, email) do
|> update_and_set_cache()
end
+ def alias_users(user) do
+ user.also_known_as
+ |> Enum.map(&User.get_cached_by_ap_id/1)
+ |> Enum.filter(fn user -> user != nil end)
+ end
+
+ def add_alias(user, new_alias_user) do
+ current_aliases = user.also_known_as || []
+ new_alias_ap_id = new_alias_user.ap_id
+
+ if new_alias_ap_id in current_aliases do
+ {:ok, user}
+ else
+ user
+ |> cast(%{also_known_as: current_aliases ++ [new_alias_ap_id]}, [:also_known_as])
+ |> update_and_set_cache()
+ end
+ end
+
+ def delete_alias(user, alias_user) do
+ current_aliases = user.also_known_as || []
+ alias_ap_id = alias_user.ap_id
+
+ if alias_ap_id in current_aliases do
+ user
+ |> cast(%{also_known_as: current_aliases -- [alias_ap_id]}, [:also_known_as])
+ |> update_and_set_cache()
+ else
+ {:error, :no_such_alias}
+ end
+ end
+
# Internal function; public one is `deactivate/2`
defp set_activation_status(user, status) do
user
@@ -2441,7 +2631,7 @@ def mascot_update(user, url) do
|> update_and_set_cache()
end
- @spec confirmation_changeset(User.t(), keyword()) :: Changeset.t()
+ @spec confirmation_changeset(User.t(), keyword()) :: Ecto.Changeset.t()
def confirmation_changeset(user, set_confirmation: confirmed?) do
params =
if confirmed? do
@@ -2459,9 +2649,9 @@ def confirmation_changeset(user, set_confirmation: confirmed?) do
cast(user, params, [:is_confirmed, :confirmation_token])
end
- @spec approval_changeset(User.t(), keyword()) :: Changeset.t()
- def approval_changeset(user, set_approval: approved?) do
- cast(user, %{is_approved: approved?}, [:is_approved])
+ @spec approval_changeset(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t()
+ def approval_changeset(changeset, set_approval: approved?) do
+ cast(changeset, %{is_approved: approved?}, [:is_approved])
end
@spec add_pinned_object_id(User.t(), String.t()) :: {:ok, User.t()} | {:error, term()}
@@ -2564,10 +2754,11 @@ def sanitize_html(%User{} = user) do
# - display name
def sanitize_html(%User{} = user, filter) do
fields =
- Enum.map(user.fields, fn %{"name" => name, "value" => value} ->
+ Enum.map(user.fields, fn %{"name" => name, "value" => value} = fields ->
%{
"name" => name,
- "value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly)
+ "value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly),
+ "verified_at" => Map.get(fields, "verified_at")
}
end)
@@ -2586,6 +2777,8 @@ def update_last_active_at(%__MODULE__{local: true} = user) do
|> update_and_set_cache()
end
+ def update_last_active_at(user), do: user
+
def active_user_count(days \\ 30) do
active_after = Timex.shift(NaiveDateTime.utc_now(), days: -days)
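
The superuser?/1 checks are replaced by per-privilege checks driven by the :instance config. A short sketch of how privileged?/2 and privileges/1 behave; the privilege lists are illustrative (only tags that already appear in this patch are used) and `moderator` stands for a local user with is_moderator: true and is_admin: false:

    # config/config.exs (illustrative lists)
    config :pleroma, :instance,
      admin_privileges: [:users_manage_activation_state, :reports_manage_reports],
      moderator_privileges: [:reports_manage_reports]

    Pleroma.User.privileged?(moderator, :reports_manage_reports)
    #=> true
    Pleroma.User.privileged?(moderator, :users_manage_activation_state)
    #=> false
    Pleroma.User.privileges(moderator)
    #=> [:reports_manage_reports]
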
diff --git a/lib/pleroma/user/backup.ex b/lib/pleroma/user/backup.ex
index 9cb329663..65e0baccd 100644
--- a/lib/pleroma/user/backup.ex
+++ b/lib/pleroma/user/backup.ex
@@ -9,32 +9,38 @@ defmodule Pleroma.User.Backup do
import Ecto.Query
import Pleroma.Web.Gettext
+ require Logger
require Pleroma.Constants
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.Repo
alias Pleroma.User
+ alias Pleroma.User.Backup.State
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.UserView
alias Pleroma.Workers.BackupWorker
+ @type t :: %__MODULE__{}
+
schema "backups" do
field(:content_type, :string)
field(:file_name, :string)
field(:file_size, :integer, default: 0)
field(:processed, :boolean, default: false)
+ field(:state, State, default: :invalid)
+ field(:processed_number, :integer, default: 0)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
timestamps()
end
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
def create(user, admin_id \\ nil) do
- with :ok <- validate_email_enabled(),
- :ok <- validate_user_email(user),
- :ok <- validate_limit(user, admin_id),
+ with :ok <- validate_limit(user, admin_id),
{:ok, backup} <- user |> new() |> Repo.insert() do
BackupWorker.process(backup, admin_id)
end
@@ -48,7 +54,8 @@ def new(user) do
%__MODULE__{
user_id: user.id,
content_type: "application/zip",
- file_name: name
+ file_name: name,
+ state: :pending
}
end
@@ -86,20 +93,6 @@ defp validate_limit(user, nil) do
end
end
- defp validate_email_enabled do
- if Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do
- :ok
- else
- {:error, dgettext("errors", "Backups require enabled email")}
- end
- end
-
- defp validate_user_email(%User{email: nil}) do
- {:error, dgettext("errors", "Email is required")}
- end
-
- defp validate_user_email(%User{email: email}) when is_binary(email), do: :ok
-
def get_last(user_id) do
__MODULE__
|> where(user_id: ^user_id)
@@ -125,30 +118,109 @@ def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do
def get(id), do: Repo.get(__MODULE__, id)
- def process(%__MODULE__{} = backup) do
- with {:ok, zip_file} <- export(backup),
- {:ok, %{size: size}} <- File.stat(zip_file),
- {:ok, _upload} <- upload(backup, zip_file) do
- backup
- |> cast(%{file_size: size, processed: true}, [:file_size, :processed])
- |> Repo.update()
+ defp set_state(backup, state, processed_number \\ nil) do
+ struct =
+ %{state: state}
+ |> Pleroma.Maps.put_if_present(:processed_number, processed_number)
+
+ backup
+ |> cast(struct, [:state, :processed_number])
+ |> Repo.update()
+ end
+
+ def process(
+ %__MODULE__{} = backup,
+ processor_module \\ __MODULE__.Processor
+ ) do
+ set_state(backup, :running, 0)
+
+ current_pid = self()
+
+ task =
+ Task.Supervisor.async_nolink(
+ Pleroma.TaskSupervisor,
+ processor_module,
+ :do_process,
+ [backup, current_pid]
+ )
+
+ wait_backup(backup, backup.processed_number, task)
+ end
+
+ defp wait_backup(backup, current_processed, task) do
+ wait_time = @config_impl.get([__MODULE__, :process_wait_time])
+
+ receive do
+ {:progress, new_processed} ->
+ total_processed = current_processed + new_processed
+
+ set_state(backup, :running, total_processed)
+ wait_backup(backup, total_processed, task)
+
+ {:DOWN, _ref, _proc, _pid, reason} ->
+ backup = get(backup.id)
+
+ if reason != :normal do
+ Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
+
+ {:ok, backup} = set_state(backup, :failed)
+
+ cleanup(backup)
+
+ {:error,
+ %{
+ backup: backup,
+ reason: :exit,
+ details: reason
+ }}
+ else
+ {:ok, backup}
+ end
+ after
+ wait_time ->
+ Logger.error(
+ "Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
+ )
+
+ Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
+
+ {:ok, backup} = set_state(backup, :failed)
+
+ cleanup(backup)
+
+ {:error,
+ %{
+ backup: backup,
+ reason: :timeout
+ }}
end
end
- @files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json']
- def export(%__MODULE__{} = backup) do
+ @files [
+ 'actor.json',
+ 'outbox.json',
+ 'likes.json',
+ 'bookmarks.json',
+ 'followers.json',
+ 'following.json'
+ ]
+ @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
+ def export(%__MODULE__{} = backup, caller_pid) do
backup = Repo.preload(backup, :user)
- name = String.trim_trailing(backup.file_name, ".zip")
- dir = dir(name)
+ dir = backup_tempdir(backup)
with :ok <- File.mkdir(dir),
- :ok <- actor(dir, backup.user),
- :ok <- statuses(dir, backup.user),
- :ok <- likes(dir, backup.user),
- :ok <- bookmarks(dir, backup.user),
- {:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
+ :ok <- actor(dir, backup.user, caller_pid),
+ :ok <- statuses(dir, backup.user, caller_pid),
+ :ok <- likes(dir, backup.user, caller_pid),
+ :ok <- bookmarks(dir, backup.user, caller_pid),
+ :ok <- followers(dir, backup.user, caller_pid),
+ :ok <- following(dir, backup.user, caller_pid),
+ {:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir),
{:ok, _} <- File.rm_rf(dir) do
- {:ok, to_string(zip_path)}
+ {:ok, zip_path}
+ else
+ _ -> :error
end
end
@@ -173,11 +245,12 @@ def upload(%__MODULE__{} = backup, zip_path) do
end
end
- defp actor(dir, user) do
+ defp actor(dir, user, caller_pid) do
with {:ok, json} <-
UserView.render("user.json", %{user: user})
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|> Jason.encode() do
+ send(caller_pid, {:progress, 1})
File.write(Path.join(dir, "actor.json"), json)
end
end
@@ -196,47 +269,80 @@ defp write_header(file, name) do
)
end
- defp write(query, dir, name, fun) do
+ defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
+
+ defp backup_tempdir(backup) do
+ name = String.trim_trailing(backup.file_name, ".zip")
+ dir(name)
+ end
+
+ defp cleanup(backup) do
+ dir = backup_tempdir(backup)
+ File.rm_rf(dir)
+ end
+
+ defp write(query, dir, name, fun, caller_pid) do
path = Path.join(dir, "#{name}.json")
+ chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
+
with {:ok, file} <- File.open(path, [:write, :utf8]),
:ok <- write_header(file, name) do
total =
query
- |> Pleroma.Repo.chunk_stream(100)
+ |> Pleroma.Repo.chunk_stream(chunk_size, _returns_as = :one, timeout: :infinity)
|> Enum.reduce(0, fn i, acc ->
- with {:ok, data} <- fun.(i),
+ with {:ok, data} <-
+ (try do
+ fun.(i)
+ rescue
+ e -> {:error, e}
+ end),
{:ok, str} <- Jason.encode(data),
:ok <- IO.write(file, str <> ",\n") do
+ if should_report?(acc + 1, chunk_size) do
+ send(caller_pid, {:progress, chunk_size})
+ end
+
acc + 1
else
- _ -> acc
+ {:error, e} ->
+ Logger.warning(
+ "Error processing backup item: #{inspect(e)}\n The item is: #{inspect(i)}"
+ )
+
+ acc
+
+ _ ->
+ acc
end
end)
+ send(caller_pid, {:progress, rem(total, chunk_size)})
+
with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
File.close(file)
end
end
end
- defp bookmarks(dir, %{id: user_id} = _user) do
+ defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
Bookmark
|> where(user_id: ^user_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
- |> write(dir, "bookmarks", fn a -> {:ok, a.object} end)
+ |> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
end
- defp likes(dir, user) do
+ defp likes(dir, user, caller_pid) do
user.ap_id
|> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like")
|> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
- |> write(dir, "likes", fn a -> {:ok, a.object} end)
+ |> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
end
- defp statuses(dir, user) do
+ defp statuses(dir, user, caller_pid) do
opts =
%{}
|> Map.put(:type, ["Create", "Announce"])
@@ -249,10 +355,59 @@ defp statuses(dir, user) do
]
|> Enum.concat()
|> ActivityPub.fetch_activities_query(opts)
- |> write(dir, "outbox", fn a ->
- with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
- {:ok, Map.delete(activity, "@context")}
- end
- end)
+ |> write(
+ dir,
+ "outbox",
+ fn a ->
+ with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
+ {:ok, Map.delete(activity, "@context")}
+ end
+ end,
+ caller_pid
+ )
+ end
+
+ defp followers(dir, user, caller_pid) do
+ User.get_followers_query(user)
+ |> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid)
+ end
+
+ defp following(dir, user, caller_pid) do
+ User.get_friends_query(user)
+ |> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid)
+ end
+end
+
+defmodule Pleroma.User.Backup.ProcessorAPI do
+ @callback do_process(%Pleroma.User.Backup{}, pid()) ::
+ {:ok, %Pleroma.User.Backup{}} | {:error, any()}
+end
+
+defmodule Pleroma.User.Backup.Processor do
+ @behaviour Pleroma.User.Backup.ProcessorAPI
+
+ alias Pleroma.Repo
+ alias Pleroma.User.Backup
+
+ import Ecto.Changeset
+
+ @impl true
+ def do_process(backup, current_pid) do
+ with {:ok, zip_file} <- Backup.export(backup, current_pid),
+ {:ok, %{size: size}} <- File.stat(zip_file),
+ {:ok, _upload} <- Backup.upload(backup, zip_file) do
+ backup
+ |> cast(
+ %{
+ file_size: size,
+ processed: true,
+ state: :complete
+ },
+ [:file_size, :processed, :state]
+ )
+ |> Repo.update()
+ else
+ e -> {:error, e}
+ end
end
end
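
Backup processing is now supervised: process/2 runs the processor under Pleroma.TaskSupervisor, folds the {:progress, n} messages sent back by write/5 into the processed_number counter, and marks the backup :failed if the task dies or stays silent longer than :process_wait_time. A sketch of the two tunables read here, with placeholder values rather than project defaults:

    # config/config.exs (illustrative values)
    config :pleroma, Pleroma.User.Backup,
      # ms without a progress message before the export task is terminated
      process_wait_time: 30 * 60 * 1_000,
      # rows streamed per chunk, which is also the progress reporting granularity
      process_chunk_size: 100
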
diff --git a/lib/pleroma/user/query.ex b/lib/pleroma/user/query.ex
index 20bc1ea61..cd9586452 100644
--- a/lib/pleroma/user/query.ex
+++ b/lib/pleroma/user/query.ex
@@ -22,13 +22,14 @@ defmodule Pleroma.User.Query do
- pass non empty string
- e.g. Pleroma.User.Query.build(%{email: "email@example.com"})
- *contains criteria*
- - add field to @containns_criteria list
+ - add field to @contains_criteria list
- pass values list
- e.g. Pleroma.User.Query.build(%{ap_id: ["http://ap_id1", "http://ap_id2"]})
"""
import Ecto.Query
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
+ alias Pleroma.Config
alias Pleroma.FollowingRelationship
alias Pleroma.User
@@ -49,6 +50,7 @@ defmodule Pleroma.User.Query do
is_suggested: boolean(),
is_discoverable: boolean(),
super_users: boolean(),
+ is_privileged: atom(),
invisible: boolean(),
internal: boolean(),
followers: User.t(),
@@ -69,7 +71,7 @@ defmodule Pleroma.User.Query do
@equal_criteria [:email]
@contains_criteria [:ap_id, :nickname]
- @spec build(Query.t(), criteria()) :: Query.t()
+ @spec build(Ecto.Query.t(), criteria()) :: Ecto.Query.t()
def build(query \\ base_query(), criteria) do
prepare_query(query, criteria)
end
@@ -136,6 +138,43 @@ defp compose_query({:super_users, _}, query) do
)
end
+ defp compose_query({:is_privileged, privilege}, query) do
+ moderator_privileged = privilege in Config.get([:instance, :moderator_privileges])
+ admin_privileged = privilege in Config.get([:instance, :admin_privileges])
+
+ query = compose_query({:active, true}, query)
+ query = compose_query({:local, true}, query)
+
+ case {admin_privileged, moderator_privileged} do
+ {false, false} ->
+ where(
+ query,
+ false
+ )
+
+ {true, true} ->
+ where(
+ query,
+ [u],
+ u.is_admin or u.is_moderator
+ )
+
+ {true, false} ->
+ where(
+ query,
+ [u],
+ u.is_admin
+ )
+
+ {false, true} ->
+ where(
+ query,
+ [u],
+ u.is_moderator
+ )
+ end
+ end
+
defp compose_query({:local, _}, query), do: location_query(query, true)
defp compose_query({:external, _}, query), do: location_query(query, false)
diff --git a/lib/pleroma/user/search.ex b/lib/pleroma/user/search.ex
index cd6f69f56..a7fb8fb83 100644
--- a/lib/pleroma/user/search.ex
+++ b/lib/pleroma/user/search.ex
@@ -94,6 +94,7 @@ defp search_query(query_string, for_user, following, top_user_ids) do
|> subquery()
|> order_by(desc: :search_rank)
|> maybe_restrict_local(for_user)
+ |> filter_deactivated_users()
end
defp select_top_users(query, top_user_ids) do
@@ -166,6 +167,10 @@ defp filter_internal_users(query) do
from(q in query, where: q.actor_type != "Application")
end
+ defp filter_deactivated_users(query) do
+ from(q in query, where: q.is_active == true)
+ end
+
defp filter_blocked_user(query, %User{} = blocker) do
query
|> join(:left, [u], b in Pleroma.UserRelationship,
diff --git a/lib/pleroma/user_invite_token.ex b/lib/pleroma/user_invite_token.ex
index b242a8848..4bfb3a6a7 100644
--- a/lib/pleroma/user_invite_token.ex
+++ b/lib/pleroma/user_invite_token.ex
@@ -64,7 +64,7 @@ def update_invite!(invite, changes) do
end
@spec update_invite(UserInviteToken.t(), map()) ::
- {:ok, UserInviteToken.t()} | {:error, Changeset.t()}
+ {:ok, UserInviteToken.t()} | {:error, Ecto.Changeset.t()}
def update_invite(invite, changes) do
change(invite, changes) |> Repo.update()
end
diff --git a/lib/pleroma/user_relationship.ex b/lib/pleroma/user_relationship.ex
index 1432a1d83..82fcc1cdd 100644
--- a/lib/pleroma/user_relationship.ex
+++ b/lib/pleroma/user_relationship.ex
@@ -14,20 +14,23 @@ defmodule Pleroma.UserRelationship do
alias Pleroma.User
alias Pleroma.UserRelationship
+ @type t :: %__MODULE__{}
+
schema "user_relationships" do
belongs_to(:source, User, type: FlakeId.Ecto.CompatType)
belongs_to(:target, User, type: FlakeId.Ecto.CompatType)
field(:relationship_type, Pleroma.UserRelationship.Type)
+ field(:expires_at, :utc_datetime)
timestamps(updated_at: false)
end
for relationship_type <- Keyword.keys(Pleroma.UserRelationship.Type.__enum_map__()) do
- # `def create_block/2`, `def create_mute/2`, `def create_reblog_mute/2`,
- # `def create_notification_mute/2`, `def create_inverse_subscription/2`,
- # `def endorsement/2`
- def unquote(:"create_#{relationship_type}")(source, target),
- do: create(unquote(relationship_type), source, target)
+ # `def create_block/3`, `def create_mute/3`, `def create_reblog_mute/3`,
+ # `def create_notification_mute/3`, `def create_inverse_subscription/3`,
+ # `def create_endorsement/3`
+ def unquote(:"create_#{relationship_type}")(source, target, expires_at \\ nil),
+ do: create(unquote(relationship_type), source, target, expires_at)
# `def delete_block/2`, `def delete_mute/2`, `def delete_reblog_mute/2`,
# `def delete_notification_mute/2`, `def delete_inverse_subscription/2`,
@@ -37,9 +40,15 @@ def unquote(:"delete_#{relationship_type}")(source, target),
# `def block_exists?/2`, `def mute_exists?/2`, `def reblog_mute_exists?/2`,
# `def notification_mute_exists?/2`, `def inverse_subscription_exists?/2`,
- # `def inverse_endorsement?/2`
+ # `def endorsement_exists?/2`
def unquote(:"#{relationship_type}_exists?")(source, target),
do: exists?(unquote(relationship_type), source, target)
+
+ # `def get_block_expire_date/2`, `def get_mute_expire_date/2`,
+ # `def get_reblog_mute_expire_date/2`, `def get_notification_mute_expire_date/2`,
+ # `def get_inverse_subscription_expire_date/2`, `def get_endorsement_expire_date/2`
+ def unquote(:"get_#{relationship_type}_expire_date")(source, target),
+ do: get_expire_date(unquote(relationship_type), source, target)
end
def user_relationship_types, do: Keyword.keys(user_relationship_mappings())
@@ -48,7 +57,7 @@ def user_relationship_mappings, do: Pleroma.UserRelationship.Type.__enum_map__()
def changeset(%UserRelationship{} = user_relationship, params \\ %{}) do
user_relationship
- |> cast(params, [:relationship_type, :source_id, :target_id])
+ |> cast(params, [:relationship_type, :source_id, :target_id, :expires_at])
|> validate_required([:relationship_type, :source_id, :target_id])
|> unique_constraint(:relationship_type,
name: :user_relationships_source_id_relationship_type_target_id_index
@@ -62,16 +71,31 @@ def exists?(relationship_type, %User{} = source, %User{} = target) do
|> Repo.exists?()
end
- def create(relationship_type, %User{} = source, %User{} = target) do
+ def get_expire_date(relationship_type, %User{} = source, %User{} = target) do
+ %UserRelationship{expires_at: expires_at} =
+ UserRelationship
+ |> where(
+ relationship_type: ^relationship_type,
+ source_id: ^source.id,
+ target_id: ^target.id
+ )
+ |> Repo.one!()
+
+ expires_at
+ end
+
+ def create(relationship_type, %User{} = source, %User{} = target, expires_at \\ nil) do
%UserRelationship{}
|> changeset(%{
relationship_type: relationship_type,
source_id: source.id,
- target_id: target.id
+ target_id: target.id,
+ expires_at: expires_at
})
|> Repo.insert(
- on_conflict: {:replace_all_except, [:id]},
- conflict_target: [:source_id, :relationship_type, :target_id]
+ on_conflict: {:replace_all_except, [:id, :inserted_at]},
+ conflict_target: [:source_id, :relationship_type, :target_id],
+ returning: true
)
end
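
User relationships can now carry an expires_at timestamp; User.mute/3 derives it from a :duration given in seconds and schedules MuteExpireWorker for that time. A usage sketch, where muter and mutee are assumed to be already-loaded %User{} structs:

    # mute for one hour; the notification mute (created by default) expires at the same time
    {:ok, _relationships} = Pleroma.User.mute(muter, mutee, %{duration: 3_600})
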
diff --git a/lib/pleroma/web.ex b/lib/pleroma/web.ex
index aee41b0fe..7a8b176cd 100644
--- a/lib/pleroma/web.ex
+++ b/lib/pleroma/web.ex
@@ -136,7 +136,7 @@ def view do
namespace: Pleroma.Web
# Import convenience functions from controllers
- import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
+ import Phoenix.Controller, only: [get_csrf_token: 0, view_module: 1]
import Pleroma.Web.ErrorHelpers
import Pleroma.Web.Gettext
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index 064f93b22..643877268 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -74,29 +74,40 @@ defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(
defp check_remote_limit(_), do: true
def increase_note_count_if_public(actor, object) do
- if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
+ if public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
end
def decrease_note_count_if_public(actor, object) do
- if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
+ if public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
end
def update_last_status_at_if_public(actor, object) do
- if is_public?(object), do: User.update_last_status_at(actor), else: {:ok, actor}
+ if public?(object), do: User.update_last_status_at(actor), else: {:ok, actor}
end
defp increase_replies_count_if_reply(%{
"object" => %{"inReplyTo" => reply_ap_id} = object,
"type" => "Create"
}) do
- if is_public?(object) do
+ if public?(object) do
Object.increase_replies_count(reply_ap_id)
end
end
defp increase_replies_count_if_reply(_create_data), do: :noop
- @object_types ~w[ChatMessage Question Answer Audio Video Event Article Note Page]
+ defp increase_quotes_count_if_quote(%{
+ "object" => %{"quoteUrl" => quote_ap_id} = object,
+ "type" => "Create"
+ }) do
+ if public?(object) do
+ Object.increase_quotes_count(quote_ap_id)
+ end
+ end
+
+ defp increase_quotes_count_if_quote(_create_data), do: :noop
+
+ @object_types ~w[ChatMessage Question Answer Audio Video Image Event Article Note Page]
@impl true
def persist(%{"type" => type} = object, meta) when type in @object_types do
with {:ok, object} <- Object.create(object) do
@@ -136,9 +147,10 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
# Splice in the child object if we have one.
activity = Maps.put_if_present(activity, :object, object)
- ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
- Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
- end)
+ Pleroma.Web.RichMedia.Card.get_by_activity(activity)
+
+ # Add local posts to search index
+ if local, do: Pleroma.Search.add_to_index(activity)
{:ok, activity}
else
@@ -163,7 +175,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
id: "pleroma:fakeid"
}
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+ Pleroma.Web.RichMedia.Card.get_by_activity(activity)
{:ok, activity}
{:remote_limit_pass, _} ->
@@ -188,9 +200,19 @@ defp insert_activity_with_expiration(data, local, recipients) do
end
def notify_and_stream(activity) do
- Notification.create_notifications(activity)
+ {:ok, notifications} = Notification.create_notifications(activity)
+ Notification.send(notifications)
- conversation = create_or_bump_conversation(activity, activity.actor)
+ original_activity =
+ case activity do
+ %{data: %{"type" => "Update"}, object: %{data: %{"id" => id}}} ->
+ Activity.get_create_by_object_ap_id_with_object(id)
+
+ _ ->
+ activity
+ end
+
+ conversation = create_or_bump_conversation(original_activity, original_activity.actor)
participations = get_participations(conversation)
stream_out(activity)
stream_out_participations(participations)
@@ -256,7 +278,7 @@ def stream_out_participations(_, _), do: :noop
@impl true
def stream_out(%Activity{data: %{"type" => data_type}} = activity)
- when data_type in ["Create", "Announce", "Delete"] do
+ when data_type in ["Create", "Announce", "Delete", "Update"] do
activity
|> Topics.get_activity_topics()
|> Streamer.stream(activity)
@@ -290,11 +312,13 @@ defp do_create(%{to: to, actor: actor, context: context, object: object} = param
with {:ok, activity} <- insert(create_data, local, fake),
{:fake, false, activity} <- {:fake, fake, activity},
_ <- increase_replies_count_if_reply(create_data),
+ _ <- increase_quotes_count_if_quote(create_data),
{:quick_insert, false, activity} <- {:quick_insert, quick_insert?, activity},
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
{:ok, _actor} <- update_last_status_at_if_public(actor, activity),
_ <- notify_and_stream(activity),
:ok <- maybe_schedule_poll_notifications(activity),
+ :ok <- maybe_handle_group_posts(activity),
:ok <- maybe_federate(activity) do
{:ok, activity}
else
@@ -392,11 +416,11 @@ defp do_flag(
_ <- notify_and_stream(activity),
:ok <-
maybe_federate(stripped_activity) do
- User.all_superusers()
+ User.all_users_with_privilege(:reports_manage_reports)
|> Enum.filter(fn user -> user.ap_id != actor end)
|> Enum.filter(fn user -> not is_nil(user.email) end)
- |> Enum.each(fn superuser ->
- superuser
+ |> Enum.each(fn privileged_user ->
+ privileged_user
|> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
|> Pleroma.Emails.Mailer.deliver_async()
end)
@@ -413,7 +437,8 @@ def move(%User{} = origin, %User{} = target, local \\ true) do
"type" => "Move",
"actor" => origin.ap_id,
"object" => origin.ap_id,
- "target" => target.ap_id
+ "target" => target.ap_id,
+ "to" => [origin.follower_address]
}
with true <- origin.ap_id in target.also_known_as,
@@ -445,6 +470,7 @@ def fetch_activities_for_context_query(context, opts) do
|> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts)
|> maybe_set_thread_muted_field(opts)
+ |> restrict_unauthenticated(opts[:user])
|> restrict_blocked(opts)
|> restrict_blockers_visibility(opts)
|> restrict_recipients(recipients, opts[:user])
@@ -472,7 +498,7 @@ def fetch_activities_for_context(context, opts \\ %{}) do
end
@spec fetch_latest_direct_activity_id_for_context(String.t(), keyword() | map()) ::
- FlakeId.Ecto.CompatType.t() | nil
+ Ecto.UUID.t() | nil
def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
context
|> fetch_activities_for_context_query(Map.merge(%{skip_preload: true}, opts))
@@ -501,9 +527,18 @@ def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do
@spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()]
def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do
+ includes_local_public = Map.get(opts, :includes_local_public, false)
+
opts = Map.delete(opts, :user)
- [Constants.as_public()]
+ intended_recipients =
+ if includes_local_public do
+ [Constants.as_public(), as_local_public()]
+ else
+ [Constants.as_public()]
+ end
+
+ intended_recipients
|> fetch_activities_query(opts)
|> restrict_unlisted(opts)
|> fetch_paginated_optimized(opts, pagination)
@@ -603,9 +638,11 @@ defp restrict_thread_visibility(query, %{user: %User{skip_thread_containment: tr
do: query
defp restrict_thread_visibility(query, %{user: %User{ap_id: ap_id}}, _) do
+ local_public = as_local_public()
+
from(
a in query,
- where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data)
+ where: fragment("thread_visibility(?, (?)->>'id', ?) = true", ^ap_id, a.data, ^local_public)
)
end
@@ -692,8 +729,12 @@ defp fetch_activities_for_reading_user(reading_user, params) do
defp user_activities_recipients(%{godmode: true}), do: []
defp user_activities_recipients(%{reading_user: reading_user}) do
- if reading_user do
- [Constants.as_public(), reading_user.ap_id | User.following(reading_user)]
+ if not is_nil(reading_user) and reading_user.local do
+ [
+ Constants.as_public(),
+ as_local_public(),
+ reading_user.ap_id | User.following(reading_user)
+ ]
else
[Constants.as_public()]
end
@@ -1134,8 +1175,7 @@ defp restrict_pinned(query, %{pinned: true, pinned_object_ids: ids}) do
[activity, object: o] in query,
where:
fragment(
- "(?)->>'type' = 'Create' and coalesce((?)->'object'->>'id', (?)->>'object') = any (?)",
- activity.data,
+ "(?)->>'type' = 'Create' and associated_object_id((?)) = any (?)",
activity.data,
activity.data,
^ids
@@ -1191,6 +1231,44 @@ defp restrict_filtered(query, %{blocking_user: %User{} = user}) do
defp restrict_filtered(query, _), do: query
+ defp restrict_unauthenticated(query, nil) do
+ local = Config.restrict_unauthenticated_access?(:activities, :local)
+ remote = Config.restrict_unauthenticated_access?(:activities, :remote)
+
+ cond do
+ local and remote ->
+ from(activity in query, where: false)
+
+ local ->
+ from(activity in query, where: activity.local == false)
+
+ remote ->
+ from(activity in query, where: activity.local == true)
+
+ true ->
+ query
+ end
+ end
+
+ defp restrict_unauthenticated(query, _), do: query
+
+ defp restrict_quote_url(query, %{quote_url: quote_url}) do
+ from([_activity, object] in query,
+ where: fragment("(?)->'quoteUrl' = ?", object.data, ^quote_url)
+ )
+ end
+
+ defp restrict_quote_url(query, _), do: query
+
+ defp restrict_rule(query, %{rule_id: rule_id}) do
+ from(
+ activity in query,
+ where: fragment("(?)->'rules' \\? (?)", activity.data, ^rule_id)
+ )
+ end
+
+ defp restrict_rule(query, _), do: query
+
defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
defp exclude_poll_votes(query, _) do
@@ -1215,15 +1293,15 @@ defp exclude_chat_messages(query, _) do
end
end
+ defp exclude_invisible_actors(query, %{type: "Flag"}), do: query
defp exclude_invisible_actors(query, %{invisible_actors: true}), do: query
defp exclude_invisible_actors(query, _opts) do
- invisible_ap_ids =
- User.Query.build(%{invisible: true, select: [:ap_id]})
- |> Repo.all()
- |> Enum.map(fn %{ap_id: ap_id} -> ap_id end)
-
- from([activity] in query, where: activity.actor not in ^invisible_ap_ids)
+ query
+ |> join(:inner, [activity], u in User,
+ as: :u,
+ on: activity.actor == u.ap_id and u.invisible == false
+ )
end
defp exclude_id(query, %{exclude_id: id}) when is_binary(id) do
@@ -1353,7 +1431,9 @@ def fetch_activities_query(recipients, opts \\ %{}) do
|> restrict_instance(opts)
|> restrict_announce_object_actor(opts)
|> restrict_filtered(opts)
- |> Activity.restrict_deactivated_users()
+ |> restrict_rule(opts)
+ |> restrict_quote_url(opts)
+ |> maybe_restrict_deactivated_users(opts)
|> exclude_poll_votes(opts)
|> exclude_chat_messages(opts)
|> exclude_invisible_actors(opts)
@@ -1429,13 +1509,22 @@ def fetch_activities_bounded(
@spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
def upload(file, opts \\ []) do
- with {:ok, data} <- Upload.store(file, opts) do
+ with {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
obj_data = Maps.put_if_present(data, "actor", opts[:actor])
Repo.insert(%Object{data: obj_data})
end
end
+ defp sanitize_upload_file(%Plug.Upload{filename: filename} = upload) when is_binary(filename) do
+ %Plug.Upload{
+ upload
+ | filename: Path.basename(filename)
+ }
+ end
+
+ defp sanitize_upload_file(upload), do: upload
+
@spec get_actor_url(any()) :: binary() | nil
defp get_actor_url(url) when is_binary(url), do: url
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
@@ -1458,7 +1547,7 @@ defp normalize_image(%{"url" => url}) do
defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
defp normalize_image(_), do: nil
- defp object_to_user_data(data) do
+ defp object_to_user_data(data, additional) do
fields =
data
|> Map.get("attachment", [])
@@ -1490,15 +1579,11 @@ defp object_to_user_data(data) do
public_key =
if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
data["publicKey"]["publicKeyPem"]
- else
- nil
end
shared_inbox =
if is_map(data["endpoints"]) && is_binary(data["endpoints"]["sharedInbox"]) do
data["endpoints"]["sharedInbox"]
- else
- nil
end
birthday =
@@ -1507,16 +1592,17 @@ defp object_to_user_data(data) do
{:ok, date} -> date
{:error, _} -> nil
end
- else
- nil
end
show_birthday = !!birthday
- user_data = %{
+ # If a WebFinger request was already made, we probably have the acct;
+ # otherwise we do the WebFinger request here
+ nickname = additional[:nickname_from_acct] || generate_nickname(data)
+
+ %{
ap_id: data["id"],
uri: get_actor_url(data["url"]),
- ap_enabled: true,
banner: normalize_image(data["image"]),
fields: fields,
emoji: emojis,
@@ -1535,23 +1621,29 @@ defp object_to_user_data(data) do
inbox: data["inbox"],
shared_inbox: shared_inbox,
accepts_chat_messages: accepts_chat_messages,
- pinned_objects: pinned_objects,
birthday: birthday,
- show_birthday: show_birthday
+ show_birthday: show_birthday,
+ pinned_objects: pinned_objects,
+ nickname: nickname
}
+ end
- # nickname can be nil because of virtual actors
- if data["preferredUsername"] do
- Map.put(
- user_data,
- :nickname,
- "#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}"
- )
+ defp generate_nickname(%{"preferredUsername" => username} = data) when is_binary(username) do
+ generated = "#{username}@#{URI.parse(data["id"]).host}"
+
+ if Config.get([WebFinger, :update_nickname_on_user_fetch]) do
+ case WebFinger.finger(generated) do
+ {:ok, %{"subject" => "acct:" <> acct}} -> acct
+ _ -> generated
+ end
else
- Map.put(user_data, :nickname, nil)
+ generated
end
end
+ # nickname can be nil because of virtual actors
+ defp generate_nickname(_), do: nil
+
def fetch_follow_information_for_user(user) do
with {:ok, following_data} <-
Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
@@ -1615,25 +1707,23 @@ defp collection_private(%{"first" => first}) do
Fetcher.fetch_and_contain_remote_object_from_id(first) do
{:ok, false}
else
- {:error, {:ok, %{status: code}}} when code in [401, 403] -> {:ok, true}
- {:error, _} = e -> e
- e -> {:error, e}
+ {:error, _} -> {:ok, true}
end
end
defp collection_private(_data), do: {:ok, true}
- def user_data_from_user_object(data) do
+ def user_data_from_user_object(data, additional \\ []) do
with {:ok, data} <- MRF.filter(data) do
- {:ok, object_to_user_data(data)}
+ {:ok, object_to_user_data(data, additional)}
else
e -> {:error, e}
end
end
- def fetch_and_prepare_user_from_ap_id(ap_id) do
+ defp fetch_and_prepare_user_from_ap_id(ap_id, additional) do
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
- {:ok, data} <- user_data_from_user_object(data) do
+ {:ok, data} <- user_data_from_user_object(data, additional) do
{:ok, maybe_update_follow_information(data)}
else
# If this has been deleted, only log a debug and not an error
@@ -1684,6 +1774,11 @@ def pin_data_from_featured_collection(%{
end)
end
+ def pin_data_from_featured_collection(obj) do
+ Logger.error("Could not parse featured collection #{inspect(obj)}")
+ %{}
+ end
+
def fetch_and_prepare_featured_from_ap_id(nil) do
{:ok, %{}}
end
@@ -1711,34 +1806,31 @@ def pinned_fetch_task(%{pinned_objects: pins}) do
end
end
- def make_user_from_ap_id(ap_id) do
+ def make_user_from_ap_id(ap_id, additional \\ []) do
user = User.get_cached_by_ap_id(ap_id)
- if user && !User.ap_enabled?(user) do
- Transmogrifier.upgrade_user_from_ap_id(ap_id)
- else
- with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
- {:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
+ with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
+ {:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
- if user do
- user
- |> User.remote_user_changeset(data)
- |> User.update_and_set_cache()
- else
- maybe_handle_clashing_nickname(data)
+ if user do
+ user
+ |> User.remote_user_changeset(data)
+ |> User.update_and_set_cache()
+ else
+ maybe_handle_clashing_nickname(data)
- data
- |> User.remote_user_changeset()
- |> Repo.insert()
- |> User.set_cache()
- end
+ data
+ |> User.remote_user_changeset()
+ |> Repo.insert()
+ |> User.set_cache()
end
end
end
def make_user_from_nickname(nickname) do
- with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
- make_user_from_ap_id(ap_id)
+ with {:ok, %{"ap_id" => ap_id, "subject" => "acct:" <> acct}} when not is_nil(ap_id) <-
+ WebFinger.finger(nickname) do
+ make_user_from_ap_id(ap_id, nickname_from_acct: acct)
else
_e -> {:error, "No AP id in WebFinger"}
end
@@ -1760,4 +1852,9 @@ def fetch_direct_messages_query do
|> restrict_visibility(%{visibility: "direct"})
|> order_by([activity], asc: activity.id)
end
+
+ defp maybe_restrict_deactivated_users(activity, %{type: "Flag"}), do: activity
+
+ defp maybe_restrict_deactivated_users(activity, _opts),
+ do: Activity.restrict_deactivated_users(activity)
end
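
restrict_unauthenticated/2 only applies to anonymous context queries (user is nil) and keys off Config.restrict_unauthenticated_access?(:activities, :local | :remote). A config sketch, assuming the existing :restrict_unauthenticated option shape:

    # config/config.exs (sketch) — hide remote activities from logged-out
    # requests while keeping local ones visible
    config :pleroma, :restrict_unauthenticated,
      activities: %{local: false, remote: true}
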
diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex
index b8f63d69d..e38a94966 100644
--- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex
@@ -66,8 +66,7 @@ defp relay_active?(conn, _) do
end
def user(conn, %{"nickname" => nickname}) do
- with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
- {:ok, user} <- User.ensure_keys_present(user) do
+ with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@@ -174,7 +173,6 @@ def relay_following(conn, _params) do
def following(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname, "page" => page}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
- {user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user),
{:show_follows, true} <-
{:show_follows, (for_user && for_user == user) || !user.hide_follows} do
{page, _} = Integer.parse(page)
@@ -192,8 +190,7 @@ def following(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname, "p
end
def following(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname}) do
- with %User{} = user <- User.get_cached_by_nickname(nickname),
- {user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
+ with %User{} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@@ -213,7 +210,6 @@ def relay_followers(conn, _params) do
def followers(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname, "page" => page}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
- {user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user),
{:show_followers, true} <-
{:show_followers, (for_user && for_user == user) || !user.hide_followers} do
{page, _} = Integer.parse(page)
@@ -231,8 +227,7 @@ def followers(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname, "p
end
def followers(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname}) do
- with %User{} = user <- User.get_cached_by_nickname(nickname),
- {user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
+ with %User{} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@@ -245,8 +240,7 @@ def outbox(
%{"nickname" => nickname, "page" => page?} = params
)
when page? in [true, "true"] do
- with %User{} = user <- User.get_cached_by_nickname(nickname),
- {:ok, user} <- User.ensure_keys_present(user) do
+ with %User{} = user <- User.get_cached_by_nickname(nickname) do
# "include_poll_votes" is a hack because postgres generates inefficient
# queries when filtering by 'Answer', poll votes will be hidden by the
# visibility filter in this case anyway
@@ -270,8 +264,7 @@ def outbox(
end
def outbox(conn, %{"nickname" => nickname}) do
- with %User{} = user <- User.get_cached_by_nickname(nickname),
- {:ok, user} <- User.ensure_keys_present(user) do
+ with %User{} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@@ -280,12 +273,17 @@ def outbox(conn, %{"nickname" => nickname}) do
end
def inbox(%{assigns: %{valid_signature: true}} = conn, %{"nickname" => nickname} = params) do
- with %User{} = recipient <- User.get_cached_by_nickname(nickname),
- {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
+ with %User{is_active: true} = recipient <- User.get_cached_by_nickname(nickname),
+ {:ok, %User{is_active: true} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
true <- Utils.recipient_in_message(recipient, actor, params),
params <- Utils.maybe_splice_recipient(recipient.ap_id, params) do
Federator.incoming_ap_doc(params)
json(conn, "ok")
+ else
+ _ ->
+ conn
+ |> put_status(:bad_request)
+ |> json("Invalid request.")
end
end
@@ -294,10 +292,9 @@ def inbox(%{assigns: %{valid_signature: true}} = conn, params) do
json(conn, "ok")
end
- def inbox(%{assigns: %{valid_signature: false}} = conn, _params) do
- conn
- |> put_status(:bad_request)
- |> json("Invalid HTTP Signature")
+ def inbox(%{assigns: %{valid_signature: false}, req_headers: req_headers} = conn, params) do
+ Federator.incoming_ap_doc(%{req_headers: req_headers, params: params})
+ json(conn, "ok")
end
# POST /relay/inbox -or- POST /internal/fetch/inbox
@@ -328,14 +325,10 @@ defp post_inbox_relayed_create(conn, params) do
end
defp represent_service_actor(%User{} = user, conn) do
- with {:ok, user} <- User.ensure_keys_present(user) do
- conn
- |> put_resp_content_type("application/activity+json")
- |> put_view(UserView)
- |> render("user.json", %{user: user})
- else
- nil -> {:error, :not_found}
- end
+ conn
+ |> put_resp_content_type("application/activity+json")
+ |> put_view(UserView)
+ |> render("user.json", %{user: user})
end
defp represent_service_actor(nil, _), do: {:error, :not_found}
@@ -388,12 +381,10 @@ def read_inbox(
def read_inbox(%{assigns: %{user: %User{nickname: nickname} = user}} = conn, %{
"nickname" => nickname
}) do
- with {:ok, user} <- User.ensure_keys_present(user) do
- conn
- |> put_resp_content_type("application/activity+json")
- |> put_view(UserView)
- |> render("activity_collection.json", %{iri: "#{user.ap_id}/inbox"})
- end
+ conn
+ |> put_resp_content_type("application/activity+json")
+ |> put_view(UserView)
+ |> render("activity_collection.json", %{iri: "#{user.ap_id}/inbox"})
end
def read_inbox(%{assigns: %{user: %User{nickname: as_nickname}}} = conn, %{
@@ -489,7 +480,7 @@ def update_outbox(
|> json(message)
e ->
- Logger.warn(fn -> "AP C2S: #{inspect(e)}" end)
+ Logger.warning(fn -> "AP C2S: #{inspect(e)}" end)
conn
|> put_status(:bad_request)
@@ -530,19 +521,6 @@ defp set_requester_reachable(%Plug.Conn{} = conn, _) do
conn
end
- defp ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
- {:ok, new_user} = User.ensure_keys_present(user)
-
- for_user =
- if new_user != user and match?(%User{}, for_user) do
- User.get_cached_by_nickname(for_user.nickname)
- else
- for_user
- end
-
- {new_user, for_user}
- end
-
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
with {:ok, object} <-
ActivityPub.upload(
diff --git a/lib/pleroma/web/activity_pub/builder.ex b/lib/pleroma/web/activity_pub/builder.ex
index 5b25138a4..2a1e56278 100644
--- a/lib/pleroma/web/activity_pub/builder.ex
+++ b/lib/pleroma/web/activity_pub/builder.ex
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
This module encodes our addressing policies and general shape of our objects.
"""
+ alias Pleroma.Activity
alias Pleroma.Emoji
alias Pleroma.Object
alias Pleroma.User
@@ -16,6 +17,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
+ alias Pleroma.Web.Endpoint
require Pleroma.Constants
@@ -54,13 +56,87 @@ def follow(follower, followed) do
{:ok, data, []}
end
+ defp unicode_emoji_react(_object, data, emoji) do
+ data
+ |> Map.put("content", emoji)
+ |> Map.put("type", "EmojiReact")
+ end
+
+ defp add_emoji_content(data, emoji, url) do
+ tag = [
+ %{
+ "id" => url,
+ "type" => "Emoji",
+ "name" => Emoji.maybe_quote(emoji),
+ "icon" => %{
+ "type" => "Image",
+ "url" => url
+ }
+ }
+ ]
+
+ data
+ |> Map.put("content", Emoji.maybe_quote(emoji))
+ |> Map.put("type", "EmojiReact")
+ |> Map.put("tag", tag)
+ end
+
+ defp remote_custom_emoji_react(
+ %{data: %{"reactions" => existing_reactions}},
+ data,
+ emoji
+ ) do
+ [emoji_code, instance] = String.split(Emoji.maybe_strip_name(emoji), "@")
+
+ matching_reaction =
+ Enum.find(
+ existing_reactions,
+ fn [name, _, url] ->
+ if url != nil do
+ url = URI.parse(url)
+ url.host == instance && name == emoji_code
+ end
+ end
+ )
+
+ if matching_reaction do
+ [name, _, url] = matching_reaction
+ add_emoji_content(data, name, url)
+ else
+ {:error, "Could not react"}
+ end
+ end
+
+ defp remote_custom_emoji_react(_object, _data, _emoji) do
+ {:error, "Could not react"}
+ end
+
+ defp local_custom_emoji_react(data, emoji) do
+ with %{file: path} = emojo <- Emoji.get(emoji) do
+ url = "#{Endpoint.url()}#{path}"
+ add_emoji_content(data, emojo.code, url)
+ else
+ _ -> {:error, "Emoji does not exist"}
+ end
+ end
+
+ defp custom_emoji_react(object, data, emoji) do
+ if String.contains?(emoji, "@") do
+ remote_custom_emoji_react(object, data, emoji)
+ else
+ local_custom_emoji_react(data, emoji)
+ end
+ end
+
@spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()}
def emoji_react(actor, object, emoji) do
with {:ok, data, meta} <- object_action(actor, object) do
data =
- data
- |> Map.put("content", emoji)
- |> Map.put("type", "EmojiReact")
+ if Emoji.unicode?(emoji) do
+ unicode_emoji_react(object, data, emoji)
+ else
+ custom_emoji_react(object, data, emoji)
+ end
{:ok, data, meta}
end
@@ -142,6 +218,7 @@ def note(%ActivityDraft{} = draft) do
"tag" => Keyword.values(draft.tags) |> Enum.uniq()
}
|> add_in_reply_to(draft.in_reply_to)
+ |> add_quote(draft.quote_post)
|> Map.merge(draft.extra)
{:ok, data, []}
@@ -157,6 +234,16 @@ defp add_in_reply_to(object, in_reply_to) do
end
end
+ defp add_quote(object, nil), do: object
+
+ defp add_quote(object, quote_post) do
+ with %Object{} = quote_object <- Object.normalize(quote_post, fetch: false) do
+ Map.put(object, "quoteUrl", quote_object.data["id"])
+ else
+ _ -> object
+ end
+ end
+
def chat_message(actor, recipient, content, opts \\ []) do
basic = %{
"id" => Utils.generate_object_id(),
@@ -218,10 +305,16 @@ def like(actor, object) do
end
end
- # Retricted to user updates for now, always public
@spec update(User.t(), Object.t()) :: {:ok, map(), keyword()}
def update(actor, object) do
- to = [Pleroma.Constants.as_public(), actor.follower_address]
+ {to, cc} =
+ if object["type"] in Pleroma.Constants.actor_types() do
+ # User updates, always public
+ {[Pleroma.Constants.as_public(), actor.follower_address], []}
+ else
+ # Status updates, follow the recipients in the object
+ {object["to"] || [], object["cc"] || []}
+ end
{:ok,
%{
@@ -229,7 +322,8 @@ def update(actor, object) do
"type" => "Update",
"actor" => actor.ap_id,
"object" => object,
- "to" => to
+ "to" => to,
+ "cc" => cc
}, []}
end
@@ -254,7 +348,7 @@ def announce(actor, object, options \\ []) do
actor.ap_id == Relay.ap_id() ->
[actor.follower_address]
- public? and Visibility.is_local_public?(object) ->
+ public? and Visibility.local_public?(object) ->
[actor.follower_address, object.data["actor"], Utils.as_local_public()]
public? ->
@@ -282,7 +376,7 @@ defp object_action(actor, object) do
# Address the actor of the object, and our actor's follower collection if the post is public.
to =
- if Visibility.is_public?(object) do
+ if Visibility.public?(object) do
[actor.follower_address, object.data["actor"]]
else
[object.data["actor"]]
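
With the dispatch above, `emoji_react/3` handles three shapes of `emoji`: Unicode emoji as before, local custom emoji resolved through `Emoji.get/1`, and remote custom emoji in `name@instance` form, which are only built when the object already records a matching reaction from that instance. A sketch of the three calls; actor, object and shortcodes are illustrative, and whether a shortcode arrives with surrounding colons is up to the caller (`Emoji.maybe_strip_name/1` and `Emoji.maybe_quote/1` normalize it):

# Unicode reaction, unchanged behaviour:
{:ok, data, meta} = Builder.emoji_react(actor, object, "👍")

# Local custom emoji: becomes an EmojiReact carrying an "Emoji" tag whose icon
# URL points at this instance (Endpoint.url() <> path):
{:ok, data, meta} = Builder.emoji_react(actor, object, "dinosaur")

# Remote custom emoji: only valid when object.data["reactions"] already holds a
# reaction with the same name from that host; otherwise remote_custom_emoji_react/3
# produces {:error, "Could not react"} instead of reaction data:
{:ok, data, meta} = Builder.emoji_react(actor, object, "dinosaur@remote.example")
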
diff --git a/lib/pleroma/web/activity_pub/mrf.ex b/lib/pleroma/web/activity_pub/mrf.ex
index 323ecdbf1..1071f8e6e 100644
--- a/lib/pleroma/web/activity_pub/mrf.ex
+++ b/lib/pleroma/web/activity_pub/mrf.ex
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
+# Copyright © 2017-2023 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF do
@@ -53,10 +53,55 @@ defmodule Pleroma.Web.ActivityPub.MRF do
@required_description_keys [:key, :related_policy]
+ def filter_one(policy, message) do
+ Code.ensure_loaded(policy)
+
+ should_plug_history? =
+ if function_exported?(policy, :history_awareness, 0) do
+ policy.history_awareness()
+ else
+ :manual
+ end
+ |> Kernel.==(:auto)
+
+ if not should_plug_history? do
+ policy.filter(message)
+ else
+ main_result = policy.filter(message)
+
+ with {_, {:ok, main_message}} <- {:main, main_result},
+ {_,
+ %{
+ "formerRepresentations" => %{
+ "orderedItems" => [_ | _]
+ }
+ }} = {_, object} <- {:object, message["object"]},
+ {_, {:ok, new_history}} <-
+ {:history,
+ Pleroma.Object.Updater.for_each_history_item(
+ object["formerRepresentations"],
+ object,
+ fn item ->
+ with {:ok, filtered} <- policy.filter(Map.put(message, "object", item)) do
+ {:ok, filtered["object"]}
+ else
+ e -> e
+ end
+ end
+ )} do
+ {:ok, put_in(main_message, ["object", "formerRepresentations"], new_history)}
+ else
+ {:main, _} -> main_result
+ {:object, _} -> main_result
+ {:history, e} -> e
+ end
+ end
+ end
+
def filter(policies, %{} = message) do
policies
|> Enum.reduce({:ok, message}, fn
- policy, {:ok, message} -> policy.filter(message)
+ policy, {:ok, message} -> filter_one(policy, message)
_, error -> error
end)
end
@@ -94,7 +139,16 @@ defp get_policies(_), do: []
@spec subdomains_regex([String.t()]) :: [Regex.t()]
def subdomains_regex(domains) when is_list(domains) do
- for domain <- domains, do: ~r(^#{String.replace(domain, "*.", "(.*\\.)*")}$)i
+ for domain <- domains do
+ try do
+ target = String.replace(domain, "*.", "(.*\\.)*")
+ ~r<^#{target}$>i
+ rescue
+ e ->
+ Logger.error("MRF: Invalid subdomain Regex: #{domain}")
+ reraise e, __STACKTRACE__
+ end
+ end
end
@spec subdomain_match?([Regex.t()], String.t()) :: boolean()
@@ -145,6 +199,8 @@ def config_descriptions do
def config_descriptions(policies) do
Enum.reduce(policies, @mrf_config_descriptions, fn policy, acc ->
+ Code.ensure_loaded(policy)
+
if function_exported?(policy, :config_description, 0) do
description =
@default_description
@@ -156,7 +212,7 @@ def config_descriptions(policies) do
if Enum.all?(@required_description_keys, &Map.has_key?(description, &1)) do
[description | acc]
else
- Logger.warn(
+ Logger.warning(
"#{policy} config description doesn't have one or all required keys #{inspect(@required_description_keys)}"
)
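
`filter_one/2` is what backs the new `history_awareness/0` callback: a policy that returns `:auto` has its `filter/1` applied not only to the current object but to every item in `formerRepresentations.orderedItems`, without writing any history handling of its own. A minimal sketch of such a policy; the module name and the rejection rule are made up for illustration:

defmodule Pleroma.Web.ActivityPub.MRF.ExampleHistoryAwarePolicy do
  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  @impl true
  def history_awareness, do: :auto

  @impl true
  def filter(%{"object" => %{"content" => content}} = message) when is_binary(content) do
    # filter_one/2 calls this for the current revision and, because of :auto
    # above, once per item in "formerRepresentations" as well.
    if String.contains?(content, "forbidden"),
      do: {:reject, "[ExampleHistoryAwarePolicy] matched"},
      else: {:ok, message}
  end

  @impl true
  def filter(message), do: {:ok, message}

  @impl true
  def describe, do: {:ok, %{}}
end
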
diff --git a/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex
index 97d75ecf2..df4ba819c 100644
--- a/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex
@@ -56,8 +56,6 @@ defp determine_if_followbot(%User{nickname: nickname, name: displayname, actor_t
nick_score + name_score + actor_type_score
end
- defp determine_if_followbot(_), do: 0.0
-
defp bot_allowed?(%{"object" => target}, bot_actor) do
%User{} = user = normalize_by_ap_id(target)
diff --git a/lib/pleroma/web/activity_pub/mrf/anti_link_spam_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_link_spam_policy.ex
index f0504ead4..3ec9c52ee 100644
--- a/lib/pleroma/web/activity_pub/mrf/anti_link_spam_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/anti_link_spam_policy.ex
@@ -9,6 +9,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy do
require Logger
+ @impl true
+ def history_awareness, do: :auto
+
# has the user successfully posted before?
defp old_user?(%User{} = u) do
u.note_count > 0 || u.follower_count > 0
diff --git a/lib/pleroma/web/activity_pub/mrf/emoji_policy.ex b/lib/pleroma/web/activity_pub/mrf/emoji_policy.ex
new file mode 100644
index 000000000..f884962b9
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/emoji_policy.ex
@@ -0,0 +1,281 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.EmojiPolicy do
+ require Pleroma.Constants
+
+ alias Pleroma.Object.Updater
+ alias Pleroma.Web.ActivityPub.MRF.Utils
+
+ @moduledoc "Reject or force-unlisted emojis with certain URLs or names"
+
+ @behaviour Pleroma.Web.ActivityPub.MRF.Policy
+
+ defp config_remove_url do
+ Pleroma.Config.get([:mrf_emoji, :remove_url], [])
+ end
+
+ defp config_remove_shortcode do
+ Pleroma.Config.get([:mrf_emoji, :remove_shortcode], [])
+ end
+
+ defp config_unlist_url do
+ Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_url], [])
+ end
+
+ defp config_unlist_shortcode do
+ Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_shortcode], [])
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def history_awareness, do: :manual
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def filter(%{"type" => type, "object" => %{"type" => objtype} = object} = message)
+ when type in ["Create", "Update"] and objtype in Pleroma.Constants.status_object_types() do
+ with {:ok, object} <-
+ Updater.do_with_history(object, fn object ->
+ {:ok, process_remove(object, :url, config_remove_url())}
+ end),
+ {:ok, object} <-
+ Updater.do_with_history(object, fn object ->
+ {:ok, process_remove(object, :shortcode, config_remove_shortcode())}
+ end),
+ activity <- Map.put(message, "object", object),
+ activity <- maybe_delist(activity) do
+ {:ok, activity}
+ end
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def filter(%{"type" => type} = object) when type in Pleroma.Constants.actor_types() do
+ with object <- process_remove(object, :url, config_remove_url()),
+ object <- process_remove(object, :shortcode, config_remove_shortcode()) do
+ {:ok, object}
+ end
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def filter(%{"type" => "EmojiReact"} = object) do
+ with {:ok, _} <-
+ matched_emoji_checker(config_remove_url(), config_remove_shortcode()).(object) do
+ {:ok, object}
+ else
+ _ ->
+ {:reject, "[EmojiPolicy] Rejected for having disallowed emoji"}
+ end
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def filter(message) do
+ {:ok, message}
+ end
+
+ defp match_string?(string, pattern) when is_binary(pattern) do
+ string == pattern
+ end
+
+ defp match_string?(string, %Regex{} = pattern) do
+ String.match?(string, pattern)
+ end
+
+ defp match_any?(string, patterns) do
+ Enum.any?(patterns, &match_string?(string, &1))
+ end
+
+ defp url_from_tag(%{"icon" => %{"url" => url}}), do: url
+ defp url_from_tag(_), do: nil
+
+ defp url_from_emoji({_name, url}), do: url
+
+ defp shortcode_from_tag(%{"name" => name}) when is_binary(name), do: String.trim(name, ":")
+ defp shortcode_from_tag(_), do: nil
+
+ defp shortcode_from_emoji({name, _url}), do: name
+
+ defp process_remove(object, :url, patterns) do
+ process_remove_impl(object, &url_from_tag/1, &url_from_emoji/1, patterns)
+ end
+
+ defp process_remove(object, :shortcode, patterns) do
+ process_remove_impl(object, &shortcode_from_tag/1, &shortcode_from_emoji/1, patterns)
+ end
+
+ defp process_remove_impl(object, extract_from_tag, extract_from_emoji, patterns) do
+ object =
+ if object["tag"] do
+ Map.put(
+ object,
+ "tag",
+ Enum.filter(
+ object["tag"],
+ fn
+ %{"type" => "Emoji"} = tag ->
+ str = extract_from_tag.(tag)
+
+ if is_binary(str) do
+ not match_any?(str, patterns)
+ else
+ true
+ end
+
+ _ ->
+ true
+ end
+ )
+ )
+ else
+ object
+ end
+
+ object =
+ if object["emoji"] do
+ Map.put(
+ object,
+ "emoji",
+ object["emoji"]
+ |> Enum.reduce(%{}, fn {name, url} = emoji, acc ->
+ if not match_any?(extract_from_emoji.(emoji), patterns) do
+ Map.put(acc, name, url)
+ else
+ acc
+ end
+ end)
+ )
+ else
+ object
+ end
+
+ object
+ end
+
+ defp matched_emoji_checker(urls, shortcodes) do
+ fn object ->
+ if any_emoji_match?(object, &url_from_tag/1, &url_from_emoji/1, urls) or
+ any_emoji_match?(
+ object,
+ &shortcode_from_tag/1,
+ &shortcode_from_emoji/1,
+ shortcodes
+ ) do
+ {:matched, nil}
+ else
+ {:ok, %{}}
+ end
+ end
+ end
+
+ defp maybe_delist(%{"object" => object, "to" => to, "type" => "Create"} = activity) do
+ check = matched_emoji_checker(config_unlist_url(), config_unlist_shortcode())
+
+ should_delist? = fn object ->
+ with {:ok, _} <- Pleroma.Object.Updater.do_with_history(object, check) do
+ false
+ else
+ _ -> true
+ end
+ end
+
+ if Pleroma.Constants.as_public() in to and should_delist?.(object) do
+ to = List.delete(to, Pleroma.Constants.as_public())
+ cc = [Pleroma.Constants.as_public() | activity["cc"] || []]
+
+ activity
+ |> Map.put("to", to)
+ |> Map.put("cc", cc)
+ else
+ activity
+ end
+ end
+
+ defp maybe_delist(activity), do: activity
+
+ defp any_emoji_match?(object, extract_from_tag, extract_from_emoji, patterns) do
+ Kernel.||(
+ Enum.any?(
+ object["tag"] || [],
+ fn
+ %{"type" => "Emoji"} = tag ->
+ str = extract_from_tag.(tag)
+
+ if is_binary(str) do
+ match_any?(str, patterns)
+ else
+ false
+ end
+
+ _ ->
+ false
+ end
+ ),
+ (object["emoji"] || [])
+ |> Enum.any?(fn emoji -> match_any?(extract_from_emoji.(emoji), patterns) end)
+ )
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def describe do
+ mrf_emoji =
+ Pleroma.Config.get(:mrf_emoji, [])
+ |> Enum.map(fn {key, value} ->
+ {key, Enum.map(value, &Utils.describe_regex_or_string/1)}
+ end)
+ |> Enum.into(%{})
+
+ {:ok, %{mrf_emoji: mrf_emoji}}
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def config_description do
+ %{
+ key: :mrf_emoji,
+ related_policy: "Pleroma.Web.ActivityPub.MRF.EmojiPolicy",
+ label: "MRF Emoji",
+ description:
+ "Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html).",
+ children: [
+ %{
+ key: :remove_url,
+ type: {:list, :string},
+ description: """
+ A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
+
+ Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
+ """,
+ suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
+ },
+ %{
+ key: :remove_shortcode,
+ type: {:list, :string},
+ description: """
+ A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
+
+ Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
+ """,
+ suggestions: ["foo", ~r/foo/iu]
+ },
+ %{
+ key: :federated_timeline_removal_url,
+ type: {:list, :string},
+ description: """
+      A list of patterns which result in messages with emojis whose URLs match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses.
+
+ Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
+ """,
+ suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
+ },
+ %{
+ key: :federated_timeline_removal_shortcode,
+ type: {:list, :string},
+ description: """
+      A list of patterns which result in messages with emojis whose shortcodes match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses.
+
+ Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
+ """,
+ suggestions: ["foo", ~r/foo/iu]
+ }
+ ]
+ }
+ end
+end
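
A configuration sketch matching the description entries above; the values are placeholders, and each pattern may be a plain string or a `~r//` regex. The policy itself still has to be enabled in the usual `:mrf, :policies` list.

config :pleroma, :mrf_emoji,
  # strip matching emoji from statuses, reactions and user profiles
  remove_url: ["https://bad.example/emoji/foo.png", ~r/bad\.example\/emoji/iu],
  remove_shortcode: ["badcat", ~r/^nsfw_/iu],
  # only unlist public statuses that use matching emoji
  federated_timeline_removal_url: [~r/spicy\.example/iu],
  federated_timeline_removal_shortcode: ["spicy"]
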
diff --git a/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex b/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex
index 51596c09f..a148cc1e7 100644
--- a/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex
+++ b/lib/pleroma/web/activity_pub/mrf/ensure_re_prepended.ex
@@ -10,6 +10,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrepended do
@reply_prefix Regex.compile!("^re:[[:space:]]*", [:caseless])
+ def history_awareness, do: :auto
+
def filter_by_summary(
%{data: %{"summary" => parent_summary}} = _in_reply_to,
%{"summary" => child_summary} = child
@@ -27,8 +29,8 @@ def filter_by_summary(
def filter_by_summary(_in_reply_to, child), do: child
- def filter(%{"type" => "Create", "object" => child_object} = object)
- when is_map(child_object) do
+ def filter(%{"type" => type, "object" => child_object} = object)
+ when type in ["Create", "Update"] and is_map(child_object) do
child =
child_object["inReplyTo"]
|> Object.normalize(fetch: false)
diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex
index 5b6adbb4b..5a4a97626 100644
--- a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex
@@ -19,7 +19,7 @@ def filter(message) do
try_follow(follower, message)
else
nil ->
- Logger.warn(
+ Logger.warning(
"#{__MODULE__} skipped because of missing `:mrf_follow_bot, :follower_nickname` configuration, the :follower_nickname
account does not exist, or the account is not correctly configured as a bot."
)
diff --git a/lib/pleroma/web/activity_pub/mrf/force_mention.ex b/lib/pleroma/web/activity_pub/mrf/force_mention.ex
new file mode 100644
index 000000000..3853489fc
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/force_mention.ex
@@ -0,0 +1,59 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.ForceMention do
+ require Pleroma.Constants
+
+ alias Pleroma.Config
+ alias Pleroma.Object
+ alias Pleroma.User
+
+ @behaviour Pleroma.Web.ActivityPub.MRF.Policy
+
+ defp get_author(url) do
+ with %Object{data: %{"actor" => actor}} <- Object.normalize(url, fetch: false),
+ %User{ap_id: ap_id, nickname: nickname} <- User.get_cached_by_ap_id(actor) do
+ %{"type" => "Mention", "href" => ap_id, "name" => "@#{nickname}"}
+ else
+ _ -> nil
+ end
+ end
+
+ defp prepend_author(tags, _, false), do: tags
+
+ defp prepend_author(tags, nil, _), do: tags
+
+ defp prepend_author(tags, url, _) do
+ actor = get_author(url)
+
+ if not is_nil(actor) do
+ [actor | tags]
+ else
+ tags
+ end
+ end
+
+ @impl true
+ def filter(%{"type" => "Create", "object" => %{"tag" => tag} = object} = activity) do
+ tag =
+ tag
+ |> prepend_author(
+ object["inReplyTo"],
+        Config.get([:mrf_force_mention, :mention_parent], true)
+ )
+ |> prepend_author(
+ object["quoteUrl"],
+        Config.get([:mrf_force_mention, :mention_quoted], true)
+ )
+ |> Enum.uniq()
+
+ {:ok, put_in(activity["object"]["tag"], tag)}
+ end
+
+ @impl true
+ def filter(object), do: {:ok, object}
+
+ @impl true
+ def describe, do: {:ok, %{}}
+end
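
Both options are read with a default of `true`, so a deployment only needs to set them to turn one of the mentions off; a sketch:

config :pleroma, :mrf_force_mention,
  # add a Mention tag for the author of the post being replied to
  mention_parent: true,
  # add a Mention tag for the author of the quoted post
  mention_quoted: false
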
diff --git a/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex b/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex
index 255910b2f..5532093cb 100644
--- a/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex
+++ b/lib/pleroma/web/activity_pub/mrf/force_mentions_in_content.ex
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
+# Copyright © 2017-2023 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
@@ -11,6 +11,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
+ @impl true
+ def history_awareness, do: :auto
+
defp do_extract({:a, attrs, _}, acc) do
if Enum.find(attrs, fn {name, value} ->
name == "class" && value in ["mention", "u-url mention", "mention u-url"]
@@ -74,11 +77,11 @@ defp clean_recipients(recipients, object) do
@impl true
def filter(
%{
- "type" => "Create",
+ "type" => type,
"object" => %{"type" => "Note", "to" => to, "inReplyTo" => in_reply_to}
} = object
)
- when is_list(to) and is_binary(in_reply_to) do
+ when type in ["Create", "Update"] and is_list(to) and is_binary(in_reply_to) do
# image-only posts from pleroma apparently reach this MRF without the content field
content = object["object"]["content"] || ""
@@ -92,11 +95,13 @@ def filter(
|> Enum.reject(&is_nil/1)
|> sort_replied_user(replied_to_user)
- explicitly_mentioned_uris = extract_mention_uris_from_content(content)
+ explicitly_mentioned_uris =
+ extract_mention_uris_from_content(content)
+ |> MapSet.new()
added_mentions =
- Enum.reduce(mention_users, "", fn %User{ap_id: uri} = user, acc ->
- unless uri in explicitly_mentioned_uris do
+ Enum.reduce(mention_users, "", fn %User{ap_id: ap_id, uri: uri} = user, acc ->
+ if MapSet.disjoint?(MapSet.new([ap_id, uri]), explicitly_mentioned_uris) do
acc <> Formatter.mention_from_user(user, %{mentions_format: :compact}) <> " "
else
acc
diff --git a/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex
index 2142b7add..fdb9a9dba 100644
--- a/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex
@@ -9,13 +9,16 @@ defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do
alias Pleroma.Object
@moduledoc """
- Reject, TWKN-remove or Set-Sensitive messsages with specific hashtags (without the leading #)
+ Reject, TWKN-remove or Set-Sensitive messages with specific hashtags (without the leading #)
Note: This MRF Policy is always enabled, if you want to disable it you have to set empty lists.
"""
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
+ @impl true
+ def history_awareness, do: :manual
+
defp check_reject(message, hashtags) do
if Enum.any?(Config.get([:mrf_hashtag, :reject]), fn match -> match in hashtags end) do
{:reject, "[HashtagPolicy] Matches with rejected keyword"}
@@ -47,22 +50,46 @@ defp check_ftl_removal(%{"to" => to} = message, hashtags) do
defp check_ftl_removal(message, _hashtags), do: {:ok, message}
- defp check_sensitive(message, hashtags) do
- if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do
- {:ok, Kernel.put_in(message, ["object", "sensitive"], true)}
- else
- {:ok, message}
- end
+ defp check_sensitive(message) do
+ {:ok, new_object} =
+ Object.Updater.do_with_history(message["object"], fn object ->
+ hashtags = Object.hashtags(%Object{data: object})
+
+ if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do
+ {:ok, Map.put(object, "sensitive", true)}
+ else
+ {:ok, object}
+ end
+ end)
+
+ {:ok, Map.put(message, "object", new_object)}
end
@impl true
- def filter(%{"type" => "Create", "object" => object} = message) do
- hashtags = Object.hashtags(%Object{data: object})
+ def filter(%{"type" => type, "object" => object} = message) when type in ["Create", "Update"] do
+ history_items =
+ with %{"formerRepresentations" => %{"orderedItems" => items}} <- object do
+ items
+ else
+ _ -> []
+ end
+
+ historical_hashtags =
+ Enum.reduce(history_items, [], fn item, acc ->
+ acc ++ Object.hashtags(%Object{data: item})
+ end)
+
+ hashtags = Object.hashtags(%Object{data: object}) ++ historical_hashtags
if hashtags != [] do
with {:ok, message} <- check_reject(message, hashtags),
- {:ok, message} <- check_ftl_removal(message, hashtags),
- {:ok, message} <- check_sensitive(message, hashtags) do
+ {:ok, message} <-
+ (if type == "Create" do
+ check_ftl_removal(message, hashtags)
+ else
+ {:ok, message}
+ end),
+ {:ok, message} <- check_sensitive(message) do
{:ok, message}
end
else
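
For reference, the hashtag policy configuration this code consults looks roughly like the following; tags are given without the leading `#`, the values are illustrative, and the `federated_timeline_removal` key name is assumed from the existing `check_ftl_removal/2` clause not shown in this hunk:

config :pleroma, :mrf_hashtag,
  # reject if the tag appears in the current object or any historical revision
  reject: ["qanon"],
  # TWKN removal, now applied on Create only
  federated_timeline_removal: ["lewd"],
  # sets "sensitive" on the object and, via do_with_history/2, on its history
  sensitive: ["nsfw"]
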
diff --git a/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex b/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex
new file mode 100644
index 000000000..b7a01c27c
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/inline_quote_policy.ex
@@ -0,0 +1,77 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy do
+ @moduledoc "Force a quote line into the message content."
+ @behaviour Pleroma.Web.ActivityPub.MRF.Policy
+
+ defp build_inline_quote(template, url) do
+    quote_line = String.replace(template, "{url}", "<a href=\"#{url}\">#{url}</a>")
+
+    "<span class=\"quote-inline\"><br/><br/>#{quote_line}</span>"
+ end
+
+ defp has_inline_quote?(content, quote_url) do
+ cond do
+ # Does the quote URL exist in the content?
+ content =~ quote_url -> true
+ # Does the content already have a .quote-inline span?
+ content =~ "" -> true
+ # No inline quote found
+ true -> false
+ end
+ end
+
+ defp filter_object(%{"quoteUrl" => quote_url} = object) do
+ content = object["content"] || ""
+
+ if has_inline_quote?(content, quote_url) do
+ object
+ else
+ template = Pleroma.Config.get([:mrf_inline_quote, :template])
+
+ content =
+ if String.ends_with?(content, "
"),
+ do:
+ String.trim_trailing(content, "") <>
+ build_inline_quote(template, quote_url) <> "",
+ else: content <> build_inline_quote(template, quote_url)
+
+ Map.put(object, "content", content)
+ end
+ end
+
+ @impl true
+ def filter(%{"object" => %{"quoteUrl" => _} = object} = activity) do
+ {:ok, Map.put(activity, "object", filter_object(object))}
+ end
+
+ @impl true
+ def filter(object), do: {:ok, object}
+
+ @impl true
+ def describe, do: {:ok, %{}}
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def history_awareness, do: :auto
+
+ @impl true
+ def config_description do
+ %{
+ key: :mrf_inline_quote,
+ related_policy: "Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy",
+ label: "MRF Inline Quote Policy",
+ description: "Force quote url to appear in post content.",
+ children: [
+ %{
+ key: :template,
+ type: :string,
+ description:
+ "The template to append to the post. `{url}` will be replaced with the actual link to the quoted post.",
+ suggestions: ["RT: {url}"]
+ }
+ ]
+ }
+ end
+end
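
The only setting the policy reads is the template, in which `{url}` is replaced with the quoted post's URL; for example:

config :pleroma, :mrf_inline_quote,
  template: "RT: {url}"
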
diff --git a/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex b/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex
index 00b64744f..729da4e9c 100644
--- a/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex
@@ -5,18 +5,17 @@
defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
require Pleroma.Constants
+ alias Pleroma.Web.ActivityPub.MRF.Utils
+
@moduledoc "Reject or Word-Replace messages with a keyword or regex"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
- defp string_matches?(string, _) when not is_binary(string) do
- false
- end
defp string_matches?(string, pattern) when is_binary(pattern) do
String.contains?(string, pattern)
end
- defp string_matches?(string, pattern) do
+ defp string_matches?(string, %Regex{} = pattern) do
String.match?(string, pattern)
end
@@ -27,24 +26,46 @@ defp object_payload(%{} = object) do
end
defp check_reject(%{"object" => %{} = object} = message) do
- payload = object_payload(object)
+ with {:ok, _new_object} <-
+ Pleroma.Object.Updater.do_with_history(object, fn object ->
+ payload = object_payload(object)
- if Enum.any?(Pleroma.Config.get([:mrf_keyword, :reject]), fn pattern ->
- string_matches?(payload, pattern)
- end) do
- {:reject, "[KeywordPolicy] Matches with rejected keyword"}
- else
+ if Enum.any?(Pleroma.Config.get([:mrf_keyword, :reject]), fn pattern ->
+ string_matches?(payload, pattern)
+ end) do
+ {:reject, "[KeywordPolicy] Matches with rejected keyword"}
+ else
+ {:ok, message}
+ end
+ end) do
{:ok, message}
+ else
+ e -> e
end
end
- defp check_ftl_removal(%{"to" => to, "object" => %{} = object} = message) do
- payload = object_payload(object)
+ defp check_ftl_removal(%{"type" => "Create", "to" => to, "object" => %{} = object} = message) do
+ check_keyword = fn object ->
+ payload = object_payload(object)
- if Pleroma.Constants.as_public() in to and
- Enum.any?(Pleroma.Config.get([:mrf_keyword, :federated_timeline_removal]), fn pattern ->
+ if Enum.any?(Pleroma.Config.get([:mrf_keyword, :federated_timeline_removal]), fn pattern ->
string_matches?(payload, pattern)
end) do
+ {:should_delist, nil}
+ else
+ {:ok, %{}}
+ end
+ end
+
+ should_delist? = fn object ->
+ with {:ok, _} <- Pleroma.Object.Updater.do_with_history(object, check_keyword) do
+ false
+ else
+ _ -> true
+ end
+ end
+
+ if Pleroma.Constants.as_public() in to and should_delist?.(object) do
to = List.delete(to, Pleroma.Constants.as_public())
cc = [Pleroma.Constants.as_public() | message["cc"] || []]
@@ -59,8 +80,12 @@ defp check_ftl_removal(%{"to" => to, "object" => %{} = object} = message) do
end
end
+ defp check_ftl_removal(message) do
+ {:ok, message}
+ end
+
defp check_replace(%{"object" => %{} = object} = message) do
- object =
+ replace_kw = fn object ->
["content", "name", "summary"]
|> Enum.filter(fn field -> Map.has_key?(object, field) && object[field] end)
|> Enum.reduce(object, fn field, object ->
@@ -73,6 +98,10 @@ defp check_replace(%{"object" => %{} = object} = message) do
Map.put(object, field, data)
end)
+ |> (fn object -> {:ok, object} end).()
+ end
+
+ {:ok, object} = Pleroma.Object.Updater.do_with_history(object, replace_kw)
message = Map.put(message, "object", object)
@@ -80,7 +109,8 @@ defp check_replace(%{"object" => %{} = object} = message) do
end
@impl true
- def filter(%{"type" => "Create", "object" => %{"content" => _content}} = message) do
+ def filter(%{"type" => type, "object" => %{"content" => _content}} = message)
+ when type in ["Create", "Update"] do
with {:ok, message} <- check_reject(message),
{:ok, message} <- check_ftl_removal(message),
{:ok, message} <- check_replace(message) do
@@ -97,7 +127,6 @@ def filter(message), do: {:ok, message}
@impl true
def describe do
- # This horror is needed to convert regex sigils to strings
mrf_keyword =
Pleroma.Config.get(:mrf_keyword, [])
|> Enum.map(fn {key, value} ->
@@ -105,21 +134,12 @@ def describe do
Enum.map(value, fn
{pattern, replacement} ->
%{
- "pattern" =>
- if not is_binary(pattern) do
- inspect(pattern)
- else
- pattern
- end,
+ "pattern" => Utils.describe_regex_or_string(pattern),
"replacement" => replacement
}
pattern ->
- if not is_binary(pattern) do
- inspect(pattern)
- else
- pattern
- end
+ Utils.describe_regex_or_string(pattern)
end)}
end)
|> Enum.into(%{})
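
A keyword policy configuration exercising the three checks above, which now also walk the edit history; reject and removal patterns may be strings or regexes, and replacements are `{pattern, replacement}` tuples (key names as in the stock config, values illustrative):

config :pleroma, :mrf_keyword,
  reject: ["casino", ~r/cheap pills/i],
  federated_timeline_removal: [~r/follow.?back/i],
  replace: [{"fck", "heck"}, {~r/winter/, "summer"}]
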
diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
index 0eac8f021..c95d35bb9 100644
--- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
@@ -16,6 +16,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
recv_timeout: 10_000
]
+ @impl true
+ def history_awareness, do: :auto
+
defp prefetch(url) do
# Fetching only proxiable resources
if MediaProxy.enabled?() and MediaProxy.url_proxiable?(url) do
@@ -54,10 +57,8 @@ defp preload(%{"object" => %{"attachment" => attachments}} = _message) do
end
@impl true
- def filter(
- %{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
- )
- when is_list(attachments) and length(attachments) > 0 do
+ def filter(%{"type" => type, "object" => %{"attachment" => attachments} = _object} = message)
+ when type in ["Create", "Update"] and is_list(attachments) and length(attachments) > 0 do
preload(message)
{:ok, message}
diff --git a/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex b/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex
index 4dc96e068..12bf4ddd2 100644
--- a/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/no_empty_policy.ex
@@ -10,8 +10,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoEmptyPolicy do
@impl true
def filter(%{"actor" => actor} = object) do
- with true <- is_local?(actor),
- true <- is_note?(object),
+ with true <- local?(actor),
+ true <- eligible_type?(object),
+ true <- note?(object),
false <- has_attachment?(object),
true <- only_mentions?(object) do
{:reject, "[NoEmptyPolicy]"}
@@ -23,7 +24,7 @@ def filter(%{"actor" => actor} = object) do
def filter(object), do: {:ok, object}
- defp is_local?(actor) do
+ defp local?(actor) do
if actor |> String.starts_with?("#{Endpoint.url()}") do
true
else
@@ -32,7 +33,6 @@ defp is_local?(actor) do
end
defp has_attachment?(%{
- "type" => "Create",
"object" => %{"type" => "Note", "attachment" => attachments}
})
when length(attachments) > 0,
@@ -40,7 +40,13 @@ defp has_attachment?(%{
defp has_attachment?(_), do: false
- defp only_mentions?(%{"type" => "Create", "object" => %{"type" => "Note", "source" => source}}) do
+ defp only_mentions?(%{"object" => %{"type" => "Note", "source" => source}}) do
+ source =
+ case source do
+ %{"content" => text} -> text
+ _ -> source
+ end
+
non_mentions =
source |> String.split() |> Enum.filter(&(not String.starts_with?(&1, "@"))) |> length
@@ -53,8 +59,11 @@ defp only_mentions?(%{"type" => "Create", "object" => %{"type" => "Note", "sourc
defp only_mentions?(_), do: false
- defp is_note?(%{"type" => "Create", "object" => %{"type" => "Note"}}), do: true
- defp is_note?(_), do: false
+ defp note?(%{"object" => %{"type" => "Note"}}), do: true
+ defp note?(_), do: false
+
+ defp eligible_type?(%{"type" => type}) when type in ["Create", "Update"], do: true
+ defp eligible_type?(_), do: false
@impl true
def describe, do: {:ok, %{}}
diff --git a/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex b/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex
index aab647d8e..f81e9e52a 100644
--- a/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/no_placeholder_text_policy.ex
@@ -6,14 +6,17 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
@moduledoc "Ensure no content placeholder is present (such as the dot from mastodon)"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
+ @impl true
+ def history_awareness, do: :auto
+
@impl true
def filter(
%{
- "type" => "Create",
+ "type" => type,
"object" => %{"content" => content, "attachment" => _} = _child_object
} = object
)
- when content in [".", ".
"] do
+ when type in ["Create", "Update"] and content in [".", ".
"] do
{:ok, put_in(object, ["object", "content"], "")}
end
diff --git a/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex b/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex
index dc2c19d49..2dfc9a901 100644
--- a/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex
+++ b/lib/pleroma/web/activity_pub/mrf/normalize_markup.ex
@@ -9,7 +9,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.NormalizeMarkup do
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@impl true
- def filter(%{"type" => "Create", "object" => child_object} = object) do
+ def history_awareness, do: :auto
+
+ @impl true
+ def filter(%{"type" => type, "object" => child_object} = object)
+ when type in ["Create", "Update"] do
scrub_policy = Pleroma.Config.get([:mrf_normalize_markup, :scrub_policy])
content =
diff --git a/lib/pleroma/web/activity_pub/mrf/object_age_policy.ex b/lib/pleroma/web/activity_pub/mrf/object_age_policy.ex
index 0e9d25a0a..df1a6dcbb 100644
--- a/lib/pleroma/web/activity_pub/mrf/object_age_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/object_age_policy.ex
@@ -131,7 +131,7 @@ def config_description do
type: {:list, :atom},
description:
"A list of actions to apply to the post. `:delist` removes the post from public timelines; " <>
- "`:strip_followers` removes followers from the ActivityPub recipient list ensuring they won't be delivered to home timelines; " <>
+ "`:strip_followers` removes followers from the ActivityPub recipient list ensuring they won't be delivered to home timelines, additionally for followers-only it degrades to a direct message; " <>
"`:reject` rejects the message entirely",
suggestions: [:delist, :strip_followers, :reject]
}
diff --git a/lib/pleroma/web/activity_pub/mrf/policy.ex b/lib/pleroma/web/activity_pub/mrf/policy.ex
index 0ac250c3d..1f34883e7 100644
--- a/lib/pleroma/web/activity_pub/mrf/policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/policy.ex
@@ -3,8 +3,8 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.Policy do
- @callback filter(Map.t()) :: {:ok | :reject, Map.t()}
- @callback describe() :: {:ok | :error, Map.t()}
+ @callback filter(map()) :: {:ok | :reject, map()}
+ @callback describe() :: {:ok | :error, map()}
@callback config_description() :: %{
optional(:children) => [map()],
key: atom(),
@@ -12,5 +12,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.Policy do
label: String.t(),
description: String.t()
}
- @optional_callbacks config_description: 0
+ @callback history_awareness() :: :auto | :manual
+ @optional_callbacks config_description: 0, history_awareness: 0
end
diff --git a/lib/pleroma/web/activity_pub/mrf/quote_to_link_tag_policy.ex b/lib/pleroma/web/activity_pub/mrf/quote_to_link_tag_policy.ex
new file mode 100644
index 000000000..ac353f03f
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/quote_to_link_tag_policy.ex
@@ -0,0 +1,49 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy do
+ @moduledoc "Force a Link tag for posts quoting another post. (may break outgoing federation of quote posts with older Pleroma versions)"
+ @behaviour Pleroma.Web.ActivityPub.MRF.Policy
+
+ alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
+
+ require Pleroma.Constants
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def filter(%{"object" => %{"quoteUrl" => _} = object} = activity) do
+ {:ok, Map.put(activity, "object", filter_object(object))}
+ end
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def filter(object), do: {:ok, object}
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def describe, do: {:ok, %{}}
+
+ @impl Pleroma.Web.ActivityPub.MRF.Policy
+ def history_awareness, do: :auto
+
+ defp filter_object(%{"quoteUrl" => quote_url} = object) do
+ tags = object["tag"] || []
+
+ if Enum.any?(tags, fn tag ->
+ CommonFixes.object_link_tag?(tag) and tag["href"] == quote_url
+ end) do
+ object
+ else
+ object
+ |> Map.put(
+ "tag",
+ tags ++
+ [
+ %{
+ "type" => "Link",
+ "mediaType" => Pleroma.Constants.activity_json_canonical_mime_type(),
+ "href" => quote_url
+ }
+ ]
+ )
+ end
+ end
+end
diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex
index c0c7f3806..829ddeaea 100644
--- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex
@@ -40,9 +40,9 @@ defp check_reject(%{host: actor_host} = _actor_info, object) do
defp check_media_removal(
%{host: actor_host} = _actor_info,
- %{"type" => "Create", "object" => %{"attachment" => child_attachment}} = object
+ %{"type" => type, "object" => %{"attachment" => child_attachment}} = object
)
- when length(child_attachment) > 0 do
+ when length(child_attachment) > 0 and type in ["Create", "Update"] do
media_removal =
instance_list(:media_removal)
|> MRF.subdomains_regex()
@@ -63,10 +63,11 @@ defp check_media_removal(_actor_info, object), do: {:ok, object}
defp check_media_nsfw(
%{host: actor_host} = _actor_info,
%{
- "type" => "Create",
+ "type" => type,
"object" => %{} = _child_object
} = object
- ) do
+ )
+ when type in ["Create", "Update"] do
media_nsfw =
instance_list(:media_nsfw)
|> MRF.subdomains_regex()
diff --git a/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex b/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex
index f66c379b5..fa6b595ea 100644
--- a/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex
@@ -34,14 +34,17 @@ defp steal_emoji({shortcode, url}, emoji_dir_path) do
|> Path.basename()
|> Path.extname()
- file_path = Path.join(emoji_dir_path, shortcode <> (extension || ".png"))
+ extension = if extension == "", do: ".png", else: extension
+
+ shortcode = Path.basename(shortcode)
+ file_path = Path.join(emoji_dir_path, shortcode <> extension)
case File.write(file_path, response.body) do
:ok ->
shortcode
e ->
- Logger.warn("MRF.StealEmojiPolicy: Failed to write to #{file_path}: #{inspect(e)}")
+ Logger.warning("MRF.StealEmojiPolicy: Failed to write to #{file_path}: #{inspect(e)}")
nil
end
else
@@ -53,7 +56,7 @@ defp steal_emoji({shortcode, url}, emoji_dir_path) do
end
else
e ->
- Logger.warn("MRF.StealEmojiPolicy: Failed to fetch #{url}: #{inspect(e)}")
+ Logger.warning("MRF.StealEmojiPolicy: Failed to fetch #{url}: #{inspect(e)}")
nil
end
end
@@ -76,6 +79,7 @@ def filter(%{"object" => %{"emoji" => foreign_emojis, "actor" => actor}} = messa
new_emojis =
foreign_emojis
|> Enum.reject(fn {shortcode, _url} -> shortcode in installed_emoji end)
+ |> Enum.reject(fn {shortcode, _url} -> String.contains?(shortcode, ["/", "\\"]) end)
|> Enum.filter(fn {shortcode, _url} ->
reject_emoji? =
[:mrf_steal_emoji, :rejected_shortcodes]
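
The two changes above close a path traversal: shortcodes containing a path separator are dropped before any fetch, and the written filename is always built from `Path.basename/1` plus a non-empty extension. A small sketch of the effect (the emoji directory is illustrative):

shortcodes = ["blobcat", "../../../../etc/passwd", "icons\\evil"]

Enum.reject(shortcodes, fn shortcode ->
  String.contains?(shortcode, ["/", "\\"])
end)
#=> ["blobcat"]

# a surviving shortcode with no usable extension defaults to ".png":
Path.join("/var/lib/pleroma/emoji/stolen", Path.basename("blobcat") <> ".png")
#=> "/var/lib/pleroma/emoji/stolen/blobcat.png"
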
diff --git a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex
index 10072b693..73760ca8f 100644
--- a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex
@@ -27,22 +27,22 @@ defp get_tags(_), do: []
defp process_tag(
"mrf_tag:media-force-nsfw",
%{
- "type" => "Create",
+ "type" => type,
"object" => %{"attachment" => child_attachment}
} = message
)
- when length(child_attachment) > 0 do
+ when length(child_attachment) > 0 and type in ["Create", "Update"] do
{:ok, Kernel.put_in(message, ["object", "sensitive"], true)}
end
defp process_tag(
"mrf_tag:media-strip",
%{
- "type" => "Create",
+ "type" => type,
"object" => %{"attachment" => child_attachment} = object
} = message
)
- when length(child_attachment) > 0 do
+ when length(child_attachment) > 0 and type in ["Create", "Update"] do
object = Map.delete(object, "attachment")
message = Map.put(message, "object", object)
@@ -152,7 +152,7 @@ def filter(%{"object" => target_actor, "type" => "Follow"} = message),
do: filter_message(target_actor, message)
@impl true
- def filter(%{"actor" => actor, "type" => "Create"} = message),
+ def filter(%{"actor" => actor, "type" => type} = message) when type in ["Create", "Update"],
do: filter_message(actor, message)
@impl true
diff --git a/lib/pleroma/web/activity_pub/mrf/utils.ex b/lib/pleroma/web/activity_pub/mrf/utils.ex
new file mode 100644
index 000000000..f2dc9eea9
--- /dev/null
+++ b/lib/pleroma/web/activity_pub/mrf/utils.ex
@@ -0,0 +1,15 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.Utils do
+ @spec describe_regex_or_string(String.t() | Regex.t()) :: String.t()
+ def describe_regex_or_string(pattern) do
+ # This horror is needed to convert regex sigils to strings
+ if not is_binary(pattern) do
+ inspect(pattern)
+ else
+ pattern
+ end
+ end
+end
diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex
index f3e31c931..b3043b93a 100644
--- a/lib/pleroma/web/activity_pub/object_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validator.ex
@@ -21,7 +21,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.AnswerValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator
- alias Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator
+ alias Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.BlockValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator
@@ -102,9 +102,9 @@ def validate(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = create_activity,
meta
)
- when objtype in ~w[Question Answer Audio Video Event Article Note Page] do
- with {:ok, object_data} <- cast_and_apply(object),
- meta = Keyword.put(meta, :object_data, object_data |> stringify_keys),
+ when objtype in ~w[Question Answer Audio Video Image Event Article Note Page] do
+ with {:ok, object_data} <- cast_and_apply_and_stringify_with_history(object),
+ meta = Keyword.put(meta, :object_data, object_data),
{:ok, create_activity} <-
create_activity
|> CreateGenericValidator.cast_and_validate(meta)
@@ -115,32 +115,70 @@ def validate(
end
def validate(%{"type" => type} = object, meta)
- when type in ~w[Event Question Audio Video Article Note Page] do
+ when type in ~w[Event Question Audio Video Image Article Note Page] do
validator =
case type do
"Event" -> EventValidator
"Question" -> QuestionValidator
- "Audio" -> AudioVideoValidator
- "Video" -> AudioVideoValidator
+ "Audio" -> AudioImageVideoValidator
+ "Video" -> AudioImageVideoValidator
+ "Image" -> AudioImageVideoValidator
"Article" -> ArticleNotePageValidator
"Note" -> ArticleNotePageValidator
"Page" -> ArticleNotePageValidator
end
with {:ok, object} <-
- object
- |> validator.cast_and_validate()
- |> Ecto.Changeset.apply_action(:insert) do
- object = stringify_keys(object)
+ do_separate_with_history(object, fn object ->
+ with {:ok, object} <-
+ object
+ |> validator.cast_and_validate()
+ |> Ecto.Changeset.apply_action(:insert) do
+ object = stringify_keys(object)
- # Insert copy of hashtags as strings for the non-hashtag table indexing
- tag = (object["tag"] || []) ++ Object.hashtags(%Object{data: object})
- object = Map.put(object, "tag", tag)
+ # Insert copy of hashtags as strings for the non-hashtag table indexing
+ tag = (object["tag"] || []) ++ Object.hashtags(%Object{data: object})
+ object = Map.put(object, "tag", tag)
+ {:ok, object}
+ end
+ end) do
{:ok, object, meta}
end
end
+ def validate(
+ %{"type" => "Update", "object" => %{"type" => objtype} = object} = update_activity,
+ meta
+ )
+ when objtype in ~w[Question Answer Audio Video Event Article Note Page] do
+ with {_, false} <- {:local, Access.get(meta, :local, false)},
+ {_, {:ok, object_data, _}} <- {:object_validation, validate(object, meta)},
+ meta = Keyword.put(meta, :object_data, object_data),
+ {:ok, update_activity} <-
+ update_activity
+ |> UpdateValidator.cast_and_validate()
+ |> Ecto.Changeset.apply_action(:insert) do
+ update_activity = stringify_keys(update_activity)
+ {:ok, update_activity, meta}
+ else
+ {:local, _} ->
+ with {:ok, object} <-
+ update_activity
+ |> UpdateValidator.cast_and_validate()
+ |> Ecto.Changeset.apply_action(:insert) do
+ object = stringify_keys(object)
+ {:ok, object, meta}
+ end
+
+ {:object_validation, e} ->
+ e
+
+ {:error, %Ecto.Changeset{} = e} ->
+ {:error, e}
+ end
+ end
+
def validate(%{"type" => type} = object, meta)
when type in ~w[Accept Reject Follow Update Like EmojiReact Announce
ChatMessage Answer] do
@@ -178,6 +216,15 @@ def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do
def validate(o, m), do: {:error, {:validator_not_set, {o, m}}}
+ def cast_and_apply_and_stringify_with_history(object) do
+ do_separate_with_history(object, fn object ->
+ with {:ok, object_data} <- cast_and_apply(object),
+ object_data <- object_data |> stringify_keys() do
+ {:ok, object_data}
+ end
+ end)
+ end
+
def cast_and_apply(%{"type" => "ChatMessage"} = object) do
ChatMessageValidator.cast_and_apply(object)
end
@@ -190,8 +237,8 @@ def cast_and_apply(%{"type" => "Answer"} = object) do
AnswerValidator.cast_and_apply(object)
end
- def cast_and_apply(%{"type" => type} = object) when type in ~w[Audio Video] do
- AudioVideoValidator.cast_and_apply(object)
+ def cast_and_apply(%{"type" => type} = object) when type in ~w[Audio Image Video] do
+ AudioImageVideoValidator.cast_and_apply(object)
end
def cast_and_apply(%{"type" => "Event"} = object) do
@@ -204,8 +251,7 @@ def cast_and_apply(%{"type" => type} = object) when type in ~w[Article Note Page
def cast_and_apply(o), do: {:error, {:validator_not_set, o}}
- # is_struct/1 appears in Elixir 1.11
- def stringify_keys(%{__struct__: _} = object) do
+ def stringify_keys(object) when is_struct(object) do
object
|> Map.from_struct()
|> stringify_keys
@@ -236,4 +282,54 @@ def fetch_actor_and_object(object) do
Object.normalize(object["object"], fetch: true)
:ok
end
+
+ defp for_each_history_item(
+ %{"type" => "OrderedCollection", "orderedItems" => items} = history,
+ object,
+ fun
+ ) do
+ processed_items =
+ Enum.map(items, fn item ->
+ with item <- Map.put(item, "id", object["id"]),
+ {:ok, item} <- fun.(item) do
+ item
+ else
+ _ -> nil
+ end
+ end)
+
+ if Enum.all?(processed_items, &(not is_nil(&1))) do
+ {:ok, Map.put(history, "orderedItems", processed_items)}
+ else
+ {:error, :invalid_history}
+ end
+ end
+
+ defp for_each_history_item(nil, _object, _fun) do
+ {:ok, nil}
+ end
+
+ defp for_each_history_item(_, _object, _fun) do
+ {:error, :invalid_history}
+ end
+
+ # fun is (object -> {:ok, validated_object_with_string_keys})
+ defp do_separate_with_history(object, fun) do
+ with history <- object["formerRepresentations"],
+ object <- Map.drop(object, ["formerRepresentations"]),
+ {_, {:ok, object}} <- {:main_body, fun.(object)},
+ {_, {:ok, history}} <- {:history_items, for_each_history_item(history, object, fun)} do
+ object =
+ if history do
+ Map.put(object, "formerRepresentations", history)
+ else
+ object
+ end
+
+ {:ok, object}
+ else
+ {:main_body, e} -> e
+ {:history_items, e} -> e
+ end
+ end
end
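
`do_separate_with_history/2` expects edit history in the shape Pleroma federates for edited posts: the current revision carries a `formerRepresentations` OrderedCollection whose items are past revisions. Roughly (ids and content are illustrative, unrelated fields omitted):

%{
  "type" => "Note",
  "id" => "https://remote.example/objects/1",
  "content" => "current revision",
  "formerRepresentations" => %{
    "type" => "OrderedCollection",
    "orderedItems" => [
      %{"type" => "Note", "content" => "an older revision"}
    ]
  }
}

Each history item inherits the parent's "id" before being run through the same validator fun; if any item fails, the whole object is rejected with {:error, :invalid_history}.
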
diff --git a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex
index 5202db7f1..db3259550 100644
--- a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex
@@ -73,6 +73,7 @@ defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(
end
defp maybe_refetch_user(%User{ap_id: ap_id}) do
- Pleroma.Web.ActivityPub.Transmogrifier.upgrade_user_from_ap_id(ap_id)
+ # Maybe it could use User.get_or_fetch_by_ap_id to avoid refreshing too often
+ User.fetch_by_ap_id(ap_id)
end
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex b/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex
index c2c7ba1a8..d0218583e 100644
--- a/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex
@@ -82,7 +82,7 @@ defp validate_announcable(cng) do
object when is_binary(object) <- get_field(cng, :object),
%User{} = actor <- User.get_cached_by_ap_id(actor),
%Object{} = object <- Object.get_cached_by_ap_id(object),
- false <- Visibility.is_public?(object) do
+ false <- Visibility.public?(object) do
same_actor = object.data["actor"] == actor.ap_id
recipients = get_field(cng, :to) ++ get_field(cng, :cc)
local_public = Utils.as_local_public()
diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
index ca335bc8a..1b5b2e8fb 100644
--- a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex
@@ -49,7 +49,10 @@ defp fix_url(%{"url" => url} = data) when is_bitstring(url), do: data
defp fix_url(%{"url" => url} = data) when is_map(url), do: Map.put(data, "url", url["href"])
defp fix_url(data), do: data
- defp fix_tag(%{"tag" => tag} = data) when is_list(tag), do: data
+ defp fix_tag(%{"tag" => tag} = data) when is_list(tag) do
+ Map.put(data, "tag", Enum.filter(tag, &is_map/1))
+ end
+
defp fix_tag(%{"tag" => tag} = data) when is_map(tag), do: Map.put(data, "tag", [tag])
defp fix_tag(data), do: Map.drop(data, ["tag"])
@@ -60,11 +63,19 @@ defp fix_replies(%{"replies" => %{"first" => %{"items" => replies}}} = data)
defp fix_replies(%{"replies" => %{"items" => replies}} = data) when is_list(replies),
do: Map.put(data, "replies", replies)
- defp fix_replies(%{"replies" => replies} = data) when is_bitstring(replies),
+ # TODO: Pleroma does not have any support for Collections at the moment.
+ # If the `replies` field is not something the ObjectID validator can handle,
+ # the activity/object would be rejected, which is bad behavior.
+ defp fix_replies(%{"replies" => replies} = data) when not is_list(replies),
do: Map.drop(data, ["replies"])
defp fix_replies(data), do: data
+ def fix_attachments(%{"attachment" => attachment} = data) when is_map(attachment),
+ do: Map.put(data, "attachment", [attachment])
+
+ def fix_attachments(data), do: data
+
defp fix(data) do
data
|> CommonFixes.fix_actor()
@@ -72,6 +83,8 @@ defp fix(data) do
|> fix_url()
|> fix_tag()
|> fix_replies()
+ |> fix_attachments()
+ |> CommonFixes.fix_quote_url()
|> Transmogrifier.fix_emoji()
|> Transmogrifier.fix_content_map()
end
@@ -88,7 +101,7 @@ def changeset(struct, data) do
defp validate_data(data_cng) do
data_cng
|> validate_inclusion(:type, ["Article", "Note", "Page"])
- |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id])
+ |> validate_required([:id, :actor, :attributedTo, :type, :context])
|> CommonValidations.validate_any_presence([:cc, :to])
|> CommonValidations.validate_fields_match([:actor, :attributedTo])
|> CommonValidations.validate_actor_presence()
diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex
index 8b641d88d..72975f348 100644
--- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex
@@ -11,13 +11,14 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
@primary_key false
embedded_schema do
- field(:type, :string)
+ field(:id, :string)
+ field(:type, :string, default: "Link")
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:name, :string)
field(:blurhash, :string)
embeds_many :url, UrlObjectValidator, primary_key: false do
- field(:type, :string)
+ field(:type, :string, default: "Link")
field(:href, ObjectValidators.Uri)
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:width, :integer)
@@ -43,10 +44,10 @@ def changeset(struct, data) do
|> fix_url()
struct
- |> cast(data, [:type, :mediaType, :name, :blurhash])
- |> cast_embed(:url, with: &url_changeset/2)
+ |> cast(data, [:id, :type, :mediaType, :name, :blurhash])
+ |> cast_embed(:url, with: &url_changeset/2, required: true)
|> validate_inclusion(:type, ~w[Link Document Audio Image Video])
- |> validate_required([:type, :mediaType, :url])
+ |> validate_required([:type, :mediaType])
end
def url_changeset(struct, data) do
@@ -59,7 +60,7 @@ def url_changeset(struct, data) do
end
def fix_media_type(data) do
- Map.put_new(data, "mediaType", data["mimeType"])
+ Map.put_new(data, "mediaType", data["mimeType"] || "application/octet-stream")
end
defp handle_href(href, mediaType, data) do
@@ -90,6 +91,6 @@ defp fix_url(data) do
defp validate_data(cng) do
cng
|> validate_inclusion(:type, ~w[Document Audio Image Video])
- |> validate_required([:mediaType, :url, :type])
+ |> validate_required([:mediaType, :type])
end
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex
similarity index 84%
rename from lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex
rename to lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex
index 432bd9039..65ac6bb93 100644
--- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex
@@ -2,7 +2,7 @@
# Copyright © 2017-2022 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do
+defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator do
use Ecto.Schema
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
@@ -55,9 +55,14 @@ defp find_attachment(url) do
url
|> Enum.concat(mpeg_url["tag"] || [])
|> Enum.find(fn
- %{"mediaType" => mime_type} -> String.starts_with?(mime_type, ["video/", "audio/"])
- %{"mimeType" => mime_type} -> String.starts_with?(mime_type, ["video/", "audio/"])
- _ -> false
+ %{"mediaType" => mime_type} ->
+ String.starts_with?(mime_type, ["video/", "audio/", "image/"])
+
+ %{"mimeType" => mime_type} ->
+ String.starts_with?(mime_type, ["video/", "audio/", "image/"])
+
+ _ ->
+ false
end)
end
@@ -94,6 +99,7 @@ defp fix(data) do
data
|> CommonFixes.fix_actor()
|> CommonFixes.fix_object_defaults()
+ |> CommonFixes.fix_quote_url()
|> Transmogrifier.fix_emoji()
|> fix_url()
|> fix_content()
@@ -104,14 +110,14 @@ def changeset(struct, data) do
struct
|> cast(data, __schema__(:fields) -- [:attachment, :tag])
- |> cast_embed(:attachment)
+ |> cast_embed(:attachment, required: true)
|> cast_embed(:tag)
end
defp validate_data(data_cng) do
data_cng
- |> validate_inclusion(:type, ["Audio", "Video"])
- |> validate_required([:id, :actor, :attributedTo, :type, :context, :attachment])
+ |> validate_inclusion(:type, ~w[Audio Image Video])
+ |> validate_required([:id, :actor, :attributedTo, :type, :context])
|> CommonValidations.validate_any_presence([:cc, :to])
|> CommonValidations.validate_fields_match([:actor, :attributedTo])
|> CommonValidations.validate_actor_presence()
diff --git a/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex b/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex
index efae48cae..09e25be89 100644
--- a/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex
@@ -57,6 +57,11 @@ def fix_attachment(%{"attachment" => [attachment | _]} = data) do
|> Map.put("attachment", attachment)
end
+ def fix_attachment(%{"attachment" => attachment} = data) when attachment == [] do
+ data
+ |> Map.drop(["attachment"])
+ end
+
def fix_attachment(data), do: data
def changeset(struct, data) do
diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex
index 8e768ffbf..1a5d02601 100644
--- a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex
@@ -27,12 +27,13 @@ defmacro activity_fields do
end
end
- # All objects except Answer and CHatMessage
+ # All objects except Answer and ChatMessage
defmacro object_fields do
quote bind_quoted: binding() do
field(:content, :string)
field(:published, ObjectValidators.DateTime)
+ field(:updated, ObjectValidators.DateTime)
field(:emoji, ObjectValidators.Emoji, default: %{})
embeds_many(:attachment, AttachmentValidator)
end
@@ -51,15 +52,15 @@ defmacro status_object_fields do
field(:summary, :string)
field(:context, :string)
- # short identifier for PleromaFE to group statuses by context
- field(:context_id, :integer)
field(:sensitive, :boolean, default: false)
field(:replies_count, :integer, default: 0)
field(:like_count, :integer, default: 0)
field(:announcement_count, :integer, default: 0)
+ field(:quotes_count, :integer, default: 0)
field(:inReplyTo, ObjectValidators.ObjectID)
- field(:url, ObjectValidators.Uri)
+ field(:quoteUrl, ObjectValidators.ObjectID)
+ field(:url, ObjectValidators.BareUri)
field(:likes, {:array, ObjectValidators.ObjectID}, default: [])
field(:announcements, {:array, ObjectValidators.ObjectID}, default: [])
diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex
index 4f8c083eb..4699029d4 100644
--- a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex
@@ -4,12 +4,15 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
alias Pleroma.EctoType.ActivityPub.ObjectValidators
+ alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.Utils
+ require Pleroma.Constants
+
def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do
{:ok, data} = ObjectValidators.Recipients.cast(message[field] || field_fallback)
@@ -22,14 +25,17 @@ def cast_and_filter_recipients(message, field, follower_collection, field_fallba
end
def fix_object_defaults(data) do
- %{data: %{"id" => context}, id: context_id} =
- Utils.create_context(data["context"] || data["conversation"])
+ data = Maps.filter_empty_values(data)
+
+ context =
+ Utils.maybe_create_context(
+ data["context"] || data["conversation"] || data["inReplyTo"] || data["id"]
+ )
%User{follower_address: follower_collection} = User.get_cached_by_ap_id(data["attributedTo"])
data
|> Map.put("context", context)
- |> Map.put("context_id", context_id)
|> cast_and_filter_recipients("to", follower_collection)
|> cast_and_filter_recipients("cc", follower_collection)
|> cast_and_filter_recipients("bto", follower_collection)
@@ -75,4 +81,48 @@ def fix_object_action_recipients(data, %Object{data: %{"actor" => actor}}) do
Map.put(data, "to", to)
end
+
+ def fix_quote_url(%{"quoteUrl" => _quote_url} = data), do: data
+
+ # Fedibird
+ # https://github.com/fedibird/mastodon/commit/dbd7ae6cf58a92ec67c512296b4daaea0d01e6ac
+ def fix_quote_url(%{"quoteUri" => quote_url} = data) do
+ Map.put(data, "quoteUrl", quote_url)
+ end
+
+ # Old Fedibird (bug)
+ # https://github.com/fedibird/mastodon/issues/9
+ def fix_quote_url(%{"quoteURL" => quote_url} = data) do
+ Map.put(data, "quoteUrl", quote_url)
+ end
+
+ # Misskey fallback
+ def fix_quote_url(%{"_misskey_quote" => quote_url} = data) do
+ Map.put(data, "quoteUrl", quote_url)
+ end
+
+ def fix_quote_url(%{"tag" => [_ | _] = tags} = data) do
+ tag = Enum.find(tags, &object_link_tag?/1)
+
+ if not is_nil(tag) do
+ data
+ |> Map.put("quoteUrl", tag["href"])
+ else
+ data
+ end
+ end
+
+ def fix_quote_url(data), do: data
+
+ # https://codeberg.org/fediverse/fep/src/branch/main/fep/e232/fep-e232.md
+ def object_link_tag?(%{
+ "type" => "Link",
+ "mediaType" => media_type,
+ "href" => href
+ })
+ when media_type in Pleroma.Constants.activity_json_mime_types() and is_binary(href) do
+ true
+ end
+
+ def object_link_tag?(_), do: false
end
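# Illustrative only, not part of this diff: all of the quote-URL fallbacks above
# normalise to a single "quoteUrl" key, so downstream code only has to read one field.
#
#   CommonFixes.fix_quote_url(%{"quoteUri" => "https://example.com/objects/1"})
#   # => puts "quoteUrl" => "https://example.com/objects/1" (Fedibird)
#
#   CommonFixes.fix_quote_url(%{
#     "tag" => [
#       %{"type" => "Link", "mediaType" => "application/activity+json",
#         "href" => "https://example.com/objects/1"}
#     ]
#   })
#   # => puts "quoteUrl" => "https://example.com/objects/1" (FEP-e232 object link)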
diff --git a/lib/pleroma/web/activity_pub/object_validators/common_validations.ex b/lib/pleroma/web/activity_pub/object_validators/common_validations.ex
index 704b3abc9..1c5b1a059 100644
--- a/lib/pleroma/web/activity_pub/object_validators/common_validations.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/common_validations.ex
@@ -136,11 +136,11 @@ def same_domain?(cng, fields \\ [:actor, :object]) do
# This figures out if a user is able to create, delete or modify something
# based on the domain and superuser status
- @spec validate_modification_rights(Ecto.Changeset.t()) :: Ecto.Changeset.t()
- def validate_modification_rights(cng) do
+ @spec validate_modification_rights(Ecto.Changeset.t(), atom()) :: Ecto.Changeset.t()
+ def validate_modification_rights(cng, privilege) do
actor = User.get_cached_by_ap_id(get_field(cng, :actor))
- if User.superuser?(actor) || same_domain?(cng) do
+ if User.privileged?(actor, privilege) || same_domain?(cng) do
cng
else
cng
diff --git a/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex b/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex
index c9a621cb1..2395abfd4 100644
--- a/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex
@@ -75,7 +75,7 @@ def fix(data, meta) do
data
|> CommonFixes.fix_actor()
- |> Map.put_new("context", object["context"])
+ |> Map.put("context", object["context"])
|> fix_addressing(object)
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex b/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex
index 035fd5bc9..4d8502ada 100644
--- a/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex
@@ -61,7 +61,7 @@ defp validate_data(cng) do
|> validate_required([:id, :type, :actor, :to, :cc, :object])
|> validate_inclusion(:type, ["Delete"])
|> validate_delete_actor(:actor)
- |> validate_modification_rights()
+ |> validate_modification_rights(:messages_delete)
|> validate_object_or_user_presence(allowed_types: @deletable_types)
|> add_deleted_activity_id()
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex
index ed072b888..65ba047e6 100644
--- a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex
@@ -5,8 +5,10 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
use Ecto.Schema
+ alias Pleroma.Emoji
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
+ alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator
import Ecto.Changeset
import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -19,6 +21,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
import Elixir.Pleroma.Web.ActivityPub.ObjectValidators.CommonFields
message_fields()
activity_fields()
+ embeds_many(:tag, TagValidator)
end
end
@@ -43,32 +46,75 @@ def cast_data(data) do
def changeset(struct, data) do
struct
- |> cast(data, __schema__(:fields))
+ |> cast(data, __schema__(:fields) -- [:tag])
+ |> cast_embed(:tag)
end
defp fix(data) do
data =
data
+ |> fix_emoji_qualification()
|> CommonFixes.fix_actor()
|> CommonFixes.fix_activity_addressing()
- with %Object{} = object <- Object.normalize(data["object"]) do
- data
- |> CommonFixes.fix_activity_context(object)
- |> CommonFixes.fix_object_action_recipients(object)
- else
- _ -> data
+ data = Map.put_new(data, "tag", [])
+
+ case Object.normalize(data["object"]) do
+ %Object{} = object ->
+ data
+ |> CommonFixes.fix_activity_context(object)
+ |> CommonFixes.fix_object_action_recipients(object)
+
+ _ ->
+ data
end
end
+ defp fix_emoji_qualification(%{"content" => emoji} = data) do
+ new_emoji = Pleroma.Emoji.fully_qualify_emoji(emoji)
+
+ cond do
+ Pleroma.Emoji.unicode?(emoji) ->
+ data
+
+ Pleroma.Emoji.unicode?(new_emoji) ->
+ data |> Map.put("content", new_emoji)
+
+ true ->
+ data
+ end
+ end
+
+ defp fix_emoji_qualification(data), do: data
+
defp validate_emoji(cng) do
content = get_field(cng, :content)
- if Pleroma.Emoji.is_unicode_emoji?(content) do
+ if Emoji.unicode?(content) || Emoji.custom?(content) do
cng
else
cng
- |> add_error(:content, "must be a single character emoji")
+ |> add_error(:content, "is not a valid emoji")
+ end
+ end
+
+ defp maybe_validate_tag_presence(cng) do
+ content = get_field(cng, :content)
+
+ if Emoji.unicode?(content) do
+ cng
+ else
+ tag = get_field(cng, :tag)
+ emoji_name = Emoji.maybe_strip_name(content)
+
+ case tag do
+ [%{name: ^emoji_name, type: "Emoji", icon: %{url: _}}] ->
+ cng
+
+ _ ->
+ cng
+ |> add_error(:tag, "does not contain an Emoji tag")
+ end
end
end
@@ -79,5 +125,6 @@ defp validate_data(data_cng) do
|> validate_actor_presence()
|> validate_object_presence()
|> validate_emoji()
+ |> maybe_validate_tag_presence()
end
end
diff --git a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex
index 0e99f2037..ab204f69a 100644
--- a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex
@@ -62,7 +62,7 @@ def changeset(struct, data) do
defp validate_data(data_cng) do
data_cng
|> validate_inclusion(:type, ["Event"])
- |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id])
+ |> validate_required([:id, :actor, :attributedTo, :type, :context])
|> CommonValidations.validate_any_presence([:cc, :to])
|> CommonValidations.validate_fields_match([:actor, :attributedTo])
|> CommonValidations.validate_actor_presence()
diff --git a/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex
index 541945fa4..8d7f7b9fa 100644
--- a/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/question_options_validator.ex
@@ -14,10 +14,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator do
embeds_one :replies, Replies, primary_key: false do
field(:totalItems, :integer)
- field(:type, :string)
+ field(:type, :string, default: "Collection")
end
- field(:type, :string)
+ field(:type, :string, default: "Note")
end
def changeset(struct, data) do
diff --git a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex
index 9412be4bc..7f9d4d648 100644
--- a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex
@@ -29,6 +29,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
field(:closed, ObjectValidators.DateTime)
field(:voters, {:array, ObjectValidators.ObjectID}, default: [])
+ field(:nonAnonymous, :boolean)
embeds_many(:anyOf, QuestionOptionsValidator)
embeds_many(:oneOf, QuestionOptionsValidator)
end
@@ -62,6 +63,7 @@ defp fix(data) do
data
|> CommonFixes.fix_actor()
|> CommonFixes.fix_object_defaults()
+ |> CommonFixes.fix_quote_url()
|> Transmogrifier.fix_emoji()
|> fix_closed()
end
@@ -80,7 +82,7 @@ def changeset(struct, data) do
defp validate_data(data_cng) do
data_cng
|> validate_inclusion(:type, ["Question"])
- |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id])
+ |> validate_required([:id, :actor, :attributedTo, :type, :context])
|> CommonValidations.validate_any_presence([:cc, :to])
|> CommonValidations.validate_fields_match([:actor, :attributedTo])
|> CommonValidations.validate_actor_presence()
diff --git a/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex b/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex
index 9f15f1981..47cf7b415 100644
--- a/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex
@@ -9,15 +9,20 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do
import Ecto.Changeset
+ require Pleroma.Constants
+
@primary_key false
embedded_schema do
# Common
field(:type, :string)
field(:name, :string)
- # Mention, Hashtag
+ # Mention, Hashtag, Link
field(:href, ObjectValidators.Uri)
+ # Link
+ field(:mediaType, :string)
+
# Emoji
embeds_one :icon, IconObjectValidator, primary_key: false do
field(:type, :string)
@@ -68,6 +73,19 @@ def changeset(struct, %{"type" => "Emoji"} = data) do
|> validate_required([:type, :name, :icon])
end
+ def changeset(struct, %{"type" => "Link"} = data) do
+ struct
+ |> cast(data, [:type, :name, :mediaType, :href])
+ |> validate_inclusion(:mediaType, Pleroma.Constants.activity_json_mime_types())
+ |> validate_required([:type, :href, :mediaType])
+ end
+
+ def changeset(struct, %{"type" => _} = data) do
+ struct
+ |> cast(data, [])
+ |> Map.put(:action, :ignore)
+ end
+
def icon_changeset(struct, data) do
struct
|> cast(data, [:type, :url])
diff --git a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex
index a5def312e..1e940a400 100644
--- a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex
@@ -51,7 +51,9 @@ def validate_updating_rights(cng) do
with actor = get_field(cng, :actor),
object = get_field(cng, :object),
{:ok, object_id} <- ObjectValidators.ObjectID.cast(object),
- true <- actor == object_id do
+ actor_uri <- URI.parse(actor),
+ object_uri <- URI.parse(object_id),
+ true <- actor_uri.host == object_uri.host do
cng
else
_e ->
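# In short (sketch, not in this diff): updating rights are now granted per origin
# rather than per identity, e.g.
#
#   URI.parse("https://example.com/users/alice").host ==
#     URI.parse("https://example.com/objects/123").host
#   # => true, so an Update of that object by that actor passes validate_updating_rights/1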
diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex
index ca8653ab1..40184bd97 100644
--- a/lib/pleroma/web/activity_pub/pipeline.ex
+++ b/lib/pleroma/web/activity_pub/pipeline.ex
@@ -62,7 +62,7 @@ defp maybe_federate(%Activity{} = activity, meta) do
with {:ok, local} <- Keyword.fetch(meta, :local) do
do_not_federate = meta[:do_not_federate] || !config().get([:instance, :federating])
- if !do_not_federate and local and not Visibility.is_local_public?(activity) do
+ if !do_not_federate and local and not Visibility.local_public?(activity) do
activity =
if object = Keyword.get(meta, :object_data) do
%{activity | data: Map.put(activity.data, "object", object)}
diff --git a/lib/pleroma/web/activity_pub/publisher.ex b/lib/pleroma/web/activity_pub/publisher.ex
index 6c1ba76a3..a42b4844e 100644
--- a/lib/pleroma/web/activity_pub/publisher.ex
+++ b/lib/pleroma/web/activity_pub/publisher.ex
@@ -13,23 +13,60 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.ActivityPub.Transmogrifier
+ alias Pleroma.Workers.PublisherWorker
require Pleroma.Constants
import Pleroma.Web.ActivityPub.Visibility
- @behaviour Pleroma.Web.Federator.Publisher
-
require Logger
@moduledoc """
ActivityPub outgoing federation module.
"""
+ @doc """
+ Enqueue publishing a single activity.
+ """
+ @spec enqueue_one(map(), Keyword.t()) :: {:ok, %Oban.Job{}}
+ def enqueue_one(%{} = params, worker_args \\ []) do
+ PublisherWorker.enqueue(
+ "publish_one",
+ %{"params" => params},
+ worker_args
+ )
+ end
+
+ @doc """
+ Gathers the set of remote recipients of an activity, expanding any of the sender's list addresses in "bcc" into their member users.
+ """
+ def remote_users(%User{id: user_id}, %{data: %{"to" => to} = data}) do
+ cc = Map.get(data, "cc", [])
+
+ bcc =
+ data
+ |> Map.get("bcc", [])
+ |> Enum.reduce([], fn ap_id, bcc ->
+ case Pleroma.List.get_by_ap_id(ap_id) do
+ %Pleroma.List{user_id: ^user_id} = list ->
+ {:ok, following} = Pleroma.List.get_following(list)
+ bcc ++ Enum.map(following, & &1.ap_id)
+
+ _ ->
+ bcc
+ end
+ end)
+
+ [to, cc, bcc]
+ |> Enum.concat()
+ |> Enum.map(&User.get_cached_by_ap_id/1)
+ |> Enum.filter(fn user -> user && !user.local end)
+ end
+
@doc """
Determine if an activity can be represented by running it through Transmogrifier.
"""
- def is_representable?(%Activity{} = activity) do
+ def representable?(%Activity{} = activity) do
with {:ok, _data} <- Transmogrifier.prepare_outgoing(activity.data) do
true
else
@@ -80,9 +117,27 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
result
else
- {_post_result, response} ->
+ {_post_result, %{status: code} = response} = e ->
unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
- {:error, response}
+ Logger.metadata(activity: id, inbox: inbox, status: code)
+ Logger.error("Publisher failed to inbox #{inbox} with status #{code}")
+
+ case response do
+ %{status: 403} -> {:discard, :forbidden}
+ %{status: 404} -> {:discard, :not_found}
+ %{status: 410} -> {:discard, :not_found}
+ _ -> {:error, e}
+ end
+
+ {:error, :pool_full} ->
+ Logger.debug("Publisher snoozing worker job due to full connection pool")
+ {:snooze, 30}
+
+ e ->
+ unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
+ Logger.metadata(activity: id, inbox: inbox)
+ Logger.error("Publisher failed to inbox #{inbox} #{inspect(e)}")
+ {:error, e}
end
end
@@ -103,22 +158,21 @@ defp signature_host(%URI{port: port, scheme: scheme, host: host}) do
end
end
- defp should_federate?(inbox, public) do
- if public do
- true
- else
- %{host: host} = URI.parse(inbox)
+ def should_federate?(nil, _), do: false
+ def should_federate?(_, true), do: true
- quarantined_instances =
- Config.get([:instance, :quarantined_instances], [])
- |> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples()
- |> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
+ def should_federate?(inbox, _) do
+ %{host: host} = URI.parse(inbox)
- !Pleroma.Web.ActivityPub.MRF.subdomain_match?(quarantined_instances, host)
- end
+ quarantined_instances =
+ Config.get([:instance, :quarantined_instances], [])
+ |> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples()
+ |> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
+
+ !Pleroma.Web.ActivityPub.MRF.subdomain_match?(quarantined_instances, host)
end
- @spec recipients(User.t(), Activity.t()) :: list(User.t()) | []
+ @spec recipients(User.t(), Activity.t()) :: [[User.t()]]
defp recipients(actor, activity) do
followers =
if actor.follower_address in activity.recipients do
@@ -138,7 +192,10 @@ defp recipients(actor, activity) do
[]
end
- Pleroma.Web.Federator.Publisher.remote_users(actor, activity) ++ followers ++ fetchers
+ mentioned = remote_users(actor, activity)
+ non_mentioned = (followers ++ fetchers) -- mentioned
+
+ [mentioned, non_mentioned]
end
defp get_cc_ap_ids(ap_id, recipients) do
@@ -192,45 +249,52 @@ def determine_inbox(
def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
when is_list(bcc) and bcc != [] do
- public = is_public?(activity)
+ public = public?(activity)
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
- recipients = recipients(actor, activity)
+ [priority_recipients, recipients] = recipients(actor, activity)
inboxes =
- recipients
- |> Enum.filter(&User.ap_enabled?/1)
- |> Enum.map(fn actor -> actor.inbox end)
- |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
- |> Instances.filter_reachable()
+ [priority_recipients, recipients]
+ |> Enum.map(fn recipients ->
+ recipients
+ |> Enum.map(fn %User{} = user ->
+ determine_inbox(activity, user)
+ end)
+ |> Enum.uniq()
+ |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
+ |> Instances.filter_reachable()
+ end)
Repo.checkout(fn ->
- Enum.each(inboxes, fn {inbox, unreachable_since} ->
- %User{ap_id: ap_id} = Enum.find(recipients, fn actor -> actor.inbox == inbox end)
+ Enum.each(inboxes, fn inboxes ->
+ Enum.each(inboxes, fn {inbox, unreachable_since} ->
+ %User{ap_id: ap_id} = Enum.find(recipients, fn actor -> actor.inbox == inbox end)
- # Get all the recipients on the same host and add them to cc. Otherwise, a remote
- # instance would only accept a first message for the first recipient and ignore the rest.
- cc = get_cc_ap_ids(ap_id, recipients)
+ # Get all the recipients on the same host and add them to cc. Otherwise, a remote
+ # instance would only accept a first message for the first recipient and ignore the rest.
+ cc = get_cc_ap_ids(ap_id, recipients)
- json =
- data
- |> Map.put("cc", cc)
- |> Jason.encode!()
+ json =
+ data
+ |> Map.put("cc", cc)
+ |> Jason.encode!()
- Pleroma.Web.Federator.Publisher.enqueue_one(__MODULE__, %{
- inbox: inbox,
- json: json,
- actor_id: actor.id,
- id: activity.data["id"],
- unreachable_since: unreachable_since
- })
+ __MODULE__.enqueue_one(%{
+ inbox: inbox,
+ json: json,
+ actor_id: actor.id,
+ id: activity.data["id"],
+ unreachable_since: unreachable_since
+ })
+ end)
end)
end)
end
# Publishes an activity to all relevant peers.
def publish(%User{} = actor, %Activity{} = activity) do
- public = is_public?(activity)
+ public = public?(activity)
if public && Config.get([:instance, :allow_relay]) do
Logger.debug(fn -> "Relaying #{activity.data["id"]} out" end)
@@ -240,26 +304,38 @@ def publish(%User{} = actor, %Activity{} = activity) do
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
json = Jason.encode!(data)
- recipients(actor, activity)
- |> Enum.filter(fn user -> User.ap_enabled?(user) end)
- |> Enum.map(fn %User{} = user ->
- determine_inbox(activity, user)
- end)
- |> Enum.uniq()
- |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
- |> Instances.filter_reachable()
- |> Enum.each(fn {inbox, unreachable_since} ->
- Pleroma.Web.Federator.Publisher.enqueue_one(
- __MODULE__,
- %{
- inbox: inbox,
- json: json,
- actor_id: actor.id,
- id: activity.data["id"],
- unreachable_since: unreachable_since
- }
- )
+ [priority_inboxes, inboxes] =
+ recipients(actor, activity)
+ |> Enum.map(fn recipients ->
+ recipients
+ |> Enum.map(fn %User{} = user ->
+ determine_inbox(activity, user)
+ end)
+ |> Enum.uniq()
+ |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
+ end)
+
+ inboxes = inboxes -- priority_inboxes
+
+ [{priority_inboxes, 0}, {inboxes, 1}]
+ |> Enum.each(fn {inboxes, priority} ->
+ inboxes
+ |> Instances.filter_reachable()
+ |> Enum.each(fn {inbox, unreachable_since} ->
+ __MODULE__.enqueue_one(
+ %{
+ inbox: inbox,
+ json: json,
+ actor_id: actor.id,
+ id: activity.data["id"],
+ unreachable_since: unreachable_since
+ },
+ priority: priority
+ )
+ end)
end)
+
+ :ok
end
def gather_webfinger_links(%User{} = user) do
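# Usage sketch, not part of this diff: with the worker now driven directly from this
# module, a single delivery is enqueued roughly like
#
#   Publisher.enqueue_one(
#     %{
#       inbox: "https://remote.example/inbox",
#       json: json,
#       actor_id: actor.id,
#       id: activity.data["id"]
#     },
#     priority: 0
#   )
#
# where, per the reworked publish/2 above, priority 0 is used for directly addressed
# (mentioned) recipients and priority 1 for follower/fetcher inboxes.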
diff --git a/lib/pleroma/web/activity_pub/relay.ex b/lib/pleroma/web/activity_pub/relay.ex
index 2010351d1..91a647f29 100644
--- a/lib/pleroma/web/activity_pub/relay.ex
+++ b/lib/pleroma/web/activity_pub/relay.ex
@@ -58,7 +58,7 @@ defp fetch_target_user(ap_id, opts) do
@spec publish(any()) :: {:ok, Activity.t()} | {:error, any()}
def publish(%Activity{data: %{"type" => "Create"}} = activity) do
with %User{} = user <- get_actor(),
- true <- Visibility.is_public?(activity) do
+ true <- Visibility.public?(activity) do
CommonAPI.repeat(activity.id, user)
else
error -> format_error(error)
diff --git a/lib/pleroma/web/activity_pub/side_effects.ex b/lib/pleroma/web/activity_pub/side_effects.ex
index b997c15db..60b4d5f1b 100644
--- a/lib/pleroma/web/activity_pub/side_effects.ex
+++ b/lib/pleroma/web/activity_pub/side_effects.ex
@@ -21,10 +21,10 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
- alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
alias Pleroma.Workers.PollWorker
+ require Pleroma.Constants
require Logger
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@@ -124,7 +124,7 @@ def handle(
nil
end
- {:ok, notifications} = Notification.create_notifications(object, do_send: false)
+ {:ok, notifications} = Notification.create_notifications(object)
meta =
meta
@@ -153,23 +153,26 @@ def handle(
# Tasks this handles:
# - Update the user
+ # - Update a non-user object (Note, Question, etc.)
#
# For a local user, we also get a changeset with the full information, so we
# can update non-federating, non-activitypub settings as well.
@impl true
def handle(%{data: %{"type" => "Update", "object" => updated_object}} = object, meta) do
- if changeset = Keyword.get(meta, :user_update_changeset) do
- changeset
- |> User.update_and_set_cache()
+ updated_object_id = updated_object["id"]
+
+ with {_, true} <- {:has_id, is_binary(updated_object_id)},
+ %{"type" => type} <- updated_object,
+ {_, is_user} <- {:is_user, type in Pleroma.Constants.actor_types()} do
+ if is_user do
+ handle_update_user(object, meta)
+ else
+ handle_update_object(object, meta)
+ end
else
- {:ok, new_user_data} = ActivityPub.user_data_from_user_object(updated_object)
-
- User.get_by_ap_id(updated_object["id"])
- |> User.remote_user_changeset(new_user_data)
- |> User.update_and_set_cache()
+ _ ->
+ {:ok, object, meta}
end
-
- {:ok, object, meta}
end
# Tasks this handles:
@@ -180,7 +183,11 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
liked_object = Object.get_by_ap_id(object.data["object"])
Utils.add_like_to_object(object, liked_object)
- Notification.create_notifications(object)
+ {:ok, notifications} = Notification.create_notifications(object)
+
+ meta =
+ meta
+ |> add_notifications(notifications)
{:ok, object, meta}
end
@@ -193,11 +200,12 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
# - Increase replies count
# - Set up ActivityExpiration
# - Set up notifications
+ # - Index incoming posts for search (if needed)
@impl true
def handle(%{data: %{"type" => "Create"}} = activity, meta) do
with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
%User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
- {:ok, notifications} = Notification.create_notifications(activity, do_send: false)
+ {:ok, notifications} = Notification.create_notifications(activity)
{:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
{:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)
@@ -205,6 +213,10 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
Object.increase_replies_count(in_reply_to)
end
+ if quote_url = object.data["quoteUrl"] do
+ Object.increase_quotes_count(quote_url)
+ end
+
reply_depth = (meta[:depth] || 0) + 1
# FIXME: Force inReplyTo to replies
@@ -218,9 +230,11 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
end
end
- ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
- Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
- end)
+ Pleroma.Web.RichMedia.Card.get_by_activity(activity)
+
+ Pleroma.Search.add_to_index(Map.put(activity, :object, object))
+
+ Utils.maybe_handle_group_posts(activity)
meta =
meta
@@ -245,11 +259,13 @@ def handle(%{data: %{"type" => "Announce"}} = object, meta) do
Utils.add_announce_to_object(object, announced_object)
- if !User.is_internal_user?(user) do
- Notification.create_notifications(object)
+ {:ok, notifications} = Notification.create_notifications(object)
- ap_streamer().stream_out(object)
- end
+ if !User.internal?(user), do: ap_streamer().stream_out(object)
+
+ meta =
+ meta
+ |> add_notifications(notifications)
{:ok, object, meta}
end
@@ -270,7 +286,11 @@ def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
reacted_object = Object.get_by_ap_id(object.data["object"])
Utils.add_emoji_reaction_to_object(object, reacted_object)
- Notification.create_notifications(object)
+ {:ok, notifications} = Notification.create_notifications(object)
+
+ meta =
+ meta
+ |> add_notifications(notifications)
{:ok, object, meta}
end
@@ -278,10 +298,10 @@ def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
# Tasks this handles:
# - Delete and unpins the create activity
# - Replace object with Tombstone
- # - Set up notification
# - Reduce the user note count
# - Reduce the reply count
# - Stream out the activity
+ # - Remove posts from the search index (if needed)
@impl true
def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object, meta) do
deleted_object =
@@ -291,9 +311,9 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
result =
case deleted_object do
%Object{} ->
- with {:ok, deleted_object, _activity} <- Object.delete(deleted_object),
+ with {_, {:ok, deleted_object, _activity}} <- {:object, Object.delete(deleted_object)},
{_, actor} when is_binary(actor) <- {:actor, deleted_object.data["actor"]},
- %User{} = user <- User.get_cached_by_ap_id(actor) do
+ {_, %User{} = user} <- {:user, User.get_cached_by_ap_id(actor)} do
User.remove_pinned_object_id(user, deleted_object.data["id"])
{:ok, user} = ActivityPub.decrease_note_count_if_public(user, deleted_object)
@@ -302,6 +322,10 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
Object.decrease_replies_count(in_reply_to)
end
+ if quote_url = deleted_object.data["quoteUrl"] do
+ Object.decrease_quotes_count(quote_url)
+ end
+
MessageReference.delete_for_object(deleted_object)
ap_streamer().stream_out(object)
@@ -311,6 +335,17 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
{:actor, _} ->
@logger.error("The object doesn't have an actor: #{inspect(deleted_object)}")
:no_object_actor
+
+ {:user, _} ->
+ @logger.error(
+ "The object's actor could not be resolved to a user: #{inspect(deleted_object)}"
+ )
+
+ :no_object_user
+
+ {:object, _} ->
+ @logger.error("The object could not be deleted: #{inspect(deleted_object)}")
+ {:error, object}
end
%User{} ->
@@ -320,7 +355,11 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
end
if result == :ok do
- Notification.create_notifications(object)
+ # Only remove from index when deleting actual objects, not users or anything else
+ with %Pleroma.Object{} <- deleted_object do
+ Pleroma.Search.remove_from_index(deleted_object)
+ end
+
{:ok, object, meta}
else
{:error, result}
@@ -390,6 +429,55 @@ def handle(object, meta) do
{:ok, object, meta}
end
+ defp handle_update_user(
+ %{data: %{"type" => "Update", "object" => updated_object}} = object,
+ meta
+ ) do
+ if changeset = Keyword.get(meta, :user_update_changeset) do
+ changeset
+ |> User.update_and_set_cache()
+ else
+ {:ok, new_user_data} = ActivityPub.user_data_from_user_object(updated_object)
+
+ User.get_by_ap_id(updated_object["id"])
+ |> User.remote_user_changeset(new_user_data)
+ |> User.update_and_set_cache()
+ end
+
+ {:ok, object, meta}
+ end
+
+ defp handle_update_object(
+ %{data: %{"type" => "Update", "object" => updated_object}} = object,
+ meta
+ ) do
+ orig_object_ap_id = updated_object["id"]
+ orig_object = Object.get_by_ap_id(orig_object_ap_id)
+ orig_object_data = orig_object.data
+
+ updated_object =
+ if meta[:local] do
+ # If this is a local Update, it has not gone through the Transmogrifier,
+ # so we use the embedded object as-is.
+ updated_object
+ else
+ meta[:object_data]
+ end
+
+ if orig_object_data["type"] in Pleroma.Constants.updatable_object_types() do
+ {:ok, _, updated} =
+ Object.Updater.do_update_and_invalidate_cache(orig_object, updated_object)
+
+ if updated do
+ object
+ |> Activity.normalize()
+ |> ActivityPub.notify_and_stream()
+ end
+ end
+
+ {:ok, object, meta}
+ end
+
def handle_object_creation(%{"type" => "ChatMessage"} = object, _activity, meta) do
with {:ok, object, meta} <- Pipeline.common_pipeline(object, meta) do
actor = User.get_cached_by_ap_id(object.data["actor"])
@@ -445,7 +533,7 @@ def handle_object_creation(%{"type" => "Answer"} = object_map, _activity, meta)
end
def handle_object_creation(%{"type" => objtype} = object, _activity, meta)
- when objtype in ~w[Audio Video Event Article Note Page] do
+ when objtype in ~w[Audio Video Image Event Article Note Page] do
with {:ok, object, meta} <- Pipeline.common_pipeline(object, meta) do
{:ok, object, meta}
end
@@ -499,17 +587,14 @@ def handle_undoing(
def handle_undoing(object), do: {:error, ["don't know how to handle", object]}
- @spec delete_object(Object.t()) :: :ok | {:error, Ecto.Changeset.t()}
+ @spec delete_object(Activity.t()) :: :ok | {:error, Ecto.Changeset.t()}
defp delete_object(object) do
with {:ok, _} <- Repo.delete(object), do: :ok
end
defp send_notifications(meta) do
Keyword.get(meta, :notifications, [])
- |> Enum.each(fn notification ->
- Streamer.stream(["user", "user:notification"], notification)
- Push.send(notification)
- end)
+ |> Notification.send()
meta
end
diff --git a/lib/pleroma/web/activity_pub/side_effects/handling.ex b/lib/pleroma/web/activity_pub/side_effects/handling.ex
index eb012f576..4751bb4ce 100644
--- a/lib/pleroma/web/activity_pub/side_effects/handling.ex
+++ b/lib/pleroma/web/activity_pub/side_effects/handling.ex
@@ -4,5 +4,5 @@
defmodule Pleroma.Web.ActivityPub.SideEffects.Handling do
@callback handle(map(), keyword()) :: {:ok, map(), keyword()} | {:error, any()}
- @callback handle_after_transaction(map()) :: map()
+ @callback handle_after_transaction(keyword()) :: keyword()
end
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index d6622df86..edfe73a25 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -20,11 +20,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.Federator
- alias Pleroma.Workers.TransmogrifierWorker
import Ecto.Query
- require Logger
require Pleroma.Constants
@doc """
@@ -156,8 +154,7 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
|> Map.put("context", replied_object.data["context"] || object["conversation"])
|> Map.drop(["conversation", "inReplyToAtomUri"])
else
- e ->
- Logger.warn("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
+ _ ->
object
end
else
@@ -167,6 +164,26 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
def fix_in_reply_to(object, _options), do: object
+ def fix_quote_url_and_maybe_fetch(object, options \\ []) do
+ quote_url =
+ case Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes.fix_quote_url(object) do
+ %{"quoteUrl" => quote_url} -> quote_url
+ _ -> nil
+ end
+
+ with {:quoting?, true} <- {:quoting?, not is_nil(quote_url)},
+ {:ok, quoted_object} <- get_obj_helper(quote_url, options),
+ %Activity{} <- Activity.get_create_by_object_ap_id(quoted_object.data["id"]) do
+ Map.put(object, "quoteUrl", quoted_object.data["id"])
+ else
+ {:quoting?, _} ->
+ object
+
+ _ ->
+ object
+ end
+ end
+
defp prepare_in_reply_to(in_reply_to) do
cond do
is_bitstring(in_reply_to) ->
@@ -447,7 +464,7 @@ def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
options
)
- when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page} do
+ when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do
fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
object =
@@ -455,6 +472,7 @@ def handle_incoming(
|> strip_internal_fields()
|> fix_type(fetch_options)
|> fix_in_reply_to(fetch_options)
+ |> fix_quote_url_and_maybe_fetch(fetch_options)
data = Map.put(data, "object", object)
options = Keyword.put(options, :local, false)
@@ -629,6 +647,16 @@ def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_r
def set_reply_to_uri(obj), do: obj
+ @doc """
+ Fedibird compatibility
+ https://github.com/fedibird/mastodon/commit/dbd7ae6cf58a92ec67c512296b4daaea0d01e6ac
+ """
+ def set_quote_url(%{"quoteUrl" => quote_url} = object) when is_binary(quote_url) do
+ Map.put(object, "quoteUri", quote_url)
+ end
+
+ def set_quote_url(obj), do: obj
+
@doc """
Serialized Mastodon-compatible `replies` collection containing _self-replies_.
Based on Mastodon's ActivityPub::NoteSerializer#replies.
@@ -683,10 +711,29 @@ def prepare_object(object) do
|> prepare_attachments
|> set_conversation
|> set_reply_to_uri
+ |> set_quote_url
|> set_replies
|> strip_internal_fields
|> strip_internal_tags
|> set_type
+ |> maybe_process_history
+ end
+
+ defp maybe_process_history(%{"formerRepresentations" => %{"orderedItems" => history}} = object) do
+ processed_history =
+ Enum.map(
+ history,
+ fn
+ item when is_map(item) -> prepare_object(item)
+ item -> item
+ end
+ )
+
+ put_in(object, ["formerRepresentations", "orderedItems"], processed_history)
+ end
+
+ defp maybe_process_history(object) do
+ object
end
# @doc
@@ -711,13 +758,28 @@ def prepare_outgoing(%{"type" => activity_type, "object" => object_id} = data)
{:ok, data}
end
+ def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data)
+ when objtype in Pleroma.Constants.updatable_object_types() do
+ object =
+ object
+ |> prepare_object
+
+ data =
+ data
+ |> Map.put("object", object)
+ |> Map.merge(Utils.make_json_ld_header())
+ |> Map.delete("bcc")
+
+ {:ok, data}
+ end
+
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
object =
object_id
|> Object.normalize(fetch: false)
data =
- if Visibility.is_private?(object) && object.data["actor"] == ap_id do
+ if Visibility.private?(object) && object.data["actor"] == ap_id do
data |> Map.put("object", object |> Map.get(:data) |> prepare_object)
else
data |> maybe_fix_object_url
@@ -787,8 +849,7 @@ def maybe_fix_object_url(%{"object" => object} = data) when is_binary(object) do
relative_object do
Map.put(data, "object", external_url)
else
- {:fetch, e} ->
- Logger.error("Couldn't fetch #{object} #{inspect(e)}")
+ {:fetch, _} ->
data
_ ->
@@ -913,47 +974,6 @@ defp strip_internal_tags(%{"tag" => tags} = object) do
defp strip_internal_tags(object), do: object
- def perform(:user_upgrade, user) do
- # we pass a fake user so that the followers collection is stripped away
- old_follower_address = User.ap_followers(%User{nickname: user.nickname})
-
- from(
- a in Activity,
- where: ^old_follower_address in a.recipients,
- update: [
- set: [
- recipients:
- fragment(
- "array_replace(?,?,?)",
- a.recipients,
- ^old_follower_address,
- ^user.follower_address
- )
- ]
- ]
- )
- |> Repo.update_all([])
- end
-
- def upgrade_user_from_ap_id(ap_id) do
- with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
- {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
- {:ok, user} <- update_user(user, data) do
- {:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end)
- TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
- {:ok, user}
- else
- %User{} = user -> {:ok, user}
- e -> e
- end
- end
-
- defp update_user(user, data) do
- user
- |> User.remote_user_changeset(data)
- |> User.update_and_set_cache()
- end
-
def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
Map.put(data, "url", url["href"])
end
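# Outgoing side, for illustration (not in this diff): prepare_object/1 now mirrors the
# internal "quoteUrl" back out as Fedibird's "quoteUri" so quote posts federate to
# implementations that only understand that key.
#
#   set_quote_url(%{"quoteUrl" => "https://example.com/objects/1"})
#   # => %{"quoteUrl" => "https://example.com/objects/1",
#   #      "quoteUri" => "https://example.com/objects/1"}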
diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex
index 9cde7805c..797e79dda 100644
--- a/lib/pleroma/web/activity_pub/utils.ex
+++ b/lib/pleroma/web/activity_pub/utils.ex
@@ -7,6 +7,7 @@ defmodule Pleroma.Web.ActivityPub.Utils do
alias Ecto.UUID
alias Pleroma.Activity
alias Pleroma.Config
+ alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID
alias Pleroma.Maps
alias Pleroma.Notification
alias Pleroma.Object
@@ -31,7 +32,8 @@ defmodule Pleroma.Web.ActivityPub.Utils do
"Page",
"Question",
"Answer",
- "Audio"
+ "Audio",
+ "Image"
]
@strip_status_report_states ~w(closed resolved)
@supported_report_states ~w(open closed resolved)
@@ -154,22 +156,7 @@ def get_notified_from_object(object) do
Notification.get_notified_from_activity(%Activity{data: object}, false)
end
- def create_context(context) do
- context = context || generate_id("contexts")
-
- # Ecto has problems accessing the constraint inside the jsonb,
- # so we explicitly check for the existed object before insert
- object = Object.get_cached_by_ap_id(context)
-
- with true <- is_nil(object),
- changeset <- Object.context_mapping(context),
- {:ok, inserted_object} <- Repo.insert(changeset) do
- inserted_object
- else
- _ ->
- object
- end
- end
+ def maybe_create_context(context), do: context || generate_id("contexts")
@doc """
Enqueues an activity for federation if it's local
@@ -180,7 +167,7 @@ def maybe_federate(%Activity{local: true, data: %{"type" => type}} = activity) d
with true <- Config.get!([:instance, :federating]),
true <- type != "Block" || outgoing_blocks,
- false <- Visibility.is_local_public?(activity) do
+ false <- Visibility.local_public?(activity) do
Pleroma.Web.Federator.publish(activity)
end
@@ -201,18 +188,16 @@ def lazy_put_activity_defaults(map, true) do
|> Map.put_new("id", "pleroma:fakeid")
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", "pleroma:fakecontext")
- |> Map.put_new("context_id", -1)
|> lazy_put_object_defaults(true)
end
def lazy_put_activity_defaults(map, _fake?) do
- %{data: %{"id" => context}, id: context_id} = create_context(map["context"])
+ context = maybe_create_context(map["context"])
map
|> Map.put_new_lazy("id", &generate_activity_id/0)
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", context)
- |> Map.put_new("context_id", context_id)
|> lazy_put_object_defaults(false)
end
@@ -226,7 +211,6 @@ defp lazy_put_object_defaults(%{"object" => map} = activity, true)
|> Map.put_new("id", "pleroma:fake_object_id")
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", activity["context"])
- |> Map.put_new("context_id", activity["context_id"])
|> Map.put_new("fake", true)
%{activity | "object" => object}
@@ -239,7 +223,6 @@ defp lazy_put_object_defaults(%{"object" => map} = activity, _)
|> Map.put_new_lazy("id", &generate_object_id/0)
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", activity["context"])
- |> Map.put_new("context_id", activity["context_id"])
%{activity | "object" => object}
end
@@ -294,7 +277,7 @@ def make_like_data(
object_actor = User.get_cached_by_ap_id(object_actor_id)
to =
- if Visibility.is_public?(object) do
+ if Visibility.public?(object) do
[actor.follower_address, object.data["actor"]]
else
[object.data["actor"]]
@@ -344,21 +327,29 @@ def update_element_in_object(property, element, object, count \\ nil) do
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
def add_emoji_reaction_to_object(
- %Activity{data: %{"content" => emoji, "actor" => actor}},
+ %Activity{data: %{"content" => emoji, "actor" => actor}} = activity,
object
) do
reactions = get_cached_emoji_reactions(object)
+ emoji = Pleroma.Emoji.maybe_strip_name(emoji)
+ url = maybe_emoji_url(emoji, activity)
new_reactions =
- case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+ case Enum.find_index(reactions, fn [candidate, _, candidate_url] ->
+ if is_nil(candidate_url) do
+ emoji == candidate
+ else
+ url == candidate_url
+ end
+ end) do
nil ->
- reactions ++ [[emoji, [actor]]]
+ reactions ++ [[emoji, [actor], url]]
index ->
List.update_at(
reactions,
index,
- fn [emoji, users] -> [emoji, Enum.uniq([actor | users])] end
+ fn [emoji, users, url] -> [emoji, Enum.uniq([actor | users]), url] end
)
end
@@ -367,18 +358,40 @@ def add_emoji_reaction_to_object(
update_element_in_object("reaction", new_reactions, object, count)
end
+ defp maybe_emoji_url(
+ name,
+ %Activity{
+ data: %{
+ "tag" => [
+ %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}
+ ]
+ }
+ }
+ ),
+ do: url
+
+ defp maybe_emoji_url(_, _), do: nil
+
def emoji_count(reactions_list) do
- Enum.reduce(reactions_list, 0, fn [_, users], acc -> acc + length(users) end)
+ Enum.reduce(reactions_list, 0, fn [_, users, _], acc -> acc + length(users) end)
end
def remove_emoji_reaction_from_object(
- %Activity{data: %{"content" => emoji, "actor" => actor}},
+ %Activity{data: %{"content" => emoji, "actor" => actor}} = activity,
object
) do
+ emoji = Pleroma.Emoji.maybe_strip_name(emoji)
reactions = get_cached_emoji_reactions(object)
+ url = maybe_emoji_url(emoji, activity)
new_reactions =
- case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+ case Enum.find_index(reactions, fn [candidate, _, candidate_url] ->
+ if is_nil(candidate_url) do
+ emoji == candidate
+ else
+ url == candidate_url
+ end
+ end) do
nil ->
reactions
@@ -386,9 +399,9 @@ def remove_emoji_reaction_from_object(
List.update_at(
reactions,
index,
- fn [emoji, users] -> [emoji, List.delete(users, actor)] end
+ fn [emoji, users, url] -> [emoji, List.delete(users, actor), url] end
)
- |> Enum.reject(fn [_, users] -> Enum.empty?(users) end)
+ |> Enum.reject(fn [_, users, _] -> Enum.empty?(users) end)
end
count = emoji_count(new_reactions)
@@ -396,11 +409,7 @@ def remove_emoji_reaction_from_object(
end
def get_cached_emoji_reactions(object) do
- if is_list(object.data["reactions"]) do
- object.data["reactions"]
- else
- []
- end
+ Object.get_emoji_reactions(object)
end
@spec add_like_to_object(Activity.t(), Object.t()) ::
@@ -508,17 +517,37 @@ def fetch_latest_undo(%User{ap_id: ap_id}) do
def get_latest_reaction(internal_activity_id, %{ap_id: ap_id}, emoji) do
%{data: %{"object" => object_ap_id}} = Activity.get_by_id(internal_activity_id)
+ emoji = Pleroma.Emoji.maybe_quote(emoji)
"EmojiReact"
|> Activity.Queries.by_type()
|> where(actor: ^ap_id)
- |> where([activity], fragment("?->>'content' = ?", activity.data, ^emoji))
+ |> custom_emoji_discriminator(emoji)
|> Activity.Queries.by_object_id(object_ap_id)
|> order_by([activity], fragment("? desc nulls last", activity.id))
|> limit(1)
|> Repo.one()
end
+ defp custom_emoji_discriminator(query, emoji) do
+ if String.contains?(emoji, "@") do
+ stripped = Pleroma.Emoji.maybe_strip_name(emoji)
+ [name, domain] = String.split(stripped, "@")
+ domain_pattern = "%/" <> domain <> "/%"
+ emoji_pattern = Pleroma.Emoji.maybe_quote(name)
+
+ query
+ |> where([activity], fragment("?->>'content' = ?
+ AND EXISTS (
+ SELECT FROM jsonb_array_elements(?->'tag') elem
+ WHERE elem->>'id' ILIKE ?
+ )", activity.data, ^emoji_pattern, activity.data, ^domain_pattern))
+ else
+ query
+ |> where([activity], fragment("?->>'content' = ?", activity.data, ^emoji))
+ end
+ end
+
#### Announce-related helpers
@doc """
@@ -692,14 +721,18 @@ def make_listen_data(params, additional) do
#### Flag-related helpers
@spec make_flag_data(map(), map()) :: map()
- def make_flag_data(%{actor: actor, context: context, content: content} = params, additional) do
+ def make_flag_data(
+ %{actor: actor, context: context, content: content} = params,
+ additional
+ ) do
%{
"type" => "Flag",
"actor" => actor.ap_id,
"content" => content,
"object" => build_flag_object(params),
"context" => context,
- "state" => "open"
+ "state" => "open",
+ "rules" => Map.get(params, :rules, nil)
}
|> Map.merge(additional)
end
@@ -714,20 +747,24 @@ defp build_flag_object(%{statuses: statuses}) do
Enum.map(statuses || [], &build_flag_object/1)
end
- defp build_flag_object(%Activity{data: %{"id" => id}, object: %{data: data}}) do
- activity_actor = User.get_by_ap_id(data["actor"])
+ defp build_flag_object(%Activity{} = activity) do
+ object = Object.normalize(activity, fetch: false)
- %{
- "type" => "Note",
- "id" => id,
- "content" => data["content"],
- "published" => data["published"],
- "actor" =>
- AccountView.render(
- "show.json",
- %{user: activity_actor, skip_visibility_check: true}
- )
- }
+ # Do not allow people to report Creates. Instead, report the Object that is Created.
+ if activity.data["type"] != "Create" do
+ build_flag_object_with_actor_and_id(
+ object,
+ User.get_by_ap_id(activity.data["actor"]),
+ activity.data["id"]
+ )
+ else
+ build_flag_object(object)
+ end
+ end
+
+ defp build_flag_object(%Object{} = object) do
+ actor = User.get_by_ap_id(object.data["actor"])
+ build_flag_object_with_actor_and_id(object, actor, object.data["id"])
end
defp build_flag_object(act) when is_map(act) or is_binary(act) do
@@ -739,20 +776,33 @@ defp build_flag_object(act) when is_map(act) or is_binary(act) do
end
case Activity.get_by_ap_id_with_object(id) do
- %Activity{} = activity ->
- build_flag_object(activity)
+ %Activity{object: object} = _ ->
+ build_flag_object(object)
nil ->
- if activity = Activity.get_by_object_ap_id_with_object(id) do
- build_flag_object(activity)
- else
- %{"id" => id, "deleted" => true}
+ case Object.get_by_ap_id(id) do
+ %Object{} = object -> build_flag_object(object)
+ _ -> %{"id" => id, "deleted" => true}
end
end
end
defp build_flag_object(_), do: []
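+ # Renders the reported object as a Note snapshot along with its author's account view.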
+ defp build_flag_object_with_actor_and_id(%Object{data: data}, actor, id) do
+ %{
+ "type" => "Note",
+ "id" => id,
+ "content" => data["content"],
+ "published" => data["published"],
+ "actor" =>
+ AccountView.render(
+ "show.json",
+ %{user: actor, skip_visibility_check: true}
+ )
+ }
+ end
+
#### Report-related helpers
def get_reports(params, page, page_size) do
params =
@@ -767,22 +817,21 @@ def get_reports(params, page, page_size) do
ActivityPub.fetch_activities([], params, :offset)
end
- def update_report_state(%Activity{} = activity, state)
- when state in @strip_status_report_states do
- {:ok, stripped_activity} = strip_report_status_data(activity)
-
- new_data =
- activity.data
- |> Map.put("state", state)
- |> Map.put("object", stripped_activity.data["object"])
-
- activity
- |> Changeset.change(data: new_data)
- |> Repo.update()
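+ # Strip status data from the report object once it reaches one of the
+ # @strip_status_report_states, but only when [:instance, :report_strip_status] is set.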
+ defp maybe_strip_report_status(data, state) do
+ with true <- Config.get([:instance, :report_strip_status]),
+ true <- state in @strip_status_report_states,
+ {:ok, stripped_activity} = strip_report_status_data(%Activity{data: data}) do
+ data |> Map.put("object", stripped_activity.data["object"])
+ else
+ _ -> data
+ end
end
def update_report_state(%Activity{} = activity, state) when state in @supported_report_states do
- new_data = Map.put(activity.data, "state", state)
+ new_data =
+ activity.data
+ |> Map.put("state", state)
+ |> maybe_strip_report_status(state)
activity
|> Changeset.change(data: new_data)
@@ -807,9 +856,11 @@ def strip_report_status_data(activity) do
[actor | reported_activities] = activity.data["object"]
stripped_activities =
- Enum.map(reported_activities, fn
- act when is_map(act) -> act["id"]
- act when is_binary(act) -> act
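+ # Keep only entries that cast to a valid ActivityPub object id; drop anything else.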
+ Enum.reduce(reported_activities, [], fn act, acc ->
+ case ObjectID.cast(act) do
+ {:ok, act} -> [act | acc]
+ _ -> acc
+ end
end)
new_data = put_in(activity.data, ["object"], [actor | stripped_activities])
@@ -887,4 +938,27 @@ def get_existing_votes(actor, %{data: %{"id" => id}}) do
|> where([a, object: o], fragment("(?)->>'type' = 'Answer'", o.data))
|> Repo.all()
end
+
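+ # When an activity mentions local Group actors that do not block the poster,
+ # each mentioned group repeats (announces) the activity to its followers.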
+ def maybe_handle_group_posts(activity) do
+ poster = User.get_cached_by_ap_id(activity.actor)
+
+ mentions =
+ activity.data["to"]
+ |> Enum.filter(&(&1 != activity.actor))
+
+ mentioned_local_groups =
+ User.get_all_by_ap_id(mentions)
+ |> Enum.filter(fn user ->
+ user.actor_type == "Group" and
+ user.local and
+ not User.blocks?(user, poster)
+ end)
+
+ mentioned_local_groups
+ |> Enum.each(fn group ->
+ Pleroma.Web.CommonAPI.repeat(activity.id, group)
+ end)
+
+ :ok
+ end
end
diff --git a/lib/pleroma/web/activity_pub/views/object_view.ex b/lib/pleroma/web/activity_pub/views/object_view.ex
index f848aba3a..63caa915c 100644
--- a/lib/pleroma/web/activity_pub/views/object_view.ex
+++ b/lib/pleroma/web/activity_pub/views/object_view.ex
@@ -29,11 +29,11 @@ def render("object.json", %{object: %Activity{data: %{"type" => activity_type}}
def render("object.json", %{object: %Activity{} = activity}) do
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header()
- object = Object.normalize(activity, fetch: false)
+ object_id = Object.normalize(activity, id_only: true)
additional =
Transmogrifier.prepare_object(activity.data)
- |> Map.put("object", object.data["id"])
+ |> Map.put("object", object_id)
Map.merge(base, additional)
end
diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex
index 52f6bb56d..937e4fd67 100644
--- a/lib/pleroma/web/activity_pub/views/user_view.ex
+++ b/lib/pleroma/web/activity_pub/views/user_view.ex
@@ -34,7 +34,6 @@ def render("endpoints.json", %{user: %User{local: true} = _user}) do
def render("endpoints.json", _), do: %{}
def render("service.json", %{user: user}) do
- {:ok, user} = User.ensure_keys_present(user)
{:ok, _, public_key} = Keys.keys_from_pem(user.keys)
public_key = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, public_key)
public_key = :public_key.pem_encode([public_key])
@@ -47,6 +46,7 @@ def render("service.json", %{user: user}) do
"following" => "#{user.ap_id}/following",
"followers" => "#{user.ap_id}/followers",
"inbox" => "#{user.ap_id}/inbox",
+ "outbox" => "#{user.ap_id}/outbox",
"name" => "Pleroma",
"summary" =>
"An internal service actor for this Pleroma instance. No user-serviceable parts inside.",
@@ -67,11 +67,15 @@ def render("service.json", %{user: user}) do
def render("user.json", %{user: %User{nickname: nil} = user}),
do: render("service.json", %{user: user})
- def render("user.json", %{user: %User{nickname: "internal." <> _} = user}),
- do: render("service.json", %{user: user}) |> Map.put("preferredUsername", user.nickname)
+ def render("user.json", %{user: %User{nickname: "internal." <> _} = user}) do
+ render("service.json", %{user: user})
+ |> Map.merge(%{
+ "preferredUsername" => user.nickname,
+ "webfinger" => "acct:#{User.full_nickname(user)}"
+ })
+ end
def render("user.json", %{user: user}) do
- {:ok, user} = User.ensure_keys_present(user)
{:ok, _, public_key} = Keys.keys_from_pem(user.keys)
public_key = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, public_key)
public_key = :public_key.pem_encode([public_key])
@@ -122,7 +126,8 @@ def render("user.json", %{user: user}) do
"discoverable" => user.is_discoverable,
"capabilities" => capabilities,
"alsoKnownAs" => user.also_known_as,
- "vcard:bday" => birthday
+ "vcard:bday" => birthday,
+ "webfinger" => "acct:#{User.full_nickname(user)}"
}
|> Map.merge(maybe_make_image(&User.avatar_url/2, "icon", user))
|> Map.merge(maybe_make_image(&User.banner_url/2, "image", user))
diff --git a/lib/pleroma/web/activity_pub/visibility.ex b/lib/pleroma/web/activity_pub/visibility.ex
index 465f8a9b7..97fc7fa1b 100644
--- a/lib/pleroma/web/activity_pub/visibility.ex
+++ b/lib/pleroma/web/activity_pub/visibility.ex
@@ -11,28 +11,28 @@ defmodule Pleroma.Web.ActivityPub.Visibility do
require Pleroma.Constants
- @spec is_public?(Object.t() | Activity.t() | map()) :: boolean()
- def is_public?(%Object{data: %{"type" => "Tombstone"}}), do: false
- def is_public?(%Object{data: data}), do: is_public?(data)
- def is_public?(%Activity{data: %{"type" => "Move"}}), do: true
- def is_public?(%Activity{data: data}), do: is_public?(data)
- def is_public?(%{"directMessage" => true}), do: false
+ @spec public?(Object.t() | Activity.t() | map()) :: boolean()
+ def public?(%Object{data: %{"type" => "Tombstone"}}), do: false
+ def public?(%Object{data: data}), do: public?(data)
+ def public?(%Activity{data: %{"type" => "Move"}}), do: true
+ def public?(%Activity{data: data}), do: public?(data)
+ def public?(%{"directMessage" => true}), do: false
- def is_public?(data) do
+ def public?(data) do
Utils.label_in_message?(Pleroma.Constants.as_public(), data) or
Utils.label_in_message?(Utils.as_local_public(), data)
end
- def is_local_public?(%Object{data: data}), do: is_local_public?(data)
- def is_local_public?(%Activity{data: data}), do: is_local_public?(data)
+ def local_public?(%Object{data: data}), do: local_public?(data)
+ def local_public?(%Activity{data: data}), do: local_public?(data)
- def is_local_public?(data) do
+ def local_public?(data) do
Utils.label_in_message?(Utils.as_local_public(), data) and
not Utils.label_in_message?(Pleroma.Constants.as_public(), data)
end
- def is_private?(activity) do
- with false <- is_public?(activity),
+ def private?(activity) do
+ with false <- public?(activity),
%User{follower_address: follower_address} <-
User.get_cached_by_ap_id(activity.data["actor"]) do
follower_address in activity.data["to"]
@@ -41,20 +41,20 @@ def is_private?(activity) do
end
end
- def is_announceable?(activity, user, public \\ true) do
- is_public?(activity) ||
- (!public && is_private?(activity) && activity.data["actor"] == user.ap_id)
+ def announceable?(activity, user, public \\ true) do
+ public?(activity) ||
+ (!public && private?(activity) && activity.data["actor"] == user.ap_id)
end
- def is_direct?(%Activity{data: %{"directMessage" => true}}), do: true
- def is_direct?(%Object{data: %{"directMessage" => true}}), do: true
+ def direct?(%Activity{data: %{"directMessage" => true}}), do: true
+ def direct?(%Object{data: %{"directMessage" => true}}), do: true
- def is_direct?(activity) do
- !is_public?(activity) && !is_private?(activity)
+ def direct?(activity) do
+ !public?(activity) && !private?(activity)
end
- def is_list?(%{data: %{"listMessage" => _}}), do: true
- def is_list?(_), do: false
+ def list?(%{data: %{"listMessage" => _}}), do: true
+ def list?(_), do: false
@spec visible_for_user?(Object.t() | Activity.t() | nil, User.t() | nil) :: boolean()
def visible_for_user?(%Object{data: %{"type" => "Tombstone"}}, _), do: false
@@ -77,14 +77,17 @@ def visible_for_user?(%{__struct__: module} = message, nil)
when module in [Activity, Object] do
if restrict_unauthenticated_access?(message),
do: false,
- else: is_public?(message) and not is_local_public?(message)
+ else: public?(message) and not local_public?(message)
end
def visible_for_user?(%{__struct__: module} = message, user)
when module in [Activity, Object] do
x = [user.ap_id | User.following(user)]
y = [message.data["actor"]] ++ message.data["to"] ++ (message.data["cc"] || [])
- is_public?(message) || Enum.any?(x, &(&1 in y))
+
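+ # Local-only posts must not be visible to remote users, even when addressed to them.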
+ user_is_local = user.local
+ federatable = not local_public?(message)
+ (public?(message) || Enum.any?(x, &(&1 in y))) and (user_is_local || federatable)
end
def entire_thread_visible_for_user?(%Activity{} = activity, %User{} = user) do
diff --git a/lib/pleroma/web/admin_api/controllers/chat_controller.ex b/lib/pleroma/web/admin_api/controllers/chat_controller.ex
index c3e9e12ce..298543fcf 100644
--- a/lib/pleroma/web/admin_api/controllers/chat_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/chat_controller.ex
@@ -8,7 +8,6 @@ defmodule Pleroma.Web.AdminAPI.ChatController do
alias Pleroma.Activity
alias Pleroma.Chat
alias Pleroma.Chat.MessageReference
- alias Pleroma.ModerationLog
alias Pleroma.Pagination
alias Pleroma.Web.AdminAPI
alias Pleroma.Web.CommonAPI
@@ -42,12 +41,6 @@ def delete_message(%{assigns: %{user: user}} = conn, %{
^chat_id <- to_string(cm_ref.chat_id),
%Activity{id: activity_id} <- Activity.get_create_by_object_ap_id(object_ap_id),
{:ok, _} <- CommonAPI.delete(activity_id, user) do
- ModerationLog.insert_log(%{
- action: "chat_message_delete",
- actor: user,
- subject_id: message_id
- })
-
conn
|> put_view(MessageReferenceView)
|> render("show.json", chat_message_reference: cm_ref)
diff --git a/lib/pleroma/web/admin_api/controllers/config_controller.ex b/lib/pleroma/web/admin_api/controllers/config_controller.ex
index 55ab6d063..2c9c27294 100644
--- a/lib/pleroma/web/admin_api/controllers/config_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/config_controller.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.AdminAPI.ConfigController do
alias Pleroma.ConfigDB
alias Pleroma.Web.Plugs.OAuthScopesPlug
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(OAuthScopesPlug, %{scopes: ["admin:write"]} when action == :update)
plug(
@@ -22,13 +22,61 @@ defmodule Pleroma.Web.AdminAPI.ConfigController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.ConfigOperation
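+ # Setting labels and descriptions are run through Gettext using the msgctxt that
+ # Pleroma.Docs.Translator.Compiler derives from each setting's path.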
+ defp translate_descriptions(descriptions, path \\ []) do
+ Enum.map(descriptions, fn desc -> translate_item(desc, path) end)
+ end
+
+ defp translate_string(str, path, type) do
+ Gettext.dpgettext(
+ Pleroma.Web.Gettext,
+ "config_descriptions",
+ Pleroma.Docs.Translator.Compiler.msgctxt_for(path, type),
+ str
+ )
+ end
+
+ defp maybe_put_translated(item, key, path) do
+ if item[key] do
+ Map.put(
+ item,
+ key,
+ translate_string(
+ item[key],
+ path ++ [Pleroma.Docs.Translator.Compiler.key_for(item)],
+ to_string(key)
+ )
+ )
+ else
+ item
+ end
+ end
+
+ defp translate_item(item, path) do
+ item
+ |> maybe_put_translated(:label, path)
+ |> maybe_put_translated(:description, path)
+ |> translate_children(path)
+ end
+
+ defp translate_children(%{children: children} = item, path) when is_list(children) do
+ item
+ |> Map.put(
+ :children,
+ translate_descriptions(children, path ++ [Pleroma.Docs.Translator.Compiler.key_for(item)])
+ )
+ end
+
+ defp translate_children(item, _path) do
+ item
+ end
+
def descriptions(conn, _params) do
descriptions = Enum.filter(Pleroma.Docs.JSON.compiled_descriptions(), &whitelisted_config?/1)
- json(conn, descriptions)
+ json(conn, translate_descriptions(descriptions))
end
- def show(conn, %{only_db: true}) do
+ def show(%{private: %{open_api_spex: %{params: %{only_db: true}}}} = conn, _) do
with :ok <- configurable_from_database() do
configs = Pleroma.Repo.all(ConfigDB)
@@ -80,7 +128,7 @@ def show(conn, _params) do
end
end
- def update(%{body_params: %{configs: configs}} = conn, _) do
+ def update(%{private: %{open_api_spex: %{body_params: %{configs: configs}}}} = conn, _) do
with :ok <- configurable_from_database() do
results =
configs
diff --git a/lib/pleroma/web/admin_api/controllers/frontend_controller.ex b/lib/pleroma/web/admin_api/controllers/frontend_controller.ex
index b4dbb82fe..9e2ed4aac 100644
--- a/lib/pleroma/web/admin_api/controllers/frontend_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/frontend_controller.ex
@@ -18,13 +18,24 @@ defmodule Pleroma.Web.AdminAPI.FrontendController do
def index(conn, _params) do
installed = installed()
+ # First get frontends from the config,
+ # then add frontends that are installed but not in the config
frontends =
- [:frontends, :available]
- |> Config.get([])
+ Config.get([:frontends, :available], [])
|> Enum.map(fn {name, desc} ->
- Map.put(desc, "installed", name in installed)
+ desc
+ |> Map.put("installed", name in installed)
+ |> Map.put("installed_refs", installed_refs(name))
end)
+ frontends =
+ frontends ++
+ (installed
+ |> Enum.filter(fn n -> not Enum.any?(frontends, fn f -> f["name"] == n end) end)
+ |> Enum.map(fn name ->
+ %{"name" => name, "installed" => true, "installed_refs" => installed_refs(name)}
+ end))
+
render(conn, "index.json", frontends: frontends)
end
@@ -43,4 +54,12 @@ defp installed do
[]
end
end
+
+ def installed_refs(name) do
+ if name in installed() do
+ File.ls!(Path.join(Pleroma.Frontend.dir(), name))
+ else
+ []
+ end
+ end
end
diff --git a/lib/pleroma/web/admin_api/controllers/instance_document_controller.ex b/lib/pleroma/web/admin_api/controllers/instance_document_controller.ex
index 990a94313..d76a95960 100644
--- a/lib/pleroma/web/admin_api/controllers/instance_document_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/instance_document_controller.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.AdminAPI.InstanceDocumentController do
alias Pleroma.Web.Plugs.InstanceStatic
alias Pleroma.Web.Plugs.OAuthScopesPlug
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
action_fallback(Pleroma.Web.AdminAPI.FallbackController)
@@ -18,7 +18,7 @@ defmodule Pleroma.Web.AdminAPI.InstanceDocumentController do
plug(OAuthScopesPlug, %{scopes: ["admin:read"]} when action == :show)
plug(OAuthScopesPlug, %{scopes: ["admin:write"]} when action in [:update, :delete])
- def show(conn, %{name: document_name}) do
+ def show(%{private: %{open_api_spex: %{params: %{name: document_name}}}} = conn, _) do
with {:ok, url} <- InstanceDocument.get(document_name),
{:ok, content} <- File.read(InstanceStatic.file_path(url)) do
conn
@@ -27,13 +27,18 @@ def show(conn, %{name: document_name}) do
end
end
- def update(%{body_params: %{file: file}} = conn, %{name: document_name}) do
+ def update(
+ %{
+ private: %{open_api_spex: %{body_params: %{file: file}, params: %{name: document_name}}}
+ } = conn,
+ _
+ ) do
with {:ok, url} <- InstanceDocument.put(document_name, file.path) do
json(conn, %{"url" => url})
end
end
- def delete(conn, %{name: document_name}) do
+ def delete(%{private: %{open_api_spex: %{params: %{name: document_name}}}} = conn, _) do
with :ok <- InstanceDocument.delete(document_name) do
json(conn, %{})
end
diff --git a/lib/pleroma/web/admin_api/controllers/invite_controller.ex b/lib/pleroma/web/admin_api/controllers/invite_controller.ex
index c5d759bb5..7e3020f28 100644
--- a/lib/pleroma/web/admin_api/controllers/invite_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/invite_controller.ex
@@ -13,7 +13,7 @@ defmodule Pleroma.Web.AdminAPI.InviteController do
require Logger
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(OAuthScopesPlug, %{scopes: ["admin:read:invites"]} when action == :index)
plug(
@@ -33,14 +33,14 @@ def index(conn, _params) do
end
@doc "Create an account registration invite token"
- def create(%{body_params: params} = conn, _) do
+ def create(%{private: %{open_api_spex: %{body_params: params}}} = conn, _) do
{:ok, invite} = UserInviteToken.create_invite(params)
render(conn, "show.json", invite: invite)
end
@doc "Revokes invite by token"
- def revoke(%{body_params: %{token: token}} = conn, _) do
+ def revoke(%{private: %{open_api_spex: %{body_params: %{token: token}}}} = conn, _) do
with {:ok, invite} <- UserInviteToken.find_by_token(token),
{:ok, updated_invite} = UserInviteToken.update_invite(invite, %{used: true}) do
render(conn, "show.json", invite: updated_invite)
@@ -51,7 +51,13 @@ def revoke(%{body_params: %{token: token}} = conn, _) do
end
@doc "Sends registration invite via email"
- def email(%{assigns: %{user: user}, body_params: %{email: email} = params} = conn, _) do
+ def email(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: %{email: email} = params}}
+ } = conn,
+ _
+ ) do
with {_, false} <- {:registrations_open, Config.get([:instance, :registrations_open])},
{_, true} <- {:invites_enabled, Config.get([:instance, :invites_enabled])},
{:ok, invite_token} <- UserInviteToken.create_invite(),
diff --git a/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex b/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
index 4d53f5451..8b43ea90f 100644
--- a/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex
@@ -11,7 +11,7 @@ defmodule Pleroma.Web.AdminAPI.MediaProxyCacheController do
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
OAuthScopesPlug,
@@ -27,7 +27,7 @@ defmodule Pleroma.Web.AdminAPI.MediaProxyCacheController do
defdelegate open_api_operation(action), to: Spec.MediaProxyCacheOperation
- def index(%{assigns: %{user: _}} = conn, params) do
+ def index(%{assigns: %{user: _}, private: %{open_api_spex: %{params: params}}} = conn, _) do
entries = fetch_entries(params)
urls = paginate_entries(entries, params.page, params.page_size)
@@ -59,12 +59,19 @@ defp paginate_entries(entries, page, page_size) do
Enum.slice(entries, offset, page_size)
end
- def delete(%{assigns: %{user: _}, body_params: %{urls: urls}} = conn, _) do
+ def delete(
+ %{assigns: %{user: _}, private: %{open_api_spex: %{body_params: %{urls: urls}}}} = conn,
+ _
+ ) do
MediaProxy.remove_from_banned_urls(urls)
json(conn, %{})
end
- def purge(%{assigns: %{user: _}, body_params: %{urls: urls, ban: ban}} = conn, _) do
+ def purge(
+ %{assigns: %{user: _}, private: %{open_api_spex: %{body_params: %{urls: urls, ban: ban}}}} =
+ conn,
+ _
+ ) do
MediaProxy.Invalidation.purge(urls)
if ban do
diff --git a/lib/pleroma/web/admin_api/controllers/relay_controller.ex b/lib/pleroma/web/admin_api/controllers/relay_controller.ex
index 2e83fe139..1f36d3be5 100644
--- a/lib/pleroma/web/admin_api/controllers/relay_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/relay_controller.ex
@@ -11,7 +11,7 @@ defmodule Pleroma.Web.AdminAPI.RelayController do
require Logger
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
OAuthScopesPlug,
@@ -31,7 +31,13 @@ def index(conn, _params) do
end
end
- def follow(%{assigns: %{user: admin}, body_params: %{relay_url: target}} = conn, _) do
+ def follow(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{relay_url: target}}}
+ } = conn,
+ _
+ ) do
with {:ok, _message} <- Relay.follow(target) do
ModerationLog.insert_log(%{action: "relay_follow", actor: admin, target: target})
@@ -44,7 +50,13 @@ def follow(%{assigns: %{user: admin}, body_params: %{relay_url: target}} = conn,
end
end
- def unfollow(%{assigns: %{user: admin}, body_params: %{relay_url: target} = params} = conn, _) do
+ def unfollow(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{relay_url: target} = params}}
+ } = conn,
+ _
+ ) do
with {:ok, _message} <- Relay.unfollow(target, %{force: params[:force]}) do
ModerationLog.insert_log(%{action: "relay_unfollow", actor: admin, target: target})
diff --git a/lib/pleroma/web/admin_api/controllers/report_controller.ex b/lib/pleroma/web/admin_api/controllers/report_controller.ex
index 15cbbcc3e..89d8cc820 100644
--- a/lib/pleroma/web/admin_api/controllers/report_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/report_controller.ex
@@ -18,7 +18,7 @@ defmodule Pleroma.Web.AdminAPI.ReportController do
require Logger
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(OAuthScopesPlug, %{scopes: ["admin:read:reports"]} when action in [:index, :show])
plug(
@@ -31,13 +31,13 @@ defmodule Pleroma.Web.AdminAPI.ReportController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.ReportOperation
- def index(conn, params) do
+ def index(%{private: %{open_api_spex: %{params: params}}} = conn, _) do
reports = Utils.get_reports(params, params.page, params.page_size)
render(conn, "index.json", reports: reports)
end
- def show(conn, %{id: id}) do
+ def show(%{private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with %Activity{} = report <- Activity.get_report(id) do
render(conn, "show.json", Report.extract_report_info(report))
else
@@ -45,7 +45,13 @@ def show(conn, %{id: id}) do
end
end
- def update(%{assigns: %{user: admin}, body_params: %{reports: reports}} = conn, _) do
+ def update(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{reports: reports}}}
+ } = conn,
+ _
+ ) do
result =
Enum.map(reports, fn report ->
case CommonAPI.update_report_state(report.id, report.state) do
@@ -73,9 +79,13 @@ def update(%{assigns: %{user: admin}, body_params: %{reports: reports}} = conn,
end
end
- def notes_create(%{assigns: %{user: user}, body_params: %{content: content}} = conn, %{
- id: report_id
- }) do
+ def notes_create(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: %{content: content}, params: %{id: report_id}}}
+ } = conn,
+ _
+ ) do
with {:ok, _} <- ReportNote.create(user.id, report_id, content),
report <- Activity.get_by_id_with_user_actor(report_id) do
ModerationLog.insert_log(%{
@@ -92,10 +102,20 @@ def notes_create(%{assigns: %{user: user}, body_params: %{content: content}} = c
end
end
- def notes_delete(%{assigns: %{user: user}} = conn, %{
- id: note_id,
- report_id: report_id
- }) do
+ def notes_delete(
+ %{
+ assigns: %{user: user},
+ private: %{
+ open_api_spex: %{
+ params: %{
+ id: note_id,
+ report_id: report_id
+ }
+ }
+ }
+ } = conn,
+ _
+ ) do
with {:ok, note} <- ReportNote.destroy(note_id),
report <- Activity.get_by_id_with_user_actor(report_id) do
ModerationLog.insert_log(%{
diff --git a/lib/pleroma/web/admin_api/controllers/rule_controller.ex b/lib/pleroma/web/admin_api/controllers/rule_controller.ex
new file mode 100644
index 000000000..43b2f209a
--- /dev/null
+++ b/lib/pleroma/web/admin_api/controllers/rule_controller.ex
@@ -0,0 +1,62 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.AdminAPI.RuleController do
+ use Pleroma.Web, :controller
+
+ alias Pleroma.Repo
+ alias Pleroma.Rule
+ alias Pleroma.Web.Plugs.OAuthScopesPlug
+
+ import Pleroma.Web.ControllerHelper,
+ only: [
+ json_response: 3
+ ]
+
+ plug(Pleroma.Web.ApiSpec.CastAndValidate)
+
+ plug(
+ OAuthScopesPlug,
+ %{scopes: ["admin:write"]}
+ when action in [:create, :update, :delete]
+ )
+
+ plug(OAuthScopesPlug, %{scopes: ["admin:read"]} when action == :index)
+
+ action_fallback(AdminAPI.FallbackController)
+
+ defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.RuleOperation
+
+ def index(conn, _) do
+ rules =
+ Rule.query()
+ |> Repo.all()
+
+ render(conn, "index.json", rules: rules)
+ end
+
+ def create(%{body_params: params} = conn, _) do
+ rule =
+ params
+ |> Rule.create()
+
+ render(conn, "show.json", rule: rule)
+ end
+
+ def update(%{body_params: params} = conn, %{id: id}) do
+ rule =
+ params
+ |> Rule.update(id)
+
+ render(conn, "show.json", rule: rule)
+ end
+
+ def delete(conn, %{id: id}) do
+ with {:ok, _} <- Rule.delete(id) do
+ json(conn, %{})
+ else
+ _ -> json_response(conn, :bad_request, "")
+ end
+ end
+end
diff --git a/lib/pleroma/web/admin_api/controllers/status_controller.ex b/lib/pleroma/web/admin_api/controllers/status_controller.ex
index c9a4bfde9..9a3d49b57 100644
--- a/lib/pleroma/web/admin_api/controllers/status_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/status_controller.ex
@@ -65,12 +65,6 @@ def update(%{assigns: %{user: admin}, body_params: params} = conn, %{id: id}) do
def delete(%{assigns: %{user: user}} = conn, %{id: id}) do
with {:ok, %Activity{}} <- CommonAPI.delete(id, user) do
- ModerationLog.insert_log(%{
- action: "status_delete",
- actor: user,
- subject_id: id
- })
-
json(conn, %{})
end
end
diff --git a/lib/pleroma/web/admin_api/controllers/user_controller.ex b/lib/pleroma/web/admin_api/controllers/user_controller.ex
index 7b4ee46a4..9ac275396 100644
--- a/lib/pleroma/web/admin_api/controllers/user_controller.ex
+++ b/lib/pleroma/web/admin_api/controllers/user_controller.ex
@@ -18,7 +18,7 @@ defmodule Pleroma.Web.AdminAPI.UserController do
@users_page_size 50
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
OAuthScopesPlug,
@@ -51,13 +51,22 @@ defmodule Pleroma.Web.AdminAPI.UserController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.UserOperation
- def delete(conn, %{nickname: nickname}) do
+ def delete(%{private: %{open_api_spex: %{params: %{nickname: nickname}}}} = conn, _) do
conn
- |> Map.put(:body_params, %{nicknames: [nickname]})
- |> delete(%{})
+ |> do_deletes([nickname])
end
- def delete(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
+ def delete(
+ %{
+ private: %{open_api_spex: %{body_params: %{nicknames: nicknames}}}
+ } = conn,
+ _
+ ) do
+ conn
+ |> do_deletes(nicknames)
+ end
+
+ defp do_deletes(%{assigns: %{user: admin}} = conn, nicknames) when is_list(nicknames) do
users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
Enum.each(users, fn user ->
@@ -77,9 +86,13 @@ def delete(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = co
def follow(
%{
assigns: %{user: admin},
- body_params: %{
- follower: follower_nick,
- followed: followed_nick
+ private: %{
+ open_api_spex: %{
+ body_params: %{
+ follower: follower_nick,
+ followed: followed_nick
+ }
+ }
}
} = conn,
_
@@ -102,9 +115,13 @@ def follow(
def unfollow(
%{
assigns: %{user: admin},
- body_params: %{
- follower: follower_nick,
- followed: followed_nick
+ private: %{
+ open_api_spex: %{
+ body_params: %{
+ follower: follower_nick,
+ followed: followed_nick
+ }
+ }
}
} = conn,
_
@@ -124,7 +141,13 @@ def unfollow(
json(conn, "ok")
end
- def create(%{assigns: %{user: admin}, body_params: %{users: users}} = conn, _) do
+ def create(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{users: users}}}
+ } = conn,
+ _
+ ) do
changesets =
users
|> Enum.map(fn %{nickname: nickname, email: email, password: password} ->
@@ -178,7 +201,13 @@ def create(%{assigns: %{user: admin}, body_params: %{users: users}} = conn, _) d
end
end
- def show(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do
+ def show(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{params: %{nickname: nickname}}}
+ } = conn,
+ _
+ ) do
with %User{} = user <- User.get_cached_by_nickname_or_id(nickname, for: admin) do
render(conn, "show.json", %{user: user})
else
@@ -186,7 +215,11 @@ def show(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do
end
end
- def toggle_activation(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do
+ def toggle_activation(
+ %{assigns: %{user: admin}, private: %{open_api_spex: %{params: %{nickname: nickname}}}} =
+ conn,
+ _
+ ) do
user = User.get_cached_by_nickname(nickname)
{:ok, updated_user} = User.set_activation(user, !user.is_active)
@@ -202,7 +235,13 @@ def toggle_activation(%{assigns: %{user: admin}} = conn, %{nickname: nickname})
render(conn, "show.json", user: updated_user)
end
- def activate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
+ def activate(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{nicknames: nicknames}}}
+ } = conn,
+ _
+ ) do
users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
{:ok, updated_users} = User.set_activation(users, true)
@@ -212,10 +251,16 @@ def activate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} =
action: "activate"
})
- render(conn, "index.json", users: Keyword.values(updated_users))
+ render(conn, "index.json", users: updated_users)
end
- def deactivate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
+ def deactivate(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{nicknames: nicknames}}}
+ } = conn,
+ _
+ ) do
users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
{:ok, updated_users} = User.set_activation(users, false)
@@ -225,10 +270,16 @@ def deactivate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}}
action: "deactivate"
})
- render(conn, "index.json", users: Keyword.values(updated_users))
+ render(conn, "index.json", users: updated_users)
end
- def approve(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
+ def approve(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{nicknames: nicknames}}}
+ } = conn,
+ _
+ ) do
users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
{:ok, updated_users} = User.approve(users)
@@ -241,7 +292,13 @@ def approve(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = c
render(conn, "index.json", users: updated_users)
end
- def suggest(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
+ def suggest(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{nicknames: nicknames}}}
+ } = conn,
+ _
+ ) do
users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
{:ok, updated_users} = User.set_suggestion(users, true)
@@ -254,7 +311,13 @@ def suggest(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = c
render(conn, "index.json", users: updated_users)
end
- def unsuggest(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
+ def unsuggest(
+ %{
+ assigns: %{user: admin},
+ private: %{open_api_spex: %{body_params: %{nicknames: nicknames}}}
+ } = conn,
+ _
+ ) do
users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
{:ok, updated_users} = User.set_suggestion(users, false)
@@ -267,7 +330,7 @@ def unsuggest(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} =
render(conn, "index.json", users: updated_users)
end
- def index(conn, params) do
+ def index(%{private: %{open_api_spex: %{params: params}}} = conn, _) do
{page, page_size} = page_params(params)
filters = maybe_parse_filters(params[:filters])
diff --git a/lib/pleroma/web/admin_api/report.ex b/lib/pleroma/web/admin_api/report.ex
index 8d1abfa56..fa89e3405 100644
--- a/lib/pleroma/web/admin_api/report.ex
+++ b/lib/pleroma/web/admin_api/report.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.AdminAPI.Report do
alias Pleroma.Activity
+ alias Pleroma.Object
alias Pleroma.User
def extract_report_info(
@@ -16,10 +17,44 @@ def extract_report_info(
status_ap_ids
|> Enum.reject(&is_nil(&1))
|> Enum.map(fn
- act when is_map(act) -> Activity.get_by_ap_id_with_object(act["id"])
- act when is_binary(act) -> Activity.get_by_ap_id_with_object(act)
+ act when is_map(act) ->
+ Activity.get_create_by_object_ap_id_with_object(act["id"]) ||
+ Activity.get_by_ap_id_with_object(act["id"]) || make_fake_activity(act, user)
+
+ act when is_binary(act) ->
+ Activity.get_create_by_object_ap_id_with_object(act) ||
+ Activity.get_by_ap_id_with_object(act)
end)
%{report: report, user: user, account: account, statuses: statuses}
end
+
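+ # Reported statuses that can no longer be found are wrapped in a synthetic
+ # Activity/Object pair so the report can still be rendered.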
+ defp make_fake_activity(act, user) do
+ %Activity{
+ id: "pleroma:fake:#{act["id"]}",
+ data: %{
+ "actor" => user.ap_id,
+ "type" => "Create",
+ "to" => [],
+ "cc" => [],
+ "object" => act["id"],
+ "published" => act["published"],
+ "id" => act["id"],
+ "context" => "pleroma:fake"
+ },
+ recipients: [user.ap_id],
+ object: %Object{
+ data: %{
+ "actor" => user.ap_id,
+ "type" => "Note",
+ "content" => act["content"],
+ "published" => act["published"],
+ "to" => [],
+ "cc" => [],
+ "id" => act["id"],
+ "context" => "pleroma:fake"
+ }
+ }
+ }
+ end
end
diff --git a/lib/pleroma/web/admin_api/views/frontend_view.ex b/lib/pleroma/web/admin_api/views/frontend_view.ex
index 0ca3d67cb..ae4016581 100644
--- a/lib/pleroma/web/admin_api/views/frontend_view.ex
+++ b/lib/pleroma/web/admin_api/views/frontend_view.ex
@@ -15,7 +15,8 @@ def render("show.json", %{frontend: frontend}) do
git: frontend["git"],
build_url: frontend["build_url"],
ref: frontend["ref"],
- installed: frontend["installed"]
+ installed: frontend["installed"],
+ installed_refs: frontend["installed_refs"]
}
end
end
diff --git a/lib/pleroma/web/admin_api/views/report_view.ex b/lib/pleroma/web/admin_api/views/report_view.ex
index b761dbb22..b4b0be267 100644
--- a/lib/pleroma/web/admin_api/views/report_view.ex
+++ b/lib/pleroma/web/admin_api/views/report_view.ex
@@ -6,9 +6,11 @@ defmodule Pleroma.Web.AdminAPI.ReportView do
use Pleroma.Web, :view
alias Pleroma.HTML
+ alias Pleroma.Rule
alias Pleroma.User
alias Pleroma.Web.AdminAPI
alias Pleroma.Web.AdminAPI.Report
+ alias Pleroma.Web.AdminAPI.RuleView
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.MastodonAPI.StatusView
@@ -46,7 +48,8 @@ def render("show.json", %{report: report, user: user, account: account, statuses
as: :activity
}),
state: report.data["state"],
- notes: render(__MODULE__, "index_notes.json", %{notes: report.report_notes})
+ notes: render(__MODULE__, "index_notes.json", %{notes: report.report_notes}),
+ rules: rules(Map.get(report.data, "rules", nil))
}
end
@@ -71,4 +74,16 @@ def render("show_note.json", %{
created_at: Utils.to_masto_date(inserted_at)
}
end
+
+ defp rules(nil) do
+ []
+ end
+
+ defp rules(rule_ids) do
+ rules =
+ rule_ids
+ |> Rule.get()
+
+ render(RuleView, "index.json", rules: rules)
+ end
end
diff --git a/lib/pleroma/web/admin_api/views/rule_view.ex b/lib/pleroma/web/admin_api/views/rule_view.ex
new file mode 100644
index 000000000..606443f05
--- /dev/null
+++ b/lib/pleroma/web/admin_api/views/rule_view.ex
@@ -0,0 +1,22 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.AdminAPI.RuleView do
+ use Pleroma.Web, :view
+
+ require Pleroma.Constants
+
+ def render("index.json", %{rules: rules} = _opts) do
+ render_many(rules, __MODULE__, "show.json")
+ end
+
+ def render("show.json", %{rule: rule} = _opts) do
+ %{
+ id: to_string(rule.id),
+ priority: rule.priority,
+ text: rule.text,
+ hint: rule.hint
+ }
+ end
+end
diff --git a/lib/pleroma/web/api_spec.ex b/lib/pleroma/web/api_spec.ex
index cae4241ff..314782818 100644
--- a/lib/pleroma/web/api_spec.ex
+++ b/lib/pleroma/web/api_spec.ex
@@ -10,6 +10,14 @@ defmodule Pleroma.Web.ApiSpec do
@behaviour OpenApi
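+ # The streaming endpoint is not served through the Phoenix router, so its path item
+ # has to be built by hand and merged into the router-derived paths below.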
+ defp streaming_paths do
+ %{
+ "/api/v1/streaming" => %OpenApiSpex.PathItem{
+ get: Pleroma.Web.ApiSpec.StreamingOperation.streaming_operation()
+ }
+ }
+ end
+
@impl OpenApi
def spec(opts \\ []) do
%OpenApi{
@@ -35,7 +43,7 @@ def spec(opts \\ []) do
- [Mastodon API documentation](https://docs.joinmastodon.org/client/intro/)
- [Differences in Mastodon API responses from vanilla Mastodon](https://docs-develop.pleroma.social/backend/development/API/differences_in_mastoapi_responses/)
- Please report such occurences on our [issue tracker](https://git.pleroma.social/pleroma/pleroma/-/issues). Feel free to submit API questions or proposals there too!
+ Please report such occurrences on our [issue tracker](https://git.pleroma.social/pleroma/pleroma/-/issues). Feel free to submit API questions or proposals there too!
""",
# Strip environment from the version
version: Application.spec(:pleroma, :vsn) |> to_string() |> String.replace(~r/\+.*$/, ""),
@@ -45,7 +53,7 @@ def spec(opts \\ []) do
}
},
# populate the paths from a phoenix router
- paths: OpenApiSpex.Paths.from_router(Router),
+ paths: Map.merge(streaming_paths(), OpenApiSpex.Paths.from_router(Router)),
components: %OpenApiSpex.Components{
parameters: %{
"accountIdOrNickname" =>
@@ -86,16 +94,18 @@ def spec(opts \\ []) do
"tags" => [
"Chat administration",
"Emoji pack administration",
- "Frontend managment",
+ "Frontend management",
"Instance configuration",
"Instance documents",
+ "Instance rule managment",
"Invites",
"MediaProxy cache",
- "OAuth application managment",
+ "OAuth application management",
"Relays",
- "Report managment",
+ "Report management",
"Status administration",
- "User administration"
+ "User administration",
+ "Announcement management"
]
},
%{"name" => "Applications", "tags" => ["Applications", "Push subscriptions"]},
@@ -110,10 +120,12 @@ def spec(opts \\ []) do
"Follow requests",
"Mascot",
"Markers",
- "Notifications"
+ "Notifications",
+ "Filters",
+ "Settings"
]
},
- %{"name" => "Instance", "tags" => ["Custom emojis"]},
+ %{"name" => "Instance", "tags" => ["Custom emojis", "Instance misc"]},
%{"name" => "Messaging", "tags" => ["Chats", "Conversations"]},
%{
"name" => "Statuses",
@@ -125,10 +137,22 @@ def spec(opts \\ []) do
"Retrieve status information",
"Scheduled statuses",
"Search",
- "Status actions"
+ "Status actions",
+ "Media attachments",
+ "Bookmark folders"
]
},
- %{"name" => "Miscellaneous", "tags" => ["Emoji packs", "Reports", "Suggestions"]}
+ %{
+ "name" => "Miscellaneous",
+ "tags" => [
+ "Emoji packs",
+ "Reports",
+ "Suggestions",
+ "Announcements",
+ "Remote interaction",
+ "Others"
+ ]
+ }
]
}
}
diff --git a/lib/pleroma/web/api_spec/cast_and_validate.ex b/lib/pleroma/web/api_spec/cast_and_validate.ex
index add59eb88..f3e8e093e 100644
--- a/lib/pleroma/web/api_spec/cast_and_validate.ex
+++ b/lib/pleroma/web/api_spec/cast_and_validate.ex
@@ -27,10 +27,12 @@ def init(opts) do
@impl Plug
- def call(conn, %{operation_id: operation_id, render_error: render_error}) do
+ def call(conn, %{operation_id: operation_id, render_error: render_error} = opts) do
{spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn)
operation = operation_lookup[operation_id]
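+ # Only :replace_params is passed through to OpenApiSpex; with replace_params: false
+ # the cast params stay under conn.private.open_api_spex instead of replacing conn.params.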
+ cast_opts = opts |> Map.take([:replace_params]) |> Map.to_list()
+
content_type =
case Conn.get_req_header(conn, "content-type") do
[header_value | _] ->
@@ -44,7 +46,7 @@ def call(conn, %{operation_id: operation_id, render_error: render_error}) do
conn = Conn.put_private(conn, :operation_id, operation_id)
- case cast_and_validate(spec, operation, conn, content_type, strict?()) do
+ case cast_and_validate(spec, operation, conn, content_type, strict?(), cast_opts) do
{:ok, conn} ->
conn
@@ -94,11 +96,11 @@ def call(
def call(conn, opts), do: OpenApiSpex.Plug.CastAndValidate.call(conn, opts)
- defp cast_and_validate(spec, operation, conn, content_type, true = _strict) do
- OpenApiSpex.cast_and_validate(spec, operation, conn, content_type)
+ defp cast_and_validate(spec, operation, conn, content_type, true = _strict, cast_opts) do
+ OpenApiSpex.cast_and_validate(spec, operation, conn, content_type, cast_opts)
end
- defp cast_and_validate(spec, operation, conn, content_type, false = _strict) do
+ defp cast_and_validate(spec, operation, conn, content_type, false = _strict, cast_opts) do
case OpenApiSpex.cast_and_validate(spec, operation, conn, content_type) do
{:ok, conn} ->
{:ok, conn}
@@ -123,7 +125,7 @@ defp cast_and_validate(spec, operation, conn, content_type, false = _strict) do
end)
conn = %Conn{conn | query_params: query_params}
- OpenApiSpex.cast_and_validate(spec, operation, conn, content_type)
+ OpenApiSpex.cast_and_validate(spec, operation, conn, content_type, cast_opts)
end
end
diff --git a/lib/pleroma/web/api_spec/helpers.ex b/lib/pleroma/web/api_spec/helpers.ex
index f20a9163d..7257253ba 100644
--- a/lib/pleroma/web/api_spec/helpers.ex
+++ b/lib/pleroma/web/api_spec/helpers.ex
@@ -62,7 +62,7 @@ def with_relationships_param do
Operation.parameter(
:with_relationships,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Embed relationships into accounts. **If this parameter is not set account's `pleroma.relationship` is going to be `null`.**"
)
end
diff --git a/lib/pleroma/web/api_spec/operations/account_operation.ex b/lib/pleroma/web/api_spec/operations/account_operation.ex
index a64762285..85f02166f 100644
--- a/lib/pleroma/web/api_spec/operations/account_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/account_operation.ex
@@ -11,6 +11,7 @@ defmodule Pleroma.Web.ApiSpec.AccountOperation do
alias Pleroma.Web.ApiSpec.Schemas.ActorType
alias Pleroma.Web.ApiSpec.Schemas.ApiError
alias Pleroma.Web.ApiSpec.Schemas.BooleanLike
+ alias Pleroma.Web.ApiSpec.Schemas.FlakeID
alias Pleroma.Web.ApiSpec.Schemas.List
alias Pleroma.Web.ApiSpec.Schemas.Status
alias Pleroma.Web.ApiSpec.Schemas.VisibilityScope
@@ -64,7 +65,8 @@ def update_credentials_operation do
requestBody: request_body("Parameters", update_credentials_request(), required: true),
responses: %{
200 => Operation.response("Account", "application/json", Account),
- 403 => Operation.response("Error", "application/json", ApiError)
+ 403 => Operation.response("Error", "application/json", ApiError),
+ 413 => Operation.response("Error", "application/json", ApiError)
}
}
end
@@ -121,22 +123,27 @@ def statuses_operation do
parameters:
[
%Reference{"$ref": "#/components/parameters/accountIdOrNickname"},
- Operation.parameter(:pinned, :query, BooleanLike, "Include only pinned statuses"),
+ Operation.parameter(
+ :pinned,
+ :query,
+ BooleanLike.schema(),
+ "Include only pinned statuses"
+ ),
Operation.parameter(:tagged, :query, :string, "With tag"),
Operation.parameter(
:only_media,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Include only statuses with media attached"
),
Operation.parameter(
:with_muted,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Include statuses from muted accounts."
),
- Operation.parameter(:exclude_reblogs, :query, BooleanLike, "Exclude reblogs"),
- Operation.parameter(:exclude_replies, :query, BooleanLike, "Exclude replies"),
+ Operation.parameter(:exclude_reblogs, :query, BooleanLike.schema(), "Exclude reblogs"),
+ Operation.parameter(:exclude_replies, :query, BooleanLike.schema(), "Exclude replies"),
Operation.parameter(
:exclude_visibilities,
:query,
@@ -146,7 +153,7 @@ def statuses_operation do
Operation.parameter(
:with_muted,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Include reactions from muted accounts."
)
] ++ pagination_params(),
@@ -223,12 +230,12 @@ def follow_operation do
type: :object,
properties: %{
reblogs: %Schema{
- type: :boolean,
+ allOf: [BooleanLike],
description: "Receive this account's reblogs in home timeline? Defaults to true.",
default: true
},
notify: %Schema{
- type: :boolean,
+ allOf: [BooleanLike],
description:
"Receive notifications for all statuses posted by the account? Defaults to false.",
default: false
@@ -278,11 +285,17 @@ def mute_operation do
%Schema{allOf: [BooleanLike], default: true},
"Mute notifications in addition to statuses? Defaults to `true`."
),
+ Operation.parameter(
+ :duration,
+ :query,
+ %Schema{type: :integer},
+ "Expire the mute in `duration` seconds. Default 0 for infinity"
+ ),
Operation.parameter(
:expires_in,
:query,
%Schema{type: :integer, default: 0},
- "Expire the mute in `expires_in` seconds. Default 0 for infinity"
+ "Deprecated, use `duration` instead"
)
],
responses: %{
@@ -340,7 +353,7 @@ def endorse_operation do
summary: "Endorse",
operationId: "AccountController.endorse",
security: [%{"oAuth" => ["follow", "write:accounts"]}],
- description: "Addds the given account to endorsed accounts list.",
+ description: "Adds the given account to endorsed accounts list.",
parameters: [%Reference{"$ref": "#/components/parameters/accountIdOrNickname"}],
responses: %{
200 => Operation.response("Relationship", "application/json", AccountRelationship),
@@ -370,6 +383,22 @@ def unendorse_operation do
}
end
+ def remove_from_followers_operation do
+ %Operation{
+ tags: ["Account actions"],
+ summary: "Remove from followers",
+ operationId: "AccountController.remove_from_followers",
+ security: [%{"oAuth" => ["follow", "write:follows"]}],
+ description: "Remove the given account from followers",
+ parameters: [%Reference{"$ref": "#/components/parameters/accountIdOrNickname"}],
+ responses: %{
+ 200 => Operation.response("Relationship", "application/json", AccountRelationship),
+ 400 => Operation.response("Error", "application/json", ApiError),
+ 404 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
def note_operation do
%Operation{
tags: ["Account actions"],
@@ -429,7 +458,7 @@ def blocks_operation do
operationId: "AccountController.blocks",
description: "View your blocks. See also accounts/:id/{block,unblock}",
security: [%{"oAuth" => ["read:blocks"]}],
- parameters: pagination_params(),
+ parameters: [with_relationships_param() | pagination_params()],
responses: %{
200 => Operation.response("Accounts", "application/json", array_of_accounts())
}
@@ -438,7 +467,7 @@ def blocks_operation do
def lookup_operation do
%Operation{
- tags: ["Account lookup"],
+ tags: ["Retrieve account information"],
summary: "Find a user by nickname",
operationId: "AccountController.lookup",
parameters: [
@@ -485,6 +514,48 @@ def identity_proofs_operation do
}
end
+ def familiar_followers_operation do
+ %Operation{
+ tags: ["Retrieve account information"],
+ summary: "Followers that you follow",
+ operationId: "AccountController.familiar_followers",
+ description:
+ "Obtain a list of all accounts that follow a given account, filtered for accounts you follow.",
+ security: [%{"oAuth" => ["read:follows"]}],
+ parameters: [
+ Operation.parameter(
+ :id,
+ :query,
+ %Schema{
+ oneOf: [%Schema{type: :array, items: %Schema{type: :string}}, %Schema{type: :string}]
+ },
+ "Account IDs",
+ example: "123"
+ )
+ ],
+ responses: %{
+ 200 =>
+ Operation.response("Accounts", "application/json", %Schema{
+ title: "ArrayOfAccounts",
+ type: :array,
+ items: %Schema{
+ title: "Account",
+ type: :object,
+ properties: %{
+ id: FlakeID,
+ accounts: %Schema{
+ title: "ArrayOfAccounts",
+ type: :array,
+ items: Account,
+ example: [Account.schema().example]
+ }
+ }
+ }
+ })
+ }
+ }
+ end
+
defp create_request do
%Schema{
title: "AccountCreateRequest",
@@ -545,10 +616,18 @@ defp create_request do
description: "Invite token required when the registrations aren't public"
},
birthday: %Schema{
- type: :string,
nullable: true,
description: "User's birthday",
- format: :date
+ anyOf: [
+ %Schema{
+ type: :string,
+ format: :date
+ },
+ %Schema{
+ type: :string,
+ maxLength: 0
+ }
+ ]
},
language: %Schema{
type: :string,
@@ -733,10 +812,18 @@ defp update_credentials_request do
},
actor_type: ActorType,
birthday: %Schema{
- type: :string,
nullable: true,
description: "User's birthday",
- format: :date
+ anyOf: [
+ %Schema{
+ type: :string,
+ format: :date
+ },
+ %Schema{
+ type: :string,
+ maxLength: 0
+ }
+ ]
},
show_birthday: %Schema{
allOf: [BooleanLike],
@@ -861,10 +948,15 @@ defp mute_request do
description: "Mute notifications in addition to statuses? Defaults to true.",
default: true
},
+ duration: %Schema{
+ type: :integer,
+ nullable: true,
+ description: "Expire the mute in `expires_in` seconds. Default 0 for infinity"
+ },
expires_in: %Schema{
type: :integer,
nullable: true,
- description: "Expire the mute in `expires_in` seconds. Default 0 for infinity",
+ description: "Deprecated, use `duration` instead",
default: 0
}
},
diff --git a/lib/pleroma/web/api_spec/operations/admin/announcement_operation.ex b/lib/pleroma/web/api_spec/operations/admin/announcement_operation.ex
index 58a039e72..49850e5d2 100644
--- a/lib/pleroma/web/api_spec/operations/admin/announcement_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/announcement_operation.ex
@@ -17,7 +17,7 @@ def open_api_operation(action) do
def index_operation do
%Operation{
- tags: ["Announcement managment"],
+ tags: ["Announcement management"],
summary: "Retrieve a list of announcements",
operationId: "AdminAPI.AnnouncementController.index",
security: [%{"oAuth" => ["admin:read"]}],
@@ -46,7 +46,7 @@ def index_operation do
def show_operation do
%Operation{
- tags: ["Announcement managment"],
+ tags: ["Announcement management"],
summary: "Display one announcement",
operationId: "AdminAPI.AnnouncementController.show",
security: [%{"oAuth" => ["admin:read"]}],
@@ -69,7 +69,7 @@ def show_operation do
def delete_operation do
%Operation{
- tags: ["Announcement managment"],
+ tags: ["Announcement management"],
summary: "Delete one announcement",
operationId: "AdminAPI.AnnouncementController.delete",
security: [%{"oAuth" => ["admin:write"]}],
@@ -92,7 +92,7 @@ def delete_operation do
def create_operation do
%Operation{
- tags: ["Announcement managment"],
+ tags: ["Announcement management"],
summary: "Create one announcement",
operationId: "AdminAPI.AnnouncementController.create",
security: [%{"oAuth" => ["admin:write"]}],
@@ -107,7 +107,7 @@ def create_operation do
def change_operation do
%Operation{
- tags: ["Announcement managment"],
+ tags: ["Announcement management"],
summary: "Change one announcement",
operationId: "AdminAPI.AnnouncementController.change",
security: [%{"oAuth" => ["admin:write"]}],
diff --git a/lib/pleroma/web/api_spec/operations/admin/frontend_operation.ex b/lib/pleroma/web/api_spec/operations/admin/frontend_operation.ex
index 4bfe5ac5a..e17881b49 100644
--- a/lib/pleroma/web/api_spec/operations/admin/frontend_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/frontend_operation.ex
@@ -16,7 +16,7 @@ def open_api_operation(action) do
def index_operation do
%Operation{
- tags: ["Frontend managment"],
+ tags: ["Frontend management"],
summary: "Retrieve a list of available frontends",
operationId: "AdminAPI.FrontendController.index",
security: [%{"oAuth" => ["admin:read"]}],
@@ -29,7 +29,7 @@ def index_operation do
def install_operation do
%Operation{
- tags: ["Frontend managment"],
+ tags: ["Frontend management"],
summary: "Install a frontend",
operationId: "AdminAPI.FrontendController.install",
security: [%{"oAuth" => ["admin:read"]}],
@@ -51,8 +51,9 @@ defp list_of_frontends do
name: %Schema{type: :string},
git: %Schema{type: :string, format: :uri, nullable: true},
build_url: %Schema{type: :string, format: :uri, nullable: true},
- ref: %Schema{type: :string},
- installed: %Schema{type: :boolean}
+ ref: %Schema{type: :string, nullable: true},
+ installed: %Schema{type: :boolean},
+ installed_refs: %Schema{type: :array, items: %Schema{type: :string}}
}
}
}
diff --git a/lib/pleroma/web/api_spec/operations/admin/o_auth_app_operation.ex b/lib/pleroma/web/api_spec/operations/admin/o_auth_app_operation.ex
index 1a05aff6a..2b2496c26 100644
--- a/lib/pleroma/web/api_spec/operations/admin/o_auth_app_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/o_auth_app_operation.ex
@@ -17,7 +17,7 @@ def open_api_operation(action) do
def index_operation do
%Operation{
summary: "Retrieve a list of OAuth applications",
- tags: ["OAuth application managment"],
+ tags: ["OAuth application management"],
operationId: "AdminAPI.OAuthAppController.index",
security: [%{"oAuth" => ["admin:write"]}],
parameters: [
@@ -69,7 +69,7 @@ def index_operation do
def create_operation do
%Operation{
- tags: ["OAuth application managment"],
+ tags: ["OAuth application management"],
summary: "Create an OAuth application",
operationId: "AdminAPI.OAuthAppController.create",
requestBody: request_body("Parameters", create_request()),
@@ -84,7 +84,7 @@ def create_operation do
def update_operation do
%Operation{
- tags: ["OAuth application managment"],
+ tags: ["OAuth application management"],
summary: "Update OAuth application",
operationId: "AdminAPI.OAuthAppController.update",
parameters: [id_param() | admin_api_params()],
@@ -102,7 +102,7 @@ def update_operation do
def delete_operation do
%Operation{
- tags: ["OAuth application managment"],
+ tags: ["OAuth application management"],
summary: "Delete OAuth application",
operationId: "AdminAPI.OAuthAppController.delete",
parameters: [id_param() | admin_api_params()],
diff --git a/lib/pleroma/web/api_spec/operations/admin/report_operation.ex b/lib/pleroma/web/api_spec/operations/admin/report_operation.ex
index 312e091a5..25a604beb 100644
--- a/lib/pleroma/web/api_spec/operations/admin/report_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/report_operation.ex
@@ -19,7 +19,7 @@ def open_api_operation(action) do
def index_operation do
%Operation{
- tags: ["Report managment"],
+ tags: ["Report management"],
summary: "Retrieve a list of reports",
operationId: "AdminAPI.ReportController.index",
security: [%{"oAuth" => ["admin:read:reports"]}],
@@ -30,6 +30,12 @@ def index_operation do
report_state(),
"Filter by report state"
),
+ Operation.parameter(
+ :rule_id,
+ :query,
+ %Schema{type: :string},
+ "Filter by selected rule id"
+ ),
Operation.parameter(
:limit,
:query,
@@ -69,7 +75,7 @@ def index_operation do
def show_operation do
%Operation{
- tags: ["Report managment"],
+ tags: ["Report management"],
summary: "Retrieve a report",
operationId: "AdminAPI.ReportController.show",
parameters: [id_param() | admin_api_params()],
@@ -83,7 +89,7 @@ def show_operation do
def update_operation do
%Operation{
- tags: ["Report managment"],
+ tags: ["Report management"],
summary: "Change state of specified reports",
operationId: "AdminAPI.ReportController.update",
security: [%{"oAuth" => ["admin:write:reports"]}],
@@ -99,7 +105,7 @@ def update_operation do
def notes_create_operation do
%Operation{
- tags: ["Report managment"],
+ tags: ["Report management"],
summary: "Add a note to the report",
operationId: "AdminAPI.ReportController.notes_create",
parameters: [id_param() | admin_api_params()],
@@ -120,7 +126,7 @@ def notes_create_operation do
def notes_delete_operation do
%Operation{
- tags: ["Report managment"],
+ tags: ["Report management"],
summary: "Delete note attached to the report",
operationId: "AdminAPI.ReportController.notes_delete",
parameters: [
@@ -141,7 +147,7 @@ def report_state do
end
def id_param do
- Operation.parameter(:id, :path, FlakeID, "Report ID",
+ Operation.parameter(:id, :path, FlakeID.schema(), "Report ID",
example: "9umDrYheeY451cQnEe",
required: true
)
@@ -169,6 +175,17 @@ defp report do
inserted_at: %Schema{type: :string, format: :"date-time"}
}
}
+ },
+ rules: %Schema{
+ type: :array,
+ items: %Schema{
+ type: :object,
+ properties: %{
+ id: %Schema{type: :string},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string, nullable: true}
+ }
+ }
}
}
}
diff --git a/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex b/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex
new file mode 100644
index 000000000..c3a3ecc7c
--- /dev/null
+++ b/lib/pleroma/web/api_spec/operations/admin/rule_operation.ex
@@ -0,0 +1,115 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Admin.RuleOperation do
+ alias OpenApiSpex.Operation
+ alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.Schemas.ApiError
+
+ import Pleroma.Web.ApiSpec.Helpers
+
+ def open_api_operation(action) do
+ operation = String.to_existing_atom("#{action}_operation")
+ apply(__MODULE__, operation, [])
+ end
+
+ def index_operation do
+ %Operation{
+ tags: ["Instance rule managment"],
+ summary: "Retrieve list of instance rules",
+ operationId: "AdminAPI.RuleController.index",
+ security: [%{"oAuth" => ["admin:read"]}],
+ responses: %{
+ 200 =>
+ Operation.response("Response", "application/json", %Schema{
+ type: :array,
+ items: rule()
+ }),
+ 403 => Operation.response("Forbidden", "application/json", ApiError)
+ }
+ }
+ end
+
+ def create_operation do
+ %Operation{
+ tags: ["Instance rule managment"],
+ summary: "Create new rule",
+ operationId: "AdminAPI.RuleController.create",
+ security: [%{"oAuth" => ["admin:write"]}],
+ parameters: admin_api_params(),
+ requestBody: request_body("Parameters", create_request(), required: true),
+ responses: %{
+ 200 => Operation.response("Response", "application/json", rule()),
+ 400 => Operation.response("Bad Request", "application/json", ApiError),
+ 403 => Operation.response("Forbidden", "application/json", ApiError)
+ }
+ }
+ end
+
+ def update_operation do
+ %Operation{
+ tags: ["Instance rule managment"],
+ summary: "Modify existing rule",
+ operationId: "AdminAPI.RuleController.update",
+ security: [%{"oAuth" => ["admin:write"]}],
+ parameters: [Operation.parameter(:id, :path, :string, "Rule ID")],
+ requestBody: request_body("Parameters", update_request(), required: true),
+ responses: %{
+ 200 => Operation.response("Response", "application/json", rule()),
+ 400 => Operation.response("Bad Request", "application/json", ApiError),
+ 403 => Operation.response("Forbidden", "application/json", ApiError)
+ }
+ }
+ end
+
+ def delete_operation do
+ %Operation{
+ tags: ["Instance rule managment"],
+ summary: "Delete rule",
+ operationId: "AdminAPI.RuleController.delete",
+ parameters: [Operation.parameter(:id, :path, :string, "Rule ID")],
+ security: [%{"oAuth" => ["admin:write"]}],
+ responses: %{
+ 200 => empty_object_response(),
+ 404 => Operation.response("Not Found", "application/json", ApiError),
+ 403 => Operation.response("Forbidden", "application/json", ApiError)
+ }
+ }
+ end
+
+ defp create_request do
+ %Schema{
+ type: :object,
+ required: [:text],
+ properties: %{
+ priority: %Schema{type: :integer},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string}
+ }
+ }
+ end
+
+ defp update_request do
+ %Schema{
+ type: :object,
+ properties: %{
+ priority: %Schema{type: :integer},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string}
+ }
+ }
+ end
+
+ defp rule do
+ %Schema{
+ type: :object,
+ properties: %{
+ id: %Schema{type: :string},
+ priority: %Schema{type: :integer},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string, nullable: true}
+ }
+ }
+ end
+end
diff --git a/lib/pleroma/web/api_spec/operations/admin/status_operation.ex b/lib/pleroma/web/api_spec/operations/admin/status_operation.ex
index 229912dd7..17383f1d0 100644
--- a/lib/pleroma/web/api_spec/operations/admin/status_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/admin/status_operation.ex
@@ -70,7 +70,7 @@ def index_operation do
def show_operation do
%Operation{
- tags: ["Status adminitration)"],
+ tags: ["Status administration"],
summary: "Get status",
operationId: "AdminAPI.StatusController.show",
parameters: [id_param() | admin_api_params()],
@@ -84,7 +84,7 @@ def show_operation do
def update_operation do
%Operation{
- tags: ["Status adminitration)"],
+ tags: ["Status administration"],
summary: "Change the scope of a status",
operationId: "AdminAPI.StatusController.update",
parameters: [id_param() | admin_api_params()],
@@ -99,7 +99,7 @@ def update_operation do
def delete_operation do
%Operation{
- tags: ["Status adminitration)"],
+ tags: ["Status administration"],
summary: "Delete status",
operationId: "AdminAPI.StatusController.delete",
parameters: [id_param() | admin_api_params()],
@@ -143,7 +143,7 @@ def admin_account do
}
},
tags: %Schema{type: :string},
- is_confirmed: %Schema{type: :string}
+ is_confirmed: %Schema{type: :boolean}
}
}
end
diff --git a/lib/pleroma/web/api_spec/operations/announcement_operation.ex b/lib/pleroma/web/api_spec/operations/announcement_operation.ex
index 71be0002a..6f7031962 100644
--- a/lib/pleroma/web/api_spec/operations/announcement_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/announcement_operation.ex
@@ -15,7 +15,7 @@ def open_api_operation(action) do
def index_operation do
%Operation{
- tags: ["Announcement"],
+ tags: ["Announcements"],
summary: "Retrieve a list of announcements",
operationId: "MastodonAPI.AnnouncementController.index",
security: [%{"oAuth" => []}],
@@ -28,7 +28,7 @@ def index_operation do
def mark_read_operation do
%Operation{
- tags: ["Announcement"],
+ tags: ["Announcements"],
summary: "Mark one announcement as read",
operationId: "MastodonAPI.AnnouncementController.mark_read",
security: [%{"oAuth" => ["write:accounts"]}],
diff --git a/lib/pleroma/web/api_spec/operations/chat_operation.ex b/lib/pleroma/web/api_spec/operations/chat_operation.ex
index cf6a055fc..f56e57a41 100644
--- a/lib/pleroma/web/api_spec/operations/chat_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/chat_operation.ex
@@ -137,7 +137,12 @@ def index_operation do
"Deprecated due to no support for pagination. Using [/api/v2/pleroma/chats](#operation/ChatController.index2) instead is recommended.",
operationId: "ChatController.index",
parameters: [
- Operation.parameter(:with_muted, :query, BooleanLike, "Include chats from muted users")
+ Operation.parameter(
+ :with_muted,
+ :query,
+ BooleanLike.schema(),
+ "Include chats from muted users"
+ )
],
responses: %{
200 => Operation.response("The chats of the user", "application/json", chats_response())
@@ -156,7 +161,12 @@ def index2_operation do
summary: "Retrieve list of chats",
operationId: "ChatController.index2",
parameters: [
- Operation.parameter(:with_muted, :query, BooleanLike, "Include chats from muted users")
+ Operation.parameter(
+ :with_muted,
+ :query,
+ BooleanLike.schema(),
+ "Include chats from muted users"
+ )
| pagination_params()
],
responses: %{
diff --git a/lib/pleroma/web/api_spec/operations/directory_operation.ex b/lib/pleroma/web/api_spec/operations/directory_operation.ex
index 55752fa62..2eca17664 100644
--- a/lib/pleroma/web/api_spec/operations/directory_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/directory_operation.ex
@@ -17,7 +17,7 @@ def open_api_operation(action) do
def index_operation do
%Operation{
- tags: ["Directory"],
+ tags: ["Others"],
summary: "Profile directory",
operationId: "DirectoryController.index",
parameters:
@@ -29,7 +29,7 @@ def index_operation do
"Order by recent activity or account creation",
required: nil
),
- Operation.parameter(:local, :query, BooleanLike, "Include local users only")
+ Operation.parameter(:local, :query, BooleanLike.schema(), "Include local users only")
] ++ pagination_params(),
responses: %{
200 =>
diff --git a/lib/pleroma/web/api_spec/operations/emoji_reaction_operation.ex b/lib/pleroma/web/api_spec/operations/emoji_reaction_operation.ex
index 74341d64f..8d6be89a7 100644
--- a/lib/pleroma/web/api_spec/operations/emoji_reaction_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/emoji_reaction_operation.ex
@@ -21,7 +21,7 @@ def index_operation do
summary:
"Get an object of emoji to account mappings with accounts that reacted to the post",
parameters: [
- Operation.parameter(:id, :path, FlakeID, "Status ID", required: true),
+ Operation.parameter(:id, :path, FlakeID.schema(), "Status ID", required: true),
Operation.parameter(:emoji, :path, :string, "Filter by a single unicode emoji",
required: nil
),
@@ -45,7 +45,7 @@ def create_operation do
tags: ["Emoji reactions"],
summary: "React to a post with a unicode emoji",
parameters: [
- Operation.parameter(:id, :path, FlakeID, "Status ID", required: true),
+ Operation.parameter(:id, :path, FlakeID.schema(), "Status ID", required: true),
Operation.parameter(:emoji, :path, :string, "A single character unicode emoji",
required: true
)
@@ -64,7 +64,7 @@ def delete_operation do
tags: ["Emoji reactions"],
summary: "Remove a reaction to a post with a unicode emoji",
parameters: [
- Operation.parameter(:id, :path, FlakeID, "Status ID", required: true),
+ Operation.parameter(:id, :path, FlakeID.schema(), "Status ID", required: true),
Operation.parameter(:emoji, :path, :string, "A single character unicode emoji",
required: true
)
diff --git a/lib/pleroma/web/api_spec/operations/instance_operation.ex b/lib/pleroma/web/api_spec/operations/instance_operation.ex
index 3c4b504fe..7d7a5ecc1 100644
--- a/lib/pleroma/web/api_spec/operations/instance_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/instance_operation.ex
@@ -13,7 +13,7 @@ def open_api_operation(action) do
def show_operation do
%Operation{
- tags: ["Instance"],
+ tags: ["Instance misc"],
summary: "Retrieve instance information",
description: "Information about the server",
operationId: "InstanceController.show",
@@ -23,9 +23,21 @@ def show_operation do
}
end
+ def show2_operation do
+ %Operation{
+ tags: ["Instance misc"],
+ summary: "Retrieve instance information",
+ description: "Information about the server",
+ operationId: "InstanceController.show2",
+ responses: %{
+ 200 => Operation.response("Instance", "application/json", instance2())
+ }
+ }
+ end
+
def peers_operation do
%Operation{
- tags: ["Instance"],
+ tags: ["Instance misc"],
summary: "Retrieve list of known instances",
operationId: "InstanceController.peers",
responses: %{
@@ -34,10 +46,30 @@ def peers_operation do
}
end
+ def rules_operation do
+ %Operation{
+ tags: ["Instance misc"],
+ summary: "Retrieve list of instance rules",
+ operationId: "InstanceController.rules",
+ responses: %{
+ 200 => Operation.response("Array of domains", "application/json", array_of_rules())
+ }
+ }
+ end
+
defp instance do
%Schema{
type: :object,
properties: %{
+ accounts: %Schema{
+ type: :object,
+ properties: %{
+ max_featured_tags: %Schema{
+ type: :integer,
+ description: "The maximum number of featured tags allowed for each account."
+ }
+ }
+ },
uri: %Schema{type: :string, description: "The domain name of the instance"},
title: %Schema{type: :string, description: "The title of the website"},
description: %Schema{
@@ -89,7 +121,7 @@ defp instance do
languages: %Schema{
type: :array,
items: %Schema{type: :string},
- description: "Primary langauges of the website and its staff"
+ description: "Primary languages of the website and its staff"
},
registrations: %Schema{type: :boolean, description: "Whether registrations are enabled"},
# Extra (not present in Mastodon):
@@ -160,7 +192,186 @@ defp instance do
"urls" => %{
"streaming_api" => "wss://lain.com"
},
- "version" => "2.7.2 (compatible; Pleroma 2.0.50-536-g25eec6d7-develop)"
+ "version" => "2.7.2 (compatible; Pleroma 2.0.50-536-g25eec6d7-develop)",
+ "rules" => array_of_rules()
+ }
+ }
+ end
+
+ defp instance2 do
+ %Schema{
+ type: :object,
+ properties: %{
+ domain: %Schema{type: :string, description: "The domain name of the instance"},
+ title: %Schema{type: :string, description: "The title of the website"},
+ version: %Schema{
+ type: :string,
+ description: "The version of Pleroma installed on the instance"
+ },
+ source_url: %Schema{
+ type: :string,
+ description: "The version of Pleroma installed on the instance"
+ },
+ description: %Schema{
+ type: :string,
+ description: "Admin-defined description of the Pleroma site"
+ },
+ usage: %Schema{
+ type: :object,
+ description: "Instance usage statistics",
+ properties: %{
+ users: %Schema{
+ type: :object,
+ description: "User count statistics",
+ properties: %{
+ active_month: %Schema{
+ type: :integer,
+ description: "Monthly active users"
+ }
+ }
+ }
+ }
+ },
+ email: %Schema{
+ type: :string,
+ description: "An email that may be contacted for any inquiries",
+ format: :email
+ },
+ urls: %Schema{
+ type: :object,
+ description: "URLs of interest for clients apps",
+ properties: %{}
+ },
+ stats: %Schema{
+ type: :object,
+ description: "Statistics about how much information the instance contains",
+ properties: %{
+ user_count: %Schema{
+ type: :integer,
+ description: "Users registered on this instance"
+ },
+ status_count: %Schema{
+ type: :integer,
+ description: "Statuses authored by users on instance"
+ },
+ domain_count: %Schema{
+ type: :integer,
+ description: "Domains federated with this instance"
+ }
+ }
+ },
+ thumbnail: %Schema{
+ type: :object,
+ properties: %{
+ url: %Schema{
+ type: :string,
+ description: "Banner image for the website",
+ nullable: true
+ }
+ }
+ },
+ languages: %Schema{
+ type: :array,
+ items: %Schema{type: :string},
+ description: "Primary languages of the website and its staff"
+ },
+ registrations: %Schema{
+ type: :object,
+ description: "Registrations-related configuration",
+ properties: %{
+ enabled: %Schema{
+ type: :boolean,
+ description: "Whether registrations are enabled"
+ },
+ approval_required: %Schema{
+ type: :boolean,
+ description: "Whether users need to be manually approved by admin"
+ }
+ }
+ },
+ configuration: %Schema{
+ type: :object,
+ description: "Instance configuration",
+ properties: %{
+ accounts: %Schema{
+ type: :object,
+ properties: %{
+ max_featured_tags: %Schema{
+ type: :integer,
+ description: "The maximum number of featured tags allowed for each account."
+ },
+ max_pinned_statuses: %Schema{
+ type: :integer,
+ description: "The maximum number of pinned statuses for each account."
+ }
+ }
+ },
+ urls: %Schema{
+ type: :object,
+ properties: %{
+ streaming: %Schema{
+ type: :string,
+ description: "Websockets address for push streaming"
+ }
+ }
+ },
+ statuses: %Schema{
+ type: :object,
+ description: "A map with poll limits for local statuses",
+ properties: %{
+ characters_reserved_per_url: %Schema{
+ type: :integer,
+ description:
+ "Each URL in a status will be assumed to be exactly this many characters."
+ },
+ max_characters: %Schema{
+ type: :integer,
+ description: "Posts character limit (CW/Subject included in the counter)"
+ },
+ max_media_attachments: %Schema{
+ type: :integer,
+ description: "Media attachment limit"
+ }
+ }
+ },
+ media_attachments: %Schema{
+ type: :object,
+ description: "A map with poll limits for media attachments",
+ properties: %{
+ image_size_limit: %Schema{
+ type: :integer,
+ description: "File size limit of uploaded images"
+ },
+ video_size_limit: %Schema{
+ type: :integer,
+ description: "File size limit of uploaded videos"
+ }
+ }
+ },
+ polls: %Schema{
+ type: :object,
+ description: "A map with poll limits for local polls",
+ properties: %{
+ max_options: %Schema{
+ type: :integer,
+ description: "Maximum number of options."
+ },
+ max_characters_per_option: %Schema{
+ type: :integer,
+ description: "Maximum number of characters per option."
+ },
+ min_expiration: %Schema{
+ type: :integer,
+ description: "Minimum expiration time (in seconds)."
+ },
+ max_expiration: %Schema{
+ type: :integer,
+ description: "Maximum expiration time (in seconds)."
+ }
+ }
+ }
+ }
+ }
}
}
end
@@ -172,4 +383,18 @@ defp array_of_domains do
example: ["pleroma.site", "lain.com", "bikeshed.party"]
}
end
+
+ defp array_of_rules do
+ %Schema{
+ type: :array,
+ items: %Schema{
+ type: :object,
+ properties: %{
+ id: %Schema{type: :string},
+ text: %Schema{type: :string},
+ hint: %Schema{type: :string}
+ }
+ }
+ }
+ end
end
diff --git a/lib/pleroma/web/api_spec/operations/notification_operation.ex b/lib/pleroma/web/api_spec/operations/notification_operation.ex
index 7f2336ff6..a79eb8f74 100644
--- a/lib/pleroma/web/api_spec/operations/notification_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/notification_operation.ex
@@ -51,12 +51,18 @@ def index_operation do
:include_types,
:query,
%Schema{type: :array, items: notification_type()},
+ "Deprecated, use `types` instead"
+ ),
+ Operation.parameter(
+ :types,
+ :query,
+ %Schema{type: :array, items: notification_type()},
"Include the notifications for activities with the given types"
),
Operation.parameter(
:with_muted,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Include the notifications from muted users"
)
] ++ pagination_params(),
@@ -196,7 +202,8 @@ defp notification_type do
"pleroma:report",
"move",
"follow_request",
- "poll"
+ "poll",
+ "status"
],
description: """
The type of event that resulted in the notification.
@@ -210,6 +217,7 @@ defp notification_type do
- `pleroma:emoji_reaction` - Someone reacted with emoji to your status
- `pleroma:chat_mention` - Someone mentioned you in a chat message
- `pleroma:report` - Someone was reported
+ - `status` - Someone you are subscribed to created a status
"""
}
end
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_account_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_account_operation.ex
index 5375c5b15..7340653fb 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_account_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_account_operation.ex
@@ -142,7 +142,7 @@ def birthdays_operation do
end
defp id_param do
- Operation.parameter(:id, :path, FlakeID, "Account ID",
+ Operation.parameter(:id, :path, FlakeID.schema(), "Account ID",
example: "9umDrYheeY451cQnEe",
required: true
)
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex
index 82ec1e7bb..400f3825d 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_backup_operation.ex
@@ -16,7 +16,7 @@ def index_operation do
%Operation{
tags: ["Backups"],
summary: "List backups",
- security: [%{"oAuth" => ["read:account"]}],
+ security: [%{"oAuth" => ["read:backups"]}],
operationId: "PleromaAPI.BackupController.index",
responses: %{
200 =>
@@ -37,7 +37,7 @@ def create_operation do
%Operation{
tags: ["Backups"],
summary: "Create a backup",
- security: [%{"oAuth" => ["read:account"]}],
+ security: [%{"oAuth" => ["read:backups"]}],
operationId: "PleromaAPI.BackupController.create",
responses: %{
200 =>
@@ -64,7 +64,13 @@ defp backup do
content_type: %Schema{type: :string},
file_name: %Schema{type: :string},
file_size: %Schema{type: :integer},
- processed: %Schema{type: :boolean}
+ processed: %Schema{type: :boolean, description: "whether this backup has succeeded"},
+ state: %Schema{
+ type: :string,
+ description: "the state of the backup",
+ enum: ["pending", "running", "complete", "failed"]
+ },
+ processed_number: %Schema{type: :integer, description: "the number of records processed"}
},
example: %{
"content_type" => "application/zip",
@@ -72,7 +78,9 @@ defp backup do
"https://cofe.fe:4000/media/backups/archive-foobar-20200908T164207-Yr7vuT5Wycv-sN3kSN2iJ0k-9pMo60j9qmvRCdDqIew.zip",
"file_size" => 4105,
"inserted_at" => "2020-09-08T16:42:07.000Z",
- "processed" => true
+ "processed" => true,
+ "state" => "complete",
+ "processed_number" => 20
}
}
end
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_bookmark_folder_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_bookmark_folder_operation.ex
new file mode 100644
index 000000000..eaa683125
--- /dev/null
+++ b/lib/pleroma/web/api_spec/operations/pleroma_bookmark_folder_operation.ex
@@ -0,0 +1,125 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.PleromaBookmarkFolderOperation do
+ alias OpenApiSpex.Operation
+ alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.Schemas.ApiError
+ alias Pleroma.Web.ApiSpec.Schemas.BookmarkFolder
+ alias Pleroma.Web.ApiSpec.Schemas.FlakeID
+
+ import Pleroma.Web.ApiSpec.Helpers
+
+ @spec open_api_operation(any()) :: any()
+ def open_api_operation(action) do
+ operation = String.to_existing_atom("#{action}_operation")
+ apply(__MODULE__, operation, [])
+ end
+
+ def index_operation do
+ %Operation{
+ tags: ["Bookmark folders"],
+ summary: "All bookmark folders",
+ security: [%{"oAuth" => ["read:bookmarks"]}],
+ operationId: "PleromaAPI.BookmarkFolderController.index",
+ responses: %{
+ 200 =>
+ Operation.response("Array of Bookmark Folders", "application/json", %Schema{
+ type: :array,
+ items: BookmarkFolder
+ })
+ }
+ }
+ end
+
+ def create_operation do
+ %Operation{
+ tags: ["Bookmark folders"],
+ summary: "Create a bookmark folder",
+ security: [%{"oAuth" => ["write:bookmarks"]}],
+ operationId: "PleromaAPI.BookmarkFolderController.create",
+ requestBody: request_body("Parameters", create_request(), required: true),
+ responses: %{
+ 200 => Operation.response("Bookmark Folder", "application/json", BookmarkFolder),
+ 422 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
+ def update_operation do
+ %Operation{
+ tags: ["Bookmark folders"],
+ summary: "Update a bookmark folder",
+ security: [%{"oAuth" => ["write:bookmarks"]}],
+ operationId: "PleromaAPI.BookmarkFolderController.update",
+ parameters: [id_param()],
+ requestBody: request_body("Parameters", update_request(), required: true),
+ responses: %{
+ 200 => Operation.response("Bookmark Folder", "application/json", BookmarkFolder),
+ 403 => Operation.response("Forbidden", "application/json", ApiError),
+ 404 => Operation.response("Not Found", "application/json", ApiError),
+ 422 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
+ def delete_operation do
+ %Operation{
+ tags: ["Bookmark folders"],
+ summary: "Delete a bookmark folder",
+ security: [%{"oAuth" => ["write:bookmarks"]}],
+ operationId: "PleromaAPI.BookmarkFolderController.delete",
+ parameters: [id_param()],
+ responses: %{
+ 200 => Operation.response("Bookmark Folder", "application/json", BookmarkFolder),
+ 403 => Operation.response("Forbidden", "application/json", ApiError),
+ 404 => Operation.response("Not Found", "application/json", ApiError)
+ }
+ }
+ end
+
+ defp create_request do
+ %Schema{
+ title: "BookmarkFolderCreateRequest",
+ type: :object,
+ properties: %{
+ name: %Schema{
+ type: :string,
+ description: "Folder name"
+ },
+ emoji: %Schema{
+ type: :string,
+ nullable: true,
+ description: "Folder emoji"
+ }
+ }
+ }
+ end
+
+ defp update_request do
+ %Schema{
+ title: "BookmarkFolderUpdateRequest",
+ type: :object,
+ properties: %{
+ name: %Schema{
+ type: :string,
+ nullable: true,
+ description: "Folder name"
+ },
+ emoji: %Schema{
+ type: :string,
+ nullable: true,
+ description: "Folder emoji"
+ }
+ }
+ }
+ end
+
+ def id_param do
+ Operation.parameter(:id, :path, FlakeID.schema(), "Bookmark Folder ID",
+ example: "9umDrYheeY451cQnEe",
+ required: true
+ )
+ end
+end
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_emoji_file_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_emoji_file_operation.ex
index d09c1c10e..b05bad197 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_emoji_file_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_emoji_file_operation.ex
@@ -133,7 +133,11 @@ defp name_param do
defp files_object do
%Schema{
type: :object,
- additionalProperties: %Schema{type: :string},
+ additionalProperties: %Schema{
+ type: :string,
+ description: "Filename of the emoji",
+ extensions: %{"x-additionalPropertiesName": "Emoji name"}
+ },
description: "Object with emoji names as keys and filenames as values"
}
end
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_emoji_pack_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_emoji_pack_operation.ex
index 6add3ff33..efa36ffdc 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_emoji_pack_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_emoji_pack_operation.ex
@@ -227,13 +227,29 @@ defp ok_response do
defp emoji_packs_response do
Operation.response(
- "Object with pack names as keys and pack contents as values",
+ "Emoji packs and the count",
"application/json",
%Schema{
type: :object,
- additionalProperties: emoji_pack(),
+ properties: %{
+ packs: %Schema{
+ type: :object,
+ description: "Object with pack names as keys and pack contents as values",
+ additionalProperties: %Schema{
+ emoji_pack()
+ | extensions: %{"x-additionalPropertiesName": "Pack name"}
+ }
+ },
+ count: %Schema{
+ type: :integer,
+ description: "Number of emoji packs"
+ }
+ },
example: %{
- "emojos" => emoji_pack().example
+ "packs" => %{
+ "emojos" => emoji_pack().example
+ },
+ "count" => 1
}
}
)
@@ -274,7 +290,11 @@ defp emoji_pack do
defp files_object do
%Schema{
type: :object,
- additionalProperties: %Schema{type: :string},
+ additionalProperties: %Schema{
+ type: :string,
+ description: "Filename",
+ extensions: %{"x-additionalPropertiesName": "Emoji name"}
+ },
description: "Object with emoji names as keys and filenames as values"
}
end
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_instances_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_instances_operation.ex
index 82db4e1a8..e9319f3fb 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_instances_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_instances_operation.ex
@@ -13,7 +13,7 @@ def open_api_operation(action) do
def show_operation do
%Operation{
- tags: ["Instance"],
+ tags: ["Instance misc"],
summary: "Retrieve federation status",
description: "Information about instances deemed unreachable by the server",
operationId: "PleromaInstances.show",
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex
index a994345db..0e2865191 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_notification_operation.ex
@@ -5,7 +5,6 @@
defmodule Pleroma.Web.ApiSpec.PleromaNotificationOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
- alias Pleroma.Web.ApiSpec.NotificationOperation
alias Pleroma.Web.ApiSpec.Schemas.ApiError
import Pleroma.Web.ApiSpec.Helpers
@@ -35,12 +34,7 @@ def mark_as_read_operation do
Operation.response(
"A Notification or array of Notifications",
"application/json",
- %Schema{
- anyOf: [
- %Schema{type: :array, items: NotificationOperation.notification()},
- NotificationOperation.notification()
- ]
- }
+ %Schema{type: :string}
),
400 => Operation.response("Bad Request", "application/json", ApiError)
}
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_scrobble_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_scrobble_operation.ex
index b6273bfcf..f595583b6 100644
--- a/lib/pleroma/web/api_spec/operations/pleroma_scrobble_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/pleroma_scrobble_operation.ex
@@ -22,7 +22,8 @@ def create_operation do
summary: "Creates a new Listen activity for an account",
security: [%{"oAuth" => ["write"]}],
operationId: "PleromaAPI.ScrobbleController.create",
- requestBody: request_body("Parameters", create_request(), requried: true),
+ deprecated: true,
+ requestBody: request_body("Parameters", create_request(), required: true),
responses: %{
200 => Operation.response("Scrobble", "application/json", scrobble())
}
@@ -34,6 +35,7 @@ def index_operation do
tags: ["Scrobbles"],
summary: "Requests a list of current and recent Listen activities for an account",
operationId: "PleromaAPI.ScrobbleController.index",
+ deprecated: true,
parameters: [
%Reference{"$ref": "#/components/parameters/accountIdOrNickname"} | pagination_params()
],
@@ -57,6 +59,7 @@ defp create_request do
album: %Schema{type: :string, description: "The album of the media playing"},
artist: %Schema{type: :string, description: "The artist of the media playing"},
length: %Schema{type: :integer, description: "The length of the media playing"},
+ externalLink: %Schema{type: :string, description: "A URL referencing the media playing"},
visibility: %Schema{
allOf: [VisibilityScope],
default: "public",
@@ -67,7 +70,8 @@ defp create_request do
"title" => "Some Title",
"artist" => "Some Artist",
"album" => "Some Album",
- "length" => 180_000
+ "length" => 180_000,
+ "externalLink" => "https://www.last.fm/music/Some+Artist/_/Some+Title"
}
}
end
@@ -81,6 +85,7 @@ defp scrobble do
title: %Schema{type: :string, description: "The title of the media playing"},
album: %Schema{type: :string, description: "The album of the media playing"},
artist: %Schema{type: :string, description: "The artist of the media playing"},
+ externalLink: %Schema{type: :string, description: "A URL referencing the media playing"},
length: %Schema{
type: :integer,
description: "The length of the media playing",
@@ -95,6 +100,7 @@ defp scrobble do
"artist" => "Some Artist",
"album" => "Some Album",
"length" => 180_000,
+ "externalLink" => "https://www.last.fm/music/Some+Artist/_/Some+Title",
"created_at" => "2019-09-28T12:40:45.000Z"
}
}
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_settings_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_settings_operation.ex
new file mode 100644
index 000000000..e2cef4f67
--- /dev/null
+++ b/lib/pleroma/web/api_spec/operations/pleroma_settings_operation.ex
@@ -0,0 +1,72 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.PleromaSettingsOperation do
+ alias OpenApiSpex.Operation
+ alias OpenApiSpex.Schema
+
+ import Pleroma.Web.ApiSpec.Helpers
+
+ def open_api_operation(action) do
+ operation = String.to_existing_atom("#{action}_operation")
+ apply(__MODULE__, operation, [])
+ end
+
+ def show_operation do
+ %Operation{
+ tags: ["Settings"],
+ summary: "Get settings for an application",
+ description: "Get synchronized settings for an application",
+ operationId: "SettingsController.show",
+ parameters: [app_name_param()],
+ security: [%{"oAuth" => ["read:accounts"]}],
+ responses: %{
+ 200 => Operation.response("object", "application/json", object())
+ }
+ }
+ end
+
+ def update_operation do
+ %Operation{
+ tags: ["Settings"],
+ summary: "Update settings for an application",
+ description: "Update synchronized settings for an application",
+ operationId: "SettingsController.update",
+ parameters: [app_name_param()],
+ security: [%{"oAuth" => ["write:accounts"]}],
+ requestBody: request_body("Parameters", update_request(), required: true),
+ responses: %{
+ 200 => Operation.response("object", "application/json", object())
+ }
+ }
+ end
+
+ def app_name_param do
+ Operation.parameter(:app, :path, %Schema{type: :string}, "Application name",
+ example: "pleroma-fe",
+ required: true
+ )
+ end
+
+ def object do
+ %Schema{
+ title: "Settings object",
+ description: "The object that contains settings for the application.",
+ type: :object
+ }
+ end
+
+ def update_request do
+ %Schema{
+ title: "SettingsUpdateRequest",
+ type: :object,
+ description:
+ "The settings object to be merged with the current settings. To remove a field, set it to null.",
+ example: %{
+ "config1" => true,
+ "config2_to_unset" => nil
+ }
+ }
+ end
+end
diff --git a/lib/pleroma/web/api_spec/operations/pleroma_status_operation.ex b/lib/pleroma/web/api_spec/operations/pleroma_status_operation.ex
new file mode 100644
index 000000000..77c604952
--- /dev/null
+++ b/lib/pleroma/web/api_spec/operations/pleroma_status_operation.ex
@@ -0,0 +1,45 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.PleromaStatusOperation do
+ alias OpenApiSpex.Operation
+ alias Pleroma.Web.ApiSpec.Schemas.ApiError
+ alias Pleroma.Web.ApiSpec.Schemas.FlakeID
+ alias Pleroma.Web.ApiSpec.StatusOperation
+
+ import Pleroma.Web.ApiSpec.Helpers
+
+ def open_api_operation(action) do
+ operation = String.to_existing_atom("#{action}_operation")
+ apply(__MODULE__, operation, [])
+ end
+
+ def quotes_operation do
+ %Operation{
+ tags: ["Retrieve status information"],
+ summary: "Quoted by",
+ description: "View quotes for a given status",
+ operationId: "PleromaAPI.StatusController.quotes",
+ parameters: [id_param() | pagination_params()],
+ security: [%{"oAuth" => ["read:statuses"]}],
+ responses: %{
+ 200 =>
+ Operation.response(
+ "Array of Status",
+ "application/json",
+ StatusOperation.array_of_statuses()
+ ),
+ 403 => Operation.response("Forbidden", "application/json", ApiError),
+ 404 => Operation.response("Not Found", "application/json", ApiError)
+ }
+ }
+ end
+
+ def id_param do
+ Operation.parameter(:id, :path, FlakeID.schema(), "Status ID",
+ example: "9umDrYheeY451cQnEe",
+ required: true
+ )
+ end
+end
diff --git a/lib/pleroma/web/api_spec/operations/poll_operation.ex b/lib/pleroma/web/api_spec/operations/poll_operation.ex
index efd784f03..6dd251743 100644
--- a/lib/pleroma/web/api_spec/operations/poll_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/poll_operation.ex
@@ -47,7 +47,7 @@ def vote_operation do
end
defp id_param do
- Operation.parameter(:id, :path, FlakeID, "Poll ID",
+ Operation.parameter(:id, :path, FlakeID.schema(), "Poll ID",
example: "123",
required: true
)
diff --git a/lib/pleroma/web/api_spec/operations/report_operation.ex b/lib/pleroma/web/api_spec/operations/report_operation.ex
index c74ac7d5f..f5f88974c 100644
--- a/lib/pleroma/web/api_spec/operations/report_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/report_operation.ex
@@ -53,6 +53,12 @@ defp create_request do
default: false,
description:
"If the account is remote, should the report be forwarded to the remote admin?"
+ },
+ rule_ids: %Schema{
+ type: :array,
+ nullable: true,
+ items: %Schema{type: :string},
+ description: "Array of rules"
}
},
required: [:account_id],
@@ -60,7 +66,8 @@ defp create_request do
"account_id" => "123",
"status_ids" => ["1337"],
"comment" => "bad status!",
- "forward" => "false"
+ "forward" => "false",
+ "rule_ids" => ["3"]
}
}
end
diff --git a/lib/pleroma/web/api_spec/operations/scheduled_activity_operation.ex b/lib/pleroma/web/api_spec/operations/scheduled_activity_operation.ex
index 802d3b6dd..c7ed02ff3 100644
--- a/lib/pleroma/web/api_spec/operations/scheduled_activity_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/scheduled_activity_operation.ex
@@ -88,7 +88,7 @@ def delete_operation do
end
defp id_param do
- Operation.parameter(:id, :path, FlakeID, "Poll ID",
+ Operation.parameter(:id, :path, FlakeID.schema(), "Poll ID",
example: "123",
required: true
)
diff --git a/lib/pleroma/web/api_spec/operations/search_operation.ex b/lib/pleroma/web/api_spec/operations/search_operation.ex
index 1a7e49be4..539743ba3 100644
--- a/lib/pleroma/web/api_spec/operations/search_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/search_operation.ex
@@ -70,7 +70,7 @@ def search_operation do
Operation.parameter(
:account_id,
:query,
- FlakeID,
+ FlakeID.schema(),
"If provided, statuses returned will be authored only by this account"
),
Operation.parameter(
@@ -116,7 +116,7 @@ def search2_operation do
Operation.parameter(
:account_id,
:query,
- FlakeID,
+ FlakeID.schema(),
"If provided, statuses returned will be authored only by this account"
),
Operation.parameter(
diff --git a/lib/pleroma/web/api_spec/operations/status_operation.ex b/lib/pleroma/web/api_spec/operations/status_operation.ex
index 639f24d49..1717c68c8 100644
--- a/lib/pleroma/web/api_spec/operations/status_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/status_operation.ex
@@ -6,9 +6,13 @@ defmodule Pleroma.Web.ApiSpec.StatusOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
alias Pleroma.Web.ApiSpec.AccountOperation
+ alias Pleroma.Web.ApiSpec.Schemas.Account
alias Pleroma.Web.ApiSpec.Schemas.ApiError
+ alias Pleroma.Web.ApiSpec.Schemas.Attachment
alias Pleroma.Web.ApiSpec.Schemas.BooleanLike
+ alias Pleroma.Web.ApiSpec.Schemas.Emoji
alias Pleroma.Web.ApiSpec.Schemas.FlakeID
+ alias Pleroma.Web.ApiSpec.Schemas.Poll
alias Pleroma.Web.ApiSpec.Schemas.ScheduledStatus
alias Pleroma.Web.ApiSpec.Schemas.Status
alias Pleroma.Web.ApiSpec.Schemas.VisibilityScope
@@ -35,7 +39,7 @@ def index_operation do
Operation.parameter(
:with_muted,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Include reactions from muted acccounts."
)
],
@@ -78,7 +82,7 @@ def show_operation do
Operation.parameter(
:with_muted,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"Include reactions from muted acccounts."
)
],
@@ -252,6 +256,18 @@ def bookmark_operation do
description: "Privately bookmark a status",
operationId: "StatusController.bookmark",
parameters: [id_param()],
+ requestBody:
+ request_body("Parameters", %Schema{
+ title: "StatusUpdateRequest",
+ type: :object,
+ properties: %{
+ folder_id: %Schema{
+ nullable: true,
+ allOf: [FlakeID],
+ description: "ID of bookmarks folder, if any"
+ }
+ }
+ }),
responses: %{
200 => status_response()
}
@@ -426,7 +442,15 @@ def bookmarks_operation do
summary: "Bookmarked statuses",
description: "Statuses the user has bookmarked",
operationId: "StatusController.bookmarks",
- parameters: pagination_params(),
+ parameters: [
+ Operation.parameter(
+ :folder_id,
+ :query,
+ FlakeID.schema(),
+ "If provided, only display bookmarks from given folder"
+ )
+ | pagination_params()
+ ],
security: [%{"oAuth" => ["read:bookmarks"]}],
responses: %{
200 => Operation.response("Array of Statuses", "application/json", array_of_statuses())
@@ -434,6 +458,59 @@ def bookmarks_operation do
}
end
+ def show_history_operation do
+ %Operation{
+ tags: ["Retrieve status information"],
+ summary: "Status history",
+ description: "View history of a status",
+ operationId: "StatusController.show_history",
+ security: [%{"oAuth" => ["read:statuses"]}],
+ parameters: [
+ id_param()
+ ],
+ responses: %{
+ 200 => status_history_response(),
+ 404 => Operation.response("Not Found", "application/json", ApiError)
+ }
+ }
+ end
+
+ def show_source_operation do
+ %Operation{
+ tags: ["Retrieve status information"],
+ summary: "Status source",
+ description: "View source of a status",
+ operationId: "StatusController.show_source",
+ security: [%{"oAuth" => ["read:statuses"]}],
+ parameters: [
+ id_param()
+ ],
+ responses: %{
+ 200 => status_source_response(),
+ 404 => Operation.response("Not Found", "application/json", ApiError)
+ }
+ }
+ end
+
+ def update_operation do
+ %Operation{
+ tags: ["Status actions"],
+ summary: "Update status",
+ description: "Change the content of a status",
+ operationId: "StatusController.update",
+ security: [%{"oAuth" => ["write:statuses"]}],
+ parameters: [
+ id_param()
+ ],
+ requestBody: request_body("Parameters", update_request(), required: true),
+ responses: %{
+ 200 => status_response(),
+ 403 => Operation.response("Forbidden", "application/json", ApiError),
+ 404 => Operation.response("Not Found", "application/json", ApiError)
+ }
+ }
+ end
+
def array_of_statuses do
%Schema{type: :array, items: Status, example: [Status.schema().example]}
end
@@ -477,7 +554,7 @@ defp create_request do
format: :"date-time",
nullable: true,
description:
- "ISO 8601 Datetime at which to schedule a status. Providing this paramter will cause ScheduledStatus to be returned instead of Status. Must be at least 5 minutes in the future."
+ "ISO 8601 Datetime at which to schedule a status. Providing this parameter will cause ScheduledStatus to be returned instead of Status. Must be at least 5 minutes in the future."
},
language: %Schema{
type: :string,
@@ -489,7 +566,7 @@ defp create_request do
allOf: [BooleanLike],
nullable: true,
description:
- "If set to `true` the post won't be actually posted, but the status entitiy would still be rendered back. This could be useful for previewing rich text/custom emoji, for example"
+ "If set to `true` the post won't be actually posted, but the status entity would still be rendered back. This could be useful for previewing rich text/custom emoji, for example"
},
content_type: %Schema{
type: :string,
@@ -524,6 +601,65 @@ defp create_request do
type: :string,
description:
"Will reply to a given conversation, addressing only the people who are part of the recipient set of that conversation. Sets the visibility to `direct`."
+ },
+ quote_id: %Schema{
+ nullable: true,
+ allOf: [FlakeID],
+ description: "ID of the status being quoted, if any"
+ }
+ },
+ example: %{
+ "status" => "What time is it?",
+ "sensitive" => "false",
+ "poll" => %{
+ "options" => ["Cofe", "Adventure"],
+ "expires_in" => 420
+ }
+ }
+ }
+ end
+
+ defp update_request do
+ %Schema{
+ title: "StatusUpdateRequest",
+ type: :object,
+ properties: %{
+ status: %Schema{
+ type: :string,
+ nullable: true,
+ description:
+ "Text content of the status. If `media_ids` is provided, this becomes optional. Attaching a `poll` is optional while `status` is provided."
+ },
+ media_ids: %Schema{
+ nullable: true,
+ type: :array,
+ items: %Schema{type: :string},
+ description: "Array of Attachment ids to be attached as media."
+ },
+ poll: poll_params(),
+ sensitive: %Schema{
+ allOf: [BooleanLike],
+ nullable: true,
+ description: "Mark status and attached media as sensitive?"
+ },
+ spoiler_text: %Schema{
+ type: :string,
+ nullable: true,
+ description:
+ "Text to be shown as a warning or subject before the actual content. Statuses are generally collapsed behind this field."
+ },
+ content_type: %Schema{
+ type: :string,
+ nullable: true,
+ description:
+ "The MIME type of the status, it is transformed into HTML by the backend. You can get the list of the supported MIME types with the nodeinfo endpoint."
+ },
+ to: %Schema{
+ type: :array,
+ nullable: true,
+ items: %Schema{type: :string},
+ description:
+ "A list of nicknames (like `lain@soykaf.club` or `lain` on the local server) that will be used to determine who is going to be addressed by this post. Using this will disable the implicit addressing by mentioned names in the `status` body, only the people in the `to` list will be addressed. The normal rules for for post visibility are not affected by this and will still apply"
}
},
example: %{
@@ -569,7 +705,7 @@ def poll_params do
end
def id_param do
- Operation.parameter(:id, :path, FlakeID, "Status ID",
+ Operation.parameter(:id, :path, FlakeID.schema(), "Status ID",
example: "9umDrYheeY451cQnEe",
required: true
)
@@ -579,6 +715,87 @@ defp status_response do
Operation.response("Status", "application/json", Status)
end
+ defp status_history_response do
+ Operation.response(
+ "Status History",
+ "application/json",
+ %Schema{
+ title: "Status history",
+ description: "Response schema for history of a status",
+ type: :array,
+ items: %Schema{
+ type: :object,
+ properties: %{
+ account: %Schema{
+ allOf: [Account],
+ description: "The account that authored this status"
+ },
+ content: %Schema{
+ type: :string,
+ format: :html,
+ description: "HTML-encoded status content"
+ },
+ sensitive: %Schema{
+ type: :boolean,
+ description: "Is this status marked as sensitive content?"
+ },
+ spoiler_text: %Schema{
+ type: :string,
+ description:
+ "Subject or summary line, below which status content is collapsed until expanded"
+ },
+ created_at: %Schema{
+ type: :string,
+ format: "date-time",
+ description: "The date when this status was created"
+ },
+ media_attachments: %Schema{
+ type: :array,
+ items: Attachment,
+ description: "Media that is attached to this status"
+ },
+ emojis: %Schema{
+ type: :array,
+ items: Emoji,
+ description: "Custom emoji to be used when rendering status content"
+ },
+ poll: %Schema{
+ allOf: [Poll],
+ nullable: true,
+ description: "The poll attached to the status"
+ }
+ }
+ }
+ }
+ )
+ end
+
+ defp status_source_response do
+ Operation.response(
+ "Status Source",
+ "application/json",
+ %Schema{
+ type: :object,
+ properties: %{
+ id: FlakeID,
+ text: %Schema{
+ type: :string,
+ description: "Raw source of status content"
+ },
+ spoiler_text: %Schema{
+ type: :string,
+ description:
+ "Subject or summary line, below which status content is collapsed until expanded"
+ },
+ content_type: %Schema{
+ type: :string,
+ description: "The content type of the source"
+ }
+ }
+ }
+ )
+ end
+
defp context do
%Schema{
title: "StatusContext",
diff --git a/lib/pleroma/web/api_spec/operations/streaming_operation.ex b/lib/pleroma/web/api_spec/operations/streaming_operation.ex
new file mode 100644
index 000000000..b580bc2f0
--- /dev/null
+++ b/lib/pleroma/web/api_spec/operations/streaming_operation.ex
@@ -0,0 +1,464 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.StreamingOperation do
+ alias OpenApiSpex.Operation
+ alias OpenApiSpex.Response
+ alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.NotificationOperation
+ alias Pleroma.Web.ApiSpec.Schemas.Chat
+ alias Pleroma.Web.ApiSpec.Schemas.Conversation
+ alias Pleroma.Web.ApiSpec.Schemas.FlakeID
+ alias Pleroma.Web.ApiSpec.Schemas.Status
+
+ require Pleroma.Constants
+
+ @spec open_api_operation(atom) :: Operation.t()
+ def open_api_operation(action) do
+ operation = String.to_existing_atom("#{action}_operation")
+ apply(__MODULE__, operation, [])
+ end
+
+ @spec streaming_operation() :: Operation.t()
+ def streaming_operation do
+ %Operation{
+ tags: ["Timelines"],
+ summary: "Establish streaming connection",
+ description: """
+ Receive statuses in real-time via WebSocket.
+
+ You can specify the access token on the query string or through the `sec-websocket-protocol` header. Authenticating
+ via the query string is considered unsafe and should be avoided unless you have to (e.g. to maintain
+ your client's compatibility with Mastodon).
+
+ You may specify a stream on the query string. If you do so and you are connecting to a stream that requires logged-in users,
+ you must specify the access token at the time of the connection (i.e. via query string or header).
+
+ Otherwise, you have the option to authenticate after you have established the connection through client-sent events.
+
+ The "Request body" section below describes what events clients can send through WebSocket, and the "Responses" section
+ describes what events the server will send through WebSocket.
+ """,
+ security: [%{"oAuth" => ["read:statuses", "read:notifications"]}],
+ operationId: "WebsocketHandler.streaming",
+ parameters:
+ [
+ Operation.parameter(:connection, :header, %Schema{type: :string}, "connection header",
+ required: true
+ ),
+ Operation.parameter(:upgrade, :header, %Schema{type: :string}, "upgrade header",
+ required: true
+ ),
+ Operation.parameter(
+ :"sec-websocket-key",
+ :header,
+ %Schema{type: :string},
+ "sec-websocket-key header",
+ required: true
+ ),
+ Operation.parameter(
+ :"sec-websocket-version",
+ :header,
+ %Schema{type: :string},
+ "sec-websocket-version header",
+ required: true
+ )
+ ] ++ stream_params() ++ access_token_params(),
+ requestBody: request_body("Client-sent events", client_sent_events()),
+ responses: %{
+ 101 => switching_protocols_response(),
+ 200 =>
+ Operation.response(
+ "Server-sent events",
+ "application/json",
+ server_sent_events()
+ )
+ }
+ }
+ end
+
+ defp stream_params do
+ stream_specifier()
+ |> Enum.map(fn {name, schema} ->
+ Operation.parameter(name, :query, schema, get_schema(schema).description)
+ end)
+ end
+
+ defp access_token_params do
+ [
+ Operation.parameter(:access_token, :query, token(), token().description),
+ Operation.parameter(:"sec-websocket-protocol", :header, token(), token().description)
+ ]
+ end
+
+ defp switching_protocols_response do
+ %Response{
+ description: "Switching protocols",
+ headers: %{
+ "connection" => %OpenApiSpex.Header{required: true},
+ "upgrade" => %OpenApiSpex.Header{required: true},
+ "sec-websocket-accept" => %OpenApiSpex.Header{required: true}
+ }
+ }
+ end
+
+ defp server_sent_events do
+ %Schema{
+ oneOf: [
+ update_event(),
+ status_update_event(),
+ notification_event(),
+ chat_update_event(),
+ follow_relationships_update_event(),
+ conversation_event(),
+ delete_event(),
+ pleroma_respond_event()
+ ]
+ }
+ end
+
+ defp stream do
+ %Schema{
+ type: :array,
+ title: "Stream",
+ description: """
+ The stream identifier.
+ The first item is the name of the stream. If the stream needs a differentiator, the second item will be the corresponding identifier.
+ Currently, for the following stream types, there is a second element in the array:
+
+ - `list`: The second element is the id of the list, as a string.
+ - `hashtag`: The second element is the name of the hashtag.
+ - `public:remote:media` and `public:remote`: The second element is the domain of the corresponding instance.
+ """,
+ maxItems: 2,
+ minItems: 1,
+ items: %Schema{type: :string},
+ example: ["hashtag", "mew"]
+ }
+ end
+
+ defp get_schema(%Schema{} = schema), do: schema
+ defp get_schema(schema), do: schema.schema
+
+ defp server_sent_event_helper(name, description, type, payload, opts \\ []) do
+ payload_type = Keyword.get(opts, :payload_type, :json)
+ has_stream = Keyword.get(opts, :has_stream, true)
+
+ stream_properties =
+ if has_stream do
+ %{stream: stream()}
+ else
+ %{}
+ end
+
+ stream_example = if has_stream, do: %{"stream" => get_schema(stream()).example}, else: %{}
+
+ stream_required = if has_stream, do: [:stream], else: []
+
+ payload_schema =
+ if payload_type == :json do
+ %Schema{
+ title: "Event payload",
+ description: "JSON-encoded string of #{get_schema(payload).title}",
+ allOf: [payload]
+ }
+ else
+ payload
+ end
+
+ payload_example =
+ if payload_type == :json do
+ get_schema(payload).example |> Jason.encode!()
+ else
+ get_schema(payload).example
+ end
+
+ %Schema{
+ type: :object,
+ title: name,
+ description: description,
+ required: [:event, :payload] ++ stream_required,
+ properties:
+ %{
+ event: %Schema{
+ title: "Event type",
+ description: "Type of the event.",
+ type: :string,
+ required: true,
+ enum: [type]
+ },
+ payload: payload_schema
+ }
+ |> Map.merge(stream_properties),
+ example:
+ %{
+ "event" => type,
+ "payload" => payload_example
+ }
+ |> Map.merge(stream_example)
+ }
+ end
+
+ defp update_event do
+ server_sent_event_helper("New status", "A newly-posted status.", "update", Status)
+ end
+
+ defp status_update_event do
+ server_sent_event_helper("Edit", "A status that was just edited", "status.update", Status)
+ end
+
+ defp notification_event do
+ server_sent_event_helper(
+ "Notification",
+ "A new notification.",
+ "notification",
+ NotificationOperation.notification()
+ )
+ end
+
+ defp follow_relationships_update_event do
+ server_sent_event_helper(
+ "Follow relationships update",
+ "An update to follow relationships.",
+ "pleroma:follow_relationships_update",
+ %Schema{
+ type: :object,
+ title: "Follow relationships update",
+ required: [:state, :follower, :following],
+ properties: %{
+ state: %Schema{
+ type: :string,
+ description: "Follow state of the relationship.",
+ enum: ["follow_pending", "follow_accept", "follow_reject", "unfollow"]
+ },
+ follower: %Schema{
+ type: :object,
+ description: "Information about the follower.",
+ required: [:id, :follower_count, :following_count],
+ properties: %{
+ id: FlakeID,
+ follower_count: %Schema{type: :integer},
+ following_count: %Schema{type: :integer}
+ }
+ },
+ following: %Schema{
+ type: :object,
+ description: "Information about the following person.",
+ required: [:id, :follower_count, :following_count],
+ properties: %{
+ id: FlakeID,
+ follower_count: %Schema{type: :integer},
+ following_count: %Schema{type: :integer}
+ }
+ }
+ },
+ example: %{
+ "state" => "follow_pending",
+ "follower" => %{
+ "id" => "someUser1",
+ "follower_count" => 1,
+ "following_count" => 1
+ },
+ "following" => %{
+ "id" => "someUser2",
+ "follower_count" => 1,
+ "following_count" => 1
+ }
+ }
+ }
+ )
+ end
+
+ defp chat_update_event do
+ server_sent_event_helper(
+ "Chat update",
+ "A new chat message.",
+ "pleroma:chat_update",
+ Chat
+ )
+ end
+
+ defp conversation_event do
+ server_sent_event_helper(
+ "Conversation update",
+ "An update about a conversation",
+ "conversation",
+ Conversation
+ )
+ end
+
+ defp delete_event do
+ server_sent_event_helper(
+ "Delete",
+ "A status that was just deleted.",
+ "delete",
+ %Schema{
+ type: :string,
+ title: "Status id",
+ description: "Id of the deleted status",
+ allOf: [FlakeID],
+ example: "some-opaque-id"
+ },
+ payload_type: :string,
+ has_stream: false
+ )
+ end
+
+ defp pleroma_respond_event do
+ server_sent_event_helper(
+ "Server response",
+ "A response to a client-sent event.",
+ "pleroma:respond",
+ %Schema{
+ type: :object,
+ title: "Results",
+ required: [:result, :type],
+ properties: %{
+ result: %Schema{
+ type: :string,
+ title: "Result of the request",
+ enum: ["success", "error", "ignored"]
+ },
+ error: %Schema{
+ type: :string,
+ title: "Error code",
+ description: "An error identifier. Only appears if `result` is `error`."
+ },
+ type: %Schema{
+ type: :string,
+ description: "Type of the request."
+ }
+ },
+ example: %{"result" => "success", "type" => "pleroma:authenticate"}
+ },
+ has_stream: false
+ )
+ end
+
+ defp client_sent_events do
+ %Schema{
+ oneOf: [
+ subscribe_event(),
+ unsubscribe_event(),
+ authenticate_event()
+ ]
+ }
+ end
+
+ defp request_body(description, schema, opts \\ []) do
+ %OpenApiSpex.RequestBody{
+ description: description,
+ content: %{
+ "application/json" => %OpenApiSpex.MediaType{
+ schema: schema,
+ example: opts[:example],
+ examples: opts[:examples]
+ }
+ }
+ }
+ end
+
+ defp client_sent_event_helper(name, description, type, properties, opts) do
+ required = opts[:required] || []
+
+ %Schema{
+ type: :object,
+ title: name,
+ required: [:type] ++ required,
+ description: description,
+ properties:
+ %{
+ type: %Schema{type: :string, enum: [type], description: "Type of the event."}
+ }
+ |> Map.merge(properties),
+ example: opts[:example]
+ }
+ end
+
+ defp subscribe_event do
+ client_sent_event_helper(
+ "Subscribe",
+ "Subscribe to a stream.",
+ "subscribe",
+ stream_specifier(),
+ required: [:stream],
+ example: %{"type" => "subscribe", "stream" => "list", "list" => "1"}
+ )
+ end
+
+ defp unsubscribe_event do
+ client_sent_event_helper(
+ "Unsubscribe",
+ "Unsubscribe from a stream.",
+ "unsubscribe",
+ stream_specifier(),
+ required: [:stream],
+ example: %{
+ "type" => "unsubscribe",
+ "stream" => "public:remote:media",
+ "instance" => "example.org"
+ }
+ )
+ end
+
+ defp authenticate_event do
+ client_sent_event_helper(
+ "Authenticate",
+ "Authenticate via an access token.",
+ "pleroma:authenticate",
+ %{
+ token: token()
+ },
+ required: [:token]
+ )
+ end
+
+ defp token do
+ %Schema{
+ type: :string,
+ description: "An OAuth access token with corresponding permissions.",
+ example: "some token"
+ }
+ end
+
+ defp stream_specifier do
+ %{
+ stream: %Schema{
+ type: :string,
+ description: "The name of the stream.",
+ enum:
+ Pleroma.Constants.public_streams() ++
+ [
+ "public:remote",
+ "public:remote:media",
+ "user",
+ "user:pleroma_chat",
+ "user:notification",
+ "direct",
+ "list",
+ "hashtag"
+ ]
+ },
+ list: %Schema{
+ type: :string,
+ title: "List id",
+ description: "The id of the list. Required when `stream` is `list`.",
+ example: "some-id"
+ },
+ tag: %Schema{
+ type: :string,
+ title: "Hashtag name",
+ description: "The name of the hashtag. Required when `stream` is `hashtag`.",
+ example: "mew"
+ },
+ instance: %Schema{
+ type: :string,
+ title: "Domain name",
+ description:
+ "Domain name of the instance. Required when `stream` is `public:remote` or `public:remote:media`.",
+ example: "example.org"
+ }
+ }
+ end
+end
diff --git a/lib/pleroma/web/api_spec/operations/timeline_operation.ex b/lib/pleroma/web/api_spec/operations/timeline_operation.ex
index fbe3f763a..f55e59805 100644
--- a/lib/pleroma/web/api_spec/operations/timeline_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/timeline_operation.ex
@@ -176,7 +176,12 @@ defp instance_param do
end
defp with_muted_param do
- Operation.parameter(:with_muted, :query, BooleanLike, "Include activities by muted users")
+ Operation.parameter(
+ :with_muted,
+ :query,
+ BooleanLike.schema(),
+ "Include activities by muted users"
+ )
end
defp exclude_visibilities_param do
diff --git a/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex b/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex
index c59e3b12a..724d873c0 100644
--- a/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex
+++ b/lib/pleroma/web/api_spec/operations/twitter_util_operation.ex
@@ -17,7 +17,7 @@ def open_api_operation(action) do
def emoji_operation do
%Operation{
- tags: ["Emojis"],
+ tags: ["Custom emojis"],
summary: "List all custom emojis",
operationId: "UtilController.emoji",
parameters: [],
@@ -30,7 +30,8 @@ def emoji_operation do
properties: %{
image_url: %Schema{type: :string},
tags: %Schema{type: :array, items: %Schema{type: :string}}
- }
+ },
+ extensions: %{"x-additionalPropertiesName": "Emoji name"}
},
example: %{
"firefox" => %{
@@ -45,7 +46,7 @@ def emoji_operation do
def frontend_configurations_operation do
%Operation{
- tags: ["Configuration"],
+ tags: ["Others"],
summary: "Dump frontend configurations",
operationId: "UtilController.frontend_configurations",
parameters: [],
@@ -53,7 +54,12 @@ def frontend_configurations_operation do
200 =>
Operation.response("List", "application/json", %Schema{
type: :object,
- additionalProperties: %Schema{type: :object}
+ additionalProperties: %Schema{
+ type: :object,
+ description:
+ "Opaque object representing the instance-wide configuration for the frontend",
+ extensions: %{"x-additionalPropertiesName": "Frontend name"}
+ }
})
}
}
@@ -81,7 +87,7 @@ def change_password_operation do
defp change_password_request do
%Schema{
title: "ChangePasswordRequest",
- description: "POST body for changing the account's passowrd",
+ description: "POST body for changing the account's password",
type: :object,
required: [:password, :new_password, :new_password_confirmation],
properties: %{
@@ -130,23 +136,23 @@ defp change_email_request do
}
end
- def update_notificaton_settings_operation do
+ def update_notification_settings_operation do
%Operation{
- tags: ["Accounts"],
+ tags: ["Settings"],
summary: "Update Notification Settings",
security: [%{"oAuth" => ["write:accounts"]}],
- operationId: "UtilController.update_notificaton_settings",
+ operationId: "UtilController.update_notification_settings",
parameters: [
Operation.parameter(
:block_from_strangers,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"blocks notifications from accounts you do not follow"
),
Operation.parameter(
:hide_notification_contents,
:query,
- BooleanLike,
+ BooleanLike.schema(),
"removes the contents of a message from the push notification"
)
],
@@ -207,6 +213,7 @@ def captcha_operation do
%Operation{
summary: "Get a captcha",
operationId: "UtilController.captcha",
+ tags: ["Others"],
parameters: [],
responses: %{
200 => Operation.response("Success", "application/json", %Schema{type: :object})
@@ -214,9 +221,149 @@ def captcha_operation do
}
end
+ def move_account_operation do
+ %Operation{
+ tags: ["Account credentials"],
+ summary: "Move account",
+ security: [%{"oAuth" => ["write:accounts"]}],
+ operationId: "UtilController.move_account",
+ requestBody: request_body("Parameters", move_account_request(), required: true),
+ responses: %{
+ 200 =>
+ Operation.response("Success", "application/json", %Schema{
+ type: :object,
+ properties: %{status: %Schema{type: :string, example: "success"}}
+ }),
+ 400 => Operation.response("Error", "application/json", ApiError),
+ 403 => Operation.response("Error", "application/json", ApiError),
+ 404 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
+ defp move_account_request do
+ %Schema{
+ title: "MoveAccountRequest",
+ description: "POST body for moving the account",
+ type: :object,
+ required: [:password, :target_account],
+ properties: %{
+ password: %Schema{type: :string, description: "Current password"},
+ target_account: %Schema{
+ type: :string,
+ description: "The nickname of the target account to move to"
+ }
+ }
+ }
+ end
+
+ def list_aliases_operation do
+ %Operation{
+ tags: ["Account credentials"],
+ summary: "List account aliases",
+ security: [%{"oAuth" => ["read:accounts"]}],
+ operationId: "UtilController.list_aliases",
+ responses: %{
+ 200 =>
+ Operation.response("Success", "application/json", %Schema{
+ type: :object,
+ properties: %{
+ aliases: %Schema{
+ type: :array,
+ items: %Schema{type: :string},
+ example: ["foo@example.org"]
+ }
+ }
+ }),
+ 400 => Operation.response("Error", "application/json", ApiError),
+ 403 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
+ def add_alias_operation do
+ %Operation{
+ tags: ["Account credentials"],
+ summary: "Add an alias to this account",
+ security: [%{"oAuth" => ["write:accounts"]}],
+ operationId: "UtilController.add_alias",
+ requestBody: request_body("Parameters", add_alias_request(), required: true),
+ responses: %{
+ 200 =>
+ Operation.response("Success", "application/json", %Schema{
+ type: :object,
+ properties: %{
+ status: %Schema{
+ type: :string,
+ example: "success"
+ }
+ }
+ }),
+ 400 => Operation.response("Error", "application/json", ApiError),
+ 403 => Operation.response("Error", "application/json", ApiError),
+ 404 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
+ defp add_alias_request do
+ %Schema{
+ title: "AddAliasRequest",
+ description: "PUT body for adding aliases",
+ type: :object,
+ required: [:alias],
+ properties: %{
+ alias: %Schema{
+ type: :string,
+ description: "The nickname of the account to add to aliases"
+ }
+ }
+ }
+ end
+
+ def delete_alias_operation do
+ %Operation{
+ tags: ["Account credentials"],
+ summary: "Delete an alias from this account",
+ security: [%{"oAuth" => ["write:accounts"]}],
+ operationId: "UtilController.delete_alias",
+ requestBody: request_body("Parameters", delete_alias_request(), required: true),
+ responses: %{
+ 200 =>
+ Operation.response("Success", "application/json", %Schema{
+ type: :object,
+ properties: %{
+ status: %Schema{
+ type: :string,
+ example: "success"
+ }
+ }
+ }),
+ 400 => Operation.response("Error", "application/json", ApiError),
+ 403 => Operation.response("Error", "application/json", ApiError),
+ 404 => Operation.response("Error", "application/json", ApiError)
+ }
+ }
+ end
+
+ defp delete_alias_request do
+ %Schema{
+ title: "DeleteAliasRequest",
+ description: "PUT body for deleting aliases",
+ type: :object,
+ required: [:alias],
+ properties: %{
+ alias: %Schema{
+ type: :string,
+ description: "The nickname of the account to delete from aliases"
+ }
+ }
+ }
+ end
+
def healthcheck_operation do
%Operation{
- tags: ["Accounts"],
+ tags: ["Others"],
summary: "Quick status check on the instance",
security: [%{"oAuth" => ["write:accounts"]}],
operationId: "UtilController.healthcheck",
@@ -231,7 +378,7 @@ def healthcheck_operation do
def remote_subscribe_operation do
%Operation{
- tags: ["Accounts"],
+ tags: ["Remote interaction"],
summary: "Remote Subscribe",
operationId: "UtilController.remote_subscribe",
parameters: [],
@@ -241,7 +388,7 @@ def remote_subscribe_operation do
def remote_interaction_operation do
%Operation{
- tags: ["Accounts"],
+ tags: ["Remote interaction"],
summary: "Remote interaction",
operationId: "UtilController.remote_interaction",
requestBody: request_body("Parameters", remote_interaction_request(), required: true),
@@ -265,6 +412,16 @@ defp remote_interaction_request do
}
end
+ def show_subscribe_form_operation do
+ %Operation{
+ tags: ["Remote interaction"],
+ summary: "Show remote subscribe form",
+ operationId: "UtilController.show_subscribe_form",
+ parameters: [],
+      responses: %{200 => Operation.response("Web Page", "text/html", %Schema{type: :string})}
+ }
+ end
+
defp delete_account_request do
%Schema{
title: "AccountDeleteRequest",
diff --git a/lib/pleroma/web/api_spec/schemas/account.ex b/lib/pleroma/web/api_spec/schemas/account.ex
index e8a529f2e..8aeb821a8 100644
--- a/lib/pleroma/web/api_spec/schemas/account.ex
+++ b/lib/pleroma/web/api_spec/schemas/account.ex
@@ -33,6 +33,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Account do
header: %Schema{type: :string, format: :uri},
id: FlakeID,
locked: %Schema{type: :boolean},
+ mute_expires_at: %Schema{type: :string, format: "date-time", nullable: true},
note: %Schema{type: :string, format: :html},
statuses_count: %Schema{type: :integer},
url: %Schema{type: :string, format: :uri},
diff --git a/lib/pleroma/web/api_spec/schemas/attachment.ex b/lib/pleroma/web/api_spec/schemas/attachment.ex
index 48634a14f..2871b5f99 100644
--- a/lib/pleroma/web/api_spec/schemas/attachment.ex
+++ b/lib/pleroma/web/api_spec/schemas/attachment.ex
@@ -11,7 +11,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
title: "Attachment",
description: "Represents a file or media attachment that can be added to a status.",
type: :object,
- requried: [:id, :url, :preview_url],
+ required: [:id, :url, :preview_url],
properties: %{
id: %Schema{type: :string, description: "The ID of the attachment in the database."},
url: %Schema{
diff --git a/lib/pleroma/web/api_spec/schemas/bookmark_folder.ex b/lib/pleroma/web/api_spec/schemas/bookmark_folder.ex
new file mode 100644
index 000000000..e8b4f43b7
--- /dev/null
+++ b/lib/pleroma/web/api_spec/schemas/bookmark_folder.ex
@@ -0,0 +1,26 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Schemas.BookmarkFolder do
+ alias OpenApiSpex.Schema
+ alias Pleroma.Web.ApiSpec.Schemas.FlakeID
+
+ require OpenApiSpex
+
+ OpenApiSpex.schema(%{
+ title: "BookmarkFolder",
+ description: "Response schema for a bookmark folder",
+ type: :object,
+ properties: %{
+ id: FlakeID,
+ name: %Schema{type: :string, description: "Folder name"},
+ emoji: %Schema{type: :string, description: "Folder emoji", nullable: true}
+ },
+ example: %{
+ "id" => "9toJCu5YZW7O7gfvH6",
+ "name" => "Read later",
+ "emoji" => nil
+ }
+ })
+end
diff --git a/lib/pleroma/web/api_spec/schemas/poll.ex b/lib/pleroma/web/api_spec/schemas/poll.ex
index 91570582b..20cf5b061 100644
--- a/lib/pleroma/web/api_spec/schemas/poll.ex
+++ b/lib/pleroma/web/api_spec/schemas/poll.ex
@@ -56,6 +56,15 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Poll do
}
},
description: "Possible answers for the poll."
+ },
+ pleroma: %Schema{
+ type: :object,
+ properties: %{
+ non_anonymous: %Schema{
+ type: :boolean,
+ description: "Can voters be publicly identified?"
+ }
+ }
}
},
example: %{
@@ -79,7 +88,10 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Poll do
votes_count: 4
}
],
- emojis: []
+ emojis: [],
+ pleroma: %{
+ non_anonymous: false
+ }
}
})
end
diff --git a/lib/pleroma/web/api_spec/schemas/status.ex b/lib/pleroma/web/api_spec/schemas/status.ex
index 6e6e30315..6e537b5da 100644
--- a/lib/pleroma/web/api_spec/schemas/status.ex
+++ b/lib/pleroma/web/api_spec/schemas/status.ex
@@ -58,6 +58,10 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
format: :uri,
description: "Preview thumbnail"
},
+ image_description: %Schema{
+ type: :string,
+ description: "Alternate text that describes what is in the thumbnail"
+ },
title: %Schema{type: :string, description: "Title of linked resource"},
description: %Schema{type: :string, description: "Description of preview"}
}
@@ -73,6 +77,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
format: "date-time",
description: "The date when this status was created"
},
+ edited_at: %Schema{
+ type: :string,
+ format: "date-time",
+ nullable: true,
+ description: "The date when this status was last edited"
+ },
emojis: %Schema{
type: :array,
items: Emoji,
@@ -138,13 +148,23 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
properties: %{
content: %Schema{
type: :object,
- additionalProperties: %Schema{type: :string},
+ additionalProperties: %Schema{
+ type: :string,
+ description: "Alternate representation in the MIME type specified",
+ extensions: %{"x-additionalPropertiesName": "MIME type"}
+ },
description:
"A map consisting of alternate representations of the `content` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`"
},
+ context: %Schema{
+ type: :string,
+ description: "The thread identifier the status is associated with"
+ },
conversation_id: %Schema{
type: :integer,
- description: "The ID of the AP context the status is associated with (if any)"
+ deprecated: true,
+ description:
+ "The ID of the AP context the status is associated with (if any); deprecated, please use `context` instead"
},
direct_conversation_id: %Schema{
type: :integer,
@@ -177,13 +197,41 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
nullable: true,
description: "The `acct` property of User entity for replied user (if any)"
},
+ quote: %Schema{
+ allOf: [%OpenApiSpex.Reference{"$ref": "#/components/schemas/Status"}],
+ nullable: true,
+ description: "Quoted status (if any)"
+ },
+ quote_id: %Schema{
+ nullable: true,
+ allOf: [FlakeID],
+ description: "ID of the status being quoted, if any"
+ },
+ quote_url: %Schema{
+ type: :string,
+ format: :uri,
+ nullable: true,
+ description: "URL of the quoted status"
+ },
+ quote_visible: %Schema{
+ type: :boolean,
+ description: "`true` if the quoted post is visible to the user"
+ },
+ quotes_count: %Schema{
+ type: :integer,
+ description: "How many statuses quoted this status"
+ },
local: %Schema{
type: :boolean,
description: "`true` if the post was made on the local instance"
},
spoiler_text: %Schema{
type: :object,
- additionalProperties: %Schema{type: :string},
+ additionalProperties: %Schema{
+ type: :string,
+ description: "Alternate representation in the MIME type specified",
+ extensions: %{"x-additionalPropertiesName": "MIME type"}
+ },
description:
"A map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`."
},
@@ -319,6 +367,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
"pinned" => false,
"pleroma" => %{
"content" => %{"text/plain" => "foobar"},
+ "context" => "http://localhost:4001/objects/8b4c0c80-6a37-4d2a-b1b9-05a19e3875aa",
"conversation_id" => 345_972,
"direct_conversation_id" => nil,
"emoji_reactions" => [],
@@ -326,7 +375,8 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
"in_reply_to_account_acct" => nil,
"local" => true,
"spoiler_text" => %{"text/plain" => ""},
- "thread_muted" => false
+ "thread_muted" => false,
+ "quotes_count" => 0
},
"poll" => nil,
"reblog" => nil,
diff --git a/lib/pleroma/web/api_spec/scopes/compiler.ex b/lib/pleroma/web/api_spec/scopes/compiler.ex
new file mode 100644
index 000000000..162edc9a3
--- /dev/null
+++ b/lib/pleroma/web/api_spec/scopes/compiler.ex
@@ -0,0 +1,82 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Scopes.Compiler do
+ defmacro __before_compile__(_env) do
+ strings = __MODULE__.extract_all_scopes()
+
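+    # Inject a no-op dgettext/2 call for every scope string so that gettext's
+    # extractor registers them under the "oauth_scopes" domain for translation.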
+ quote do
+ def placeholder do
+ unquote do
+ Enum.map(
+ strings,
+ fn string ->
+ quote do
+ Pleroma.Web.Gettext.dgettext_noop(
+ "oauth_scopes",
+ unquote(string)
+ )
+ end
+ end
+ )
+ end
+ end
+ end
+ end
+
+ def extract_all_scopes do
+ extract_all_scopes_from(Pleroma.Web.ApiSpec.spec())
+ end
+
+ def extract_all_scopes_from(specs) do
+ specs.paths
+ |> Enum.reduce([], fn
+ {_path, %{} = path_item}, acc ->
+ extract_routes(path_item)
+ |> Enum.flat_map(fn operation -> process_operation(operation) end)
+ |> Kernel.++(acc)
+
+ {_, _}, acc ->
+ acc
+ end)
+ |> Enum.uniq()
+ end
+
+ defp extract_routes(path_item) do
+ path_item
+ |> Map.from_struct()
+ |> Enum.map(fn {_method, path_item} -> path_item end)
+ |> Enum.filter(fn
+ %OpenApiSpex.Operation{} = _operation -> true
+ _ -> false
+ end)
+ end
+
+ defp process_operation(operation) do
+ operation.security
+ |> Kernel.||([])
+ |> Enum.flat_map(fn
+ %{"oAuth" => scopes} -> process_scopes(scopes)
+ _ -> []
+ end)
+ end
+
+ defp process_scopes(scopes) do
+ scopes
+ |> Enum.flat_map(fn scope ->
+ process_scope(scope)
+ end)
+ end
+
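+  # Expands a scope into itself plus all of its ancestors, e.g. "admin:read:accounts"
+  # yields ["admin:read:accounts", "admin:read", "admin"].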
+ def process_scope(scope) do
+ hierarchy = String.split(scope, ":")
+
+ {_, list} =
+ Enum.reduce(hierarchy, {"", []}, fn comp, {cur, list} ->
+ {cur <> comp <> ":", [cur <> comp | list]}
+ end)
+
+ list
+ end
+end
diff --git a/lib/pleroma/web/api_spec/scopes/translator.ex b/lib/pleroma/web/api_spec/scopes/translator.ex
new file mode 100644
index 000000000..54eea3593
--- /dev/null
+++ b/lib/pleroma/web/api_spec/scopes/translator.ex
@@ -0,0 +1,10 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Scopes.Translator do
+ require Pleroma.Web.ApiSpec.Scopes.Compiler
+ require Pleroma.Web.Gettext
+
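+  # This module exists only to trigger the Compiler's __before_compile__ hook, which
+  # emits the dgettext_noop calls that make every OAuth scope string translatable.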
+ @before_compile Pleroma.Web.ApiSpec.Scopes.Compiler
+end
diff --git a/lib/pleroma/web/auth/authenticator.ex b/lib/pleroma/web/auth/authenticator.ex
index a0bd154db..01bf1575c 100644
--- a/lib/pleroma/web/auth/authenticator.ex
+++ b/lib/pleroma/web/auth/authenticator.ex
@@ -5,7 +5,7 @@
defmodule Pleroma.Web.Auth.Authenticator do
@callback get_user(Plug.Conn.t()) :: {:ok, user :: struct()} | {:error, any()}
@callback create_from_registration(Plug.Conn.t(), registration :: struct()) ::
- {:ok, User.t()} | {:error, any()}
+ {:ok, Pleroma.User.t()} | {:error, any()}
@callback get_registration(Plug.Conn.t()) :: {:ok, registration :: struct()} | {:error, any()}
@callback handle_error(Plug.Conn.t(), any()) :: any()
@callback auth_template() :: String.t() | nil
diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex
index 1b95ee89c..34e480d73 100644
--- a/lib/pleroma/web/common_api.ex
+++ b/lib/pleroma/web/common_api.ex
@@ -6,7 +6,9 @@ defmodule Pleroma.Web.CommonAPI do
alias Pleroma.Activity
alias Pleroma.Conversation.Participation
alias Pleroma.Formatter
+ alias Pleroma.ModerationLog
alias Pleroma.Object
+ alias Pleroma.Rule
alias Pleroma.ThreadMute
alias Pleroma.User
alias Pleroma.UserRelationship
@@ -32,6 +34,7 @@ def block(blocker, blocked) do
def post_chat_message(%User{} = user, %User{} = recipient, content, opts \\ []) do
with maybe_attachment <- opts[:media_id] && Object.get_by_id(opts[:media_id]),
+ :ok <- validate_chat_attachment_attribution(maybe_attachment, user),
:ok <- validate_chat_content_length(content, !!maybe_attachment),
{_, {:ok, chat_message_data, _meta}} <-
{:build_object,
@@ -70,6 +73,17 @@ defp format_chat_content(content) do
text
end
+ defp validate_chat_attachment_attribution(nil, _), do: :ok
+
+ defp validate_chat_attachment_attribution(attachment, user) do
+ with :ok <- Object.authorize_access(attachment, user) do
+ :ok
+ else
+ e ->
+ e
+ end
+ end
+
defp validate_chat_content_length(_, true), do: :ok
defp validate_chat_content_length(nil, false), do: {:error, :no_content}
@@ -141,12 +155,27 @@ def reject_follow_request(follower, followed) do
def delete(activity_id, user) do
with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <-
- {:find_activity, Activity.get_by_id(activity_id)},
+ {:find_activity, Activity.get_by_id(activity_id, filter: [])},
{_, %Object{} = object, _} <-
{:find_object, Object.normalize(activity, fetch: false), activity},
- true <- User.superuser?(user) || user.ap_id == object.data["actor"],
+ true <- User.privileged?(user, :messages_delete) || user.ap_id == object.data["actor"],
{:ok, delete_data, _} <- Builder.delete(user, object.data["id"]),
{:ok, delete, _} <- Pipeline.common_pipeline(delete_data, local: true) do
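+      # When a privileged user deletes someone else's message, record it in the moderation log.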
+ if User.privileged?(user, :messages_delete) and user.ap_id != object.data["actor"] do
+ action =
+ if object.data["type"] == "ChatMessage" do
+ "chat_message_delete"
+ else
+ "status_delete"
+ end
+
+ ModerationLog.insert_log(%{
+ action: action,
+ actor: user,
+ subject_id: activity_id
+ })
+ end
+
{:ok, delete}
else
{:find_activity, _} ->
@@ -344,7 +373,7 @@ def public_announce?(_, %{visibility: visibility})
do: visibility in ~w(public unlisted)
def public_announce?(object, _) do
- Visibility.is_public?(object)
+ Visibility.public?(object)
end
def get_visibility(_, _, %Participation{}), do: {"direct", "direct"}
@@ -402,6 +431,41 @@ def post(user, %{status: _} = data) do
end
end
+ def update(user, orig_activity, changes) do
+ with orig_object <- Object.normalize(orig_activity),
+ {:ok, new_object} <- make_update_data(user, orig_object, changes),
+ {:ok, update_data, _} <- Builder.update(user, new_object),
+ {:ok, update, _} <- Pipeline.common_pipeline(update_data, local: true) do
+ {:ok, update}
+ else
+ _ -> {:error, nil}
+ end
+ end
+
+ defp make_update_data(user, orig_object, changes) do
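+    # Edits keep the original visibility and reply target; kept_params overrides any
+    # attempt to change them through the submitted params.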
+ kept_params = %{
+ visibility: Visibility.get_visibility(orig_object),
+ in_reply_to_id:
+ with replied_id when is_binary(replied_id) <- orig_object.data["inReplyTo"],
+ %Activity{id: activity_id} <- Activity.get_create_by_object_ap_id(replied_id) do
+ activity_id
+ else
+ _ -> nil
+ end
+ }
+
+ params = Map.merge(changes, kept_params)
+
+ with {:ok, draft} <- ActivityDraft.create(user, params) do
+ change =
+ Object.Updater.make_update_object_data(orig_object.data, draft.object, Utils.make_date())
+
+ {:ok, change}
+ else
+ _ -> {:error, nil}
+ end
+ end
+
@spec pin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()}
def pin(id, %User{} = user) do
with %Activity{} = activity <- create_activity_by_id(id),
@@ -437,12 +501,12 @@ defp object_type_is_allowed_for_pin(%{data: %{"type" => type}}) do
end
defp activity_is_public(activity) do
- with false <- Visibility.is_public?(activity) do
+ with false <- Visibility.public?(activity) do
{:error, :visibility_error}
end
end
- @spec unpin(String.t(), User.t()) :: {:ok, User.t()} | {:error, term()}
+ @spec unpin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()}
def unpin(id, user) do
with %Activity{} = activity <- create_activity_by_id(id),
{:ok, unpin_data, _} <- Builder.unpin(user, activity.object),
@@ -487,7 +551,7 @@ def remove_mute(user_id, activity_id) do
remove_mute(user, activity)
else
{what, result} = error ->
- Logger.warn(
+ Logger.warning(
"CommonAPI.remove_mute/2 failed. #{what}: #{result}, user_id: #{user_id}, activity_id: #{activity_id}"
)
@@ -505,14 +569,16 @@ def thread_muted?(_, _), do: false
def report(user, data) do
with {:ok, account} <- get_reported_account(data.account_id),
{:ok, {content_html, _, _}} <- make_report_content_html(data[:comment]),
- {:ok, statuses} <- get_report_statuses(account, data) do
+ {:ok, statuses} <- get_report_statuses(account, data),
+ rules <- get_report_rules(Map.get(data, :rule_ids, nil)) do
ActivityPub.flag(%{
context: Utils.generate_context_id(),
actor: user,
account: account,
statuses: statuses,
content: content_html,
- forward: Map.get(data, :forward, false)
+ forward: Map.get(data, :forward, false),
+ rules: rules
})
end
end
@@ -524,6 +590,15 @@ defp get_reported_account(account_id) do
end
end
+ defp get_report_rules(nil) do
+ nil
+ end
+
+ defp get_report_rules(rule_ids) do
+ rule_ids
+ |> Enum.filter(&Rule.exists?/1)
+ end
+
def update_report_state(activity_ids, state) when is_list(activity_ids) do
case Utils.update_report_state(activity_ids, state) do
:ok -> {:ok, activity_ids}
@@ -532,7 +607,7 @@ def update_report_state(activity_ids, state) when is_list(activity_ids) do
end
def update_report_state(activity_id, state) do
- with %Activity{} = activity <- Activity.get_by_id(activity_id) do
+ with %Activity{} = activity <- Activity.get_by_id(activity_id, filter: []) do
Utils.update_report_state(activity, state)
else
nil -> {:error, :not_found}
diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex
index 7c21c8c3a..8aa1e258d 100644
--- a/lib/pleroma/web/common_api/activity_draft.ex
+++ b/lib/pleroma/web/common_api/activity_draft.ex
@@ -7,10 +7,14 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
alias Pleroma.Conversation.Participation
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.Builder
+ alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils
import Pleroma.Web.Gettext
+ import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
+
+ @type t :: %__MODULE__{}
defstruct valid?: true,
errors: [],
@@ -22,6 +26,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
attachments: [],
in_reply_to: nil,
in_reply_to_conversation: nil,
+ quote_post: nil,
visibility: nil,
expires_at: nil,
extra: nil,
@@ -53,7 +58,9 @@ def create(user, params) do
|> poll()
|> with_valid(&in_reply_to/1)
|> with_valid(&in_reply_to_conversation/1)
+ |> with_valid("e_post/1)
|> with_valid(&visibility/1)
+ |> with_valid("ing_visibility/1)
|> content()
|> with_valid(&to_and_cc/1)
|> with_valid(&context/1)
@@ -78,7 +85,7 @@ def listen(user, params) do
defp listen_object(draft) do
object =
draft.params
- |> Map.take([:album, :artist, :title, :length])
+ |> Map.take([:album, :artist, :title, :length, :externalLink])
|> Map.new(fn {key, value} -> {to_string(key), value} end)
|> Map.put("type", "Audio")
|> Map.put("to", draft.to)
@@ -111,7 +118,7 @@ defp full_payload(%{status: status, summary: summary} = draft) do
end
defp attachments(%{params: params} = draft) do
- attachments = Utils.attachments_from_ids(params)
+ attachments = Utils.attachments_from_ids(params, draft.user)
draft = %__MODULE__{draft | attachments: attachments}
case Utils.validate_attachments_count(attachments) do
@@ -122,8 +129,22 @@ defp attachments(%{params: params} = draft) do
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
- defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do
- %__MODULE__{draft | in_reply_to: Activity.get_by_id(id)}
+ defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do
+ add_error(draft, dgettext("errors", "Cannot reply to a deleted status"))
+ end
+
+ defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do
+ activity = Activity.get_by_id(id)
+
+ params =
+ if is_nil(activity) do
+ # Deleted activities are returned as nil
+ Map.put(params, :in_reply_to_status_id, :deleted)
+ else
+ Map.put(params, :in_reply_to_status_id, activity)
+ end
+
+ in_reply_to(%{draft | params: params})
end
defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do
@@ -132,6 +153,18 @@ defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}}
defp in_reply_to(draft), do: draft
+ defp quote_post(%{params: %{quote_id: id}} = draft) when not_empty_string(id) do
+ case Activity.get_by_id_with_object(id) do
+ %Activity{} = activity ->
+ %__MODULE__{draft | quote_post: activity}
+
+ _ ->
+ draft
+ end
+ end
+
+ defp quote_post(draft), do: draft
+
defp in_reply_to_conversation(draft) do
in_reply_to_conversation = Participation.get(draft.params[:in_reply_to_conversation_id])
%__MODULE__{draft | in_reply_to_conversation: in_reply_to_conversation}
@@ -147,6 +180,29 @@ defp visibility(%{params: params} = draft) do
end
end
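+
+  # Quoting rules: public, unlisted and local posts may be quoted by anyone; private
+  # posts only by their author; direct messages can never be quoted.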
+ defp can_quote?(_draft, _object, visibility) when visibility in ~w(public unlisted local) do
+ true
+ end
+
+ defp can_quote?(draft, object, "private") do
+ draft.user.ap_id == object.data["actor"]
+ end
+
+ defp can_quote?(_, _, _) do
+ false
+ end
+
+ defp quoting_visibility(%{quote_post: %Activity{}} = draft) do
+ with %Object{} = object <- Object.normalize(draft.quote_post, fetch: false),
+ true <- can_quote?(draft, object, Visibility.get_visibility(object)) do
+ draft
+ else
+ _ -> add_error(draft, dgettext("errors", "Cannot quote private message"))
+ end
+ end
+
+ defp quoting_visibility(draft), do: draft
+
defp expires_at(draft) do
case CommonAPI.check_expiry_date(draft.params[:expires_in]) do
{:ok, expires_at} -> %__MODULE__{draft | expires_at: expires_at}
@@ -164,12 +220,15 @@ defp poll(draft) do
end
end
- defp content(draft) do
+ defp content(%{mentions: mentions} = draft) do
{content_html, mentioned_users, tags} = Utils.make_content_html(draft)
+ mentioned_ap_ids =
+ Enum.map(mentioned_users, fn {_, mentioned_user} -> mentioned_user.ap_id end)
+
mentions =
- mentioned_users
- |> Enum.map(fn {_, mentioned_user} -> mentioned_user.ap_id end)
+ mentions
+ |> Kernel.++(mentioned_ap_ids)
|> Utils.get_addressed_users(draft.params[:to])
%__MODULE__{draft | content_html: content_html, mentions: mentions, tags: tags}
@@ -224,7 +283,10 @@ defp object(draft) do
object =
note_data
|> Map.put("emoji", emoji)
- |> Map.put("source", draft.status)
+ |> Map.put("source", %{
+ "content" => draft.status,
+ "mediaType" => Utils.get_content_type(draft.params[:content_type])
+ })
|> Map.put("generator", draft.params[:generator])
%__MODULE__{draft | object: object}
diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex
index ce850b038..52c08f00f 100644
--- a/lib/pleroma/web/common_api/utils.ex
+++ b/lib/pleroma/web/common_api/utils.ex
@@ -23,21 +23,21 @@ defmodule Pleroma.Web.CommonAPI.Utils do
require Logger
require Pleroma.Constants
- def attachments_from_ids(%{media_ids: ids, descriptions: desc}) do
- attachments_from_ids_descs(ids, desc)
+ def attachments_from_ids(%{media_ids: ids, descriptions: desc}, user) do
+ attachments_from_ids_descs(ids, desc, user)
end
- def attachments_from_ids(%{media_ids: ids}) do
- attachments_from_ids_no_descs(ids)
+ def attachments_from_ids(%{media_ids: ids}, user) do
+ attachments_from_ids_no_descs(ids, user)
end
- def attachments_from_ids(_), do: []
+ def attachments_from_ids(_, _), do: []
- def attachments_from_ids_no_descs([]), do: []
+ def attachments_from_ids_no_descs([], _), do: []
- def attachments_from_ids_no_descs(ids) do
+ def attachments_from_ids_no_descs(ids, user) do
Enum.map(ids, fn media_id ->
- case Repo.get(Object, media_id) do
+ case get_attachment(media_id, user) do
%Object{data: data} -> data
_ -> nil
end
@@ -45,19 +45,29 @@ def attachments_from_ids_no_descs(ids) do
|> Enum.reject(&is_nil/1)
end
- def attachments_from_ids_descs([], _), do: []
+ def attachments_from_ids_descs([], _, _), do: []
- def attachments_from_ids_descs(ids, descs_str) do
+ def attachments_from_ids_descs(ids, descs_str, user) do
{_, descs} = Jason.decode(descs_str)
Enum.map(ids, fn media_id ->
- with %Object{data: data} <- Repo.get(Object, media_id) do
+ with %Object{data: data} <- get_attachment(media_id, user) do
Map.put(data, "name", descs[media_id])
end
end)
|> Enum.reject(&is_nil/1)
end
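+
+  # Resolve a media id to its Object only when it is an upload-type object that the
+  # requesting user is authorized to access; anything else is treated as missing.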
+ defp get_attachment(media_id, user) do
+ with %Object{data: data} = object <- Repo.get(Object, media_id),
+ %{"type" => type} when type in Pleroma.Constants.upload_object_types() <- data,
+ :ok <- Object.authorize_access(object, user) do
+ object
+ else
+ _ -> nil
+ end
+ end
+
@spec get_to_and_cc(ActivityDraft.t()) :: {list(String.t()), list(String.t())}
def get_to_and_cc(%{in_reply_to_conversation: %Participation{} = participation}) do
@@ -99,7 +109,7 @@ def get_to_and_cc(%{visibility: "private"} = draft) do
def get_to_and_cc(%{visibility: "direct"} = draft) do
# If the OP is a DM already, add the implicit actor.
- if draft.in_reply_to && Visibility.is_direct?(draft.in_reply_to) do
+ if draft.in_reply_to && Visibility.direct?(draft.in_reply_to) do
{Enum.uniq([draft.in_reply_to.data["actor"] | draft.mentions]), []}
else
{draft.mentions, []}
@@ -141,6 +151,8 @@ def make_poll_data(%{poll: %{options: options, expires_in: expires_in}} = data)
when is_list(options) do
limits = Config.get([:instance, :poll_limits])
+ options = options |> Enum.uniq()
+
with :ok <- validate_poll_expiration(expires_in, limits),
:ok <- validate_poll_options_amount(options, limits),
:ok <- validate_poll_options_length(options, limits) do
@@ -176,10 +188,15 @@ def make_poll_data(_data) do
end
defp validate_poll_options_amount(options, %{max_options: max_options}) do
- if Enum.count(options) > max_options do
- {:error, "Poll can't contain more than #{max_options} options"}
- else
- :ok
+ cond do
+ Enum.count(options) < 2 ->
+ {:error, "Poll must contain at least 2 options"}
+
+ Enum.count(options) > max_options ->
+ {:error, "Poll can't contain more than #{max_options} options"}
+
+ true ->
+ :ok
end
end
@@ -219,7 +236,7 @@ def make_content_html(%ActivityDraft{} = draft) do
|> maybe_add_attachments(draft.attachments, attachment_links)
end
- defp get_content_type(content_type) do
+ def get_content_type(content_type) do
if Enum.member?(Config.get([:instance, :allowed_post_formats]), content_type) do
content_type
else
@@ -304,13 +321,13 @@ def date_to_asctime(date) when is_binary(date) do
format_asctime(date)
else
_e ->
- Logger.warn("Date #{date} in wrong format, must be ISO 8601")
+ Logger.warning("Date #{date} in wrong format, must be ISO 8601")
""
end
end
def date_to_asctime(date) do
- Logger.warn("Date #{date} in wrong format, must be ISO 8601")
+ Logger.warning("Date #{date} in wrong format, must be ISO 8601")
""
end
@@ -449,35 +466,6 @@ def get_report_statuses(%User{ap_id: actor}, %{status_ids: status_ids})
def get_report_statuses(_, _), do: {:ok, nil}
- # DEPRECATED mostly, context objects are now created at insertion time.
- def context_to_conversation_id(context) do
- with %Object{id: id} <- Object.get_cached_by_ap_id(context) do
- id
- else
- _e ->
- changeset = Object.context_mapping(context)
-
- case Repo.insert(changeset) do
- {:ok, %{id: id}} ->
- id
-
- # This should be solved by an upsert, but it seems ecto
- # has problems accessing the constraint inside the jsonb.
- {:error, _} ->
- Object.get_cached_by_ap_id(context).id
- end
- end
- end
-
- def conversation_id_to_context(id) do
- with %Object{data: %{"id" => context}} <- Repo.get(Object, id) do
- context
- else
- _e ->
- {:error, dgettext("errors", "No such conversation")}
- end
- end
-
def validate_character_limit("" = _full_payload, [] = _attachments) do
{:error, dgettext("errors", "Cannot post an empty status without attachments")}
end
diff --git a/lib/pleroma/web/controller_helper.ex b/lib/pleroma/web/controller_helper.ex
index 0c7fc17f4..1caf0f7e6 100644
--- a/lib/pleroma/web/controller_helper.ex
+++ b/lib/pleroma/web/controller_helper.ex
@@ -20,7 +20,7 @@ def json_response(conn, status, json) do
|> json(json)
end
- @spec fetch_integer_param(map(), String.t(), integer() | nil) :: integer() | nil
+ @spec fetch_integer_param(map(), String.t() | atom(), integer() | nil) :: integer() | nil
def fetch_integer_param(params, name, default \\ nil) do
params
|> Map.get(name, default)
@@ -53,10 +53,15 @@ def add_link_headers(conn, entries, extra_params) do
end
end
+ # TODO: Only fetch the params from open_api_spex when everything is converted
@id_keys Pagination.page_keys() -- ["limit", "order"]
defp build_pagination_fields(conn, min_id, max_id, extra_params) do
params =
- conn.params
+ if Map.has_key?(conn.private, :open_api_spex) do
+ get_in(conn, [Access.key(:private), Access.key(:open_api_spex), Access.key(:params)])
+ else
+ conn.params
+ end
|> Map.drop(Map.keys(conn.path_params) |> Enum.map(&String.to_existing_atom/1))
|> Map.merge(extra_params)
|> Map.drop(@id_keys)
@@ -85,18 +90,15 @@ def get_pagination_fields(conn, entries, extra_params \\ %{}) do
end
end
- def assign_account_by_id(conn, _) do
- case Pleroma.User.get_cached_by_id(conn.params.id) do
+ def assign_account_by_id(%{private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
+ case Pleroma.User.get_cached_by_id(id) do
%Pleroma.User{} = account -> assign(conn, :account, account)
nil -> Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found}) |> halt()
end
end
def try_render(conn, target, params) when is_binary(target) do
- case render(conn, target, params) do
- nil -> render_error(conn, :not_implemented, "Can't display this activity")
- res -> res
- end
+ render(conn, target, params)
end
def try_render(conn, _, _) do
diff --git a/lib/pleroma/web/embed_controller.ex b/lib/pleroma/web/embed_controller.ex
index 8b9f0a051..2ca4501a6 100644
--- a/lib/pleroma/web/embed_controller.ex
+++ b/lib/pleroma/web/embed_controller.ex
@@ -11,12 +11,10 @@ defmodule Pleroma.Web.EmbedController do
alias Pleroma.Web.ActivityPub.Visibility
- plug(:put_layout, :embed)
-
def show(conn, %{"id" => id}) do
with %Activity{local: true} = activity <-
Activity.get_by_id_with_object(id),
- true <- Visibility.is_public?(activity.object) do
+ true <- Visibility.public?(activity.object) do
{:ok, author} = User.get_or_fetch(activity.object.data["actor"])
conn
diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex
index d8d40cceb..2e2104904 100644
--- a/lib/pleroma/web/endpoint.ex
+++ b/lib/pleroma/web/endpoint.ex
@@ -9,7 +9,31 @@ defmodule Pleroma.Web.Endpoint do
alias Pleroma.Config
- socket("/socket", Pleroma.Web.UserSocket)
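+  # Websocket endpoint for the Mastodon-compatible streaming API; fullsweep_after
+  # forces periodic full garbage collection so long-lived websocket handler processes
+  # do not hold on to large binaries.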
+ socket("/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler,
+ longpoll: false,
+ websocket: [
+ path: "/",
+ compress: false,
+ error_handler: {Pleroma.Web.MastodonAPI.WebsocketHandler, :handle_error, []},
+ fullsweep_after: 20
+ ]
+ )
+
+ socket("/socket", Pleroma.Web.UserSocket,
+ websocket: [
+ path: "/websocket",
+ serializer: [
+ {Phoenix.Socket.V1.JSONSerializer, "~> 1.0.0"},
+ {Phoenix.Socket.V2.JSONSerializer, "~> 2.0.0"}
+ ],
+ timeout: 60_000,
+ transport_log: false,
+ compress: false,
+ fullsweep_after: 20
+ ],
+ longpoll: false
+ )
+
socket("/live", Phoenix.LiveView.Socket)
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
@@ -19,7 +43,8 @@ defmodule Pleroma.Web.Endpoint do
plug(Pleroma.Web.Plugs.HTTPSecurityPlug)
plug(Pleroma.Web.Plugs.UploadedMedia)
- @static_cache_control "public, no-cache"
+ @static_cache_control "public, max-age=1209600"
+ @static_cache_disabled "public, no-cache"
# InstanceStatic needs to be before Plug.Static to be able to override shipped-static files
# If you're adding new paths to `only:` you'll need to configure them in InstanceStatic as well
@@ -30,22 +55,32 @@ defmodule Pleroma.Web.Endpoint do
from: :pleroma,
only: ["emoji", "images"],
gzip: true,
- cache_control_for_etags: "public, max-age=1209600",
- headers: %{
- "cache-control" => "public, max-age=1209600"
- }
- )
-
- plug(Pleroma.Web.Plugs.InstanceStatic,
- at: "/",
- gzip: true,
cache_control_for_etags: @static_cache_control,
headers: %{
"cache-control" => @static_cache_control
}
)
- # Careful! No `only` restriction here, as we don't know what frontends contain.
+ plug(Pleroma.Web.Plugs.InstanceStatic,
+ at: "/",
+ gzip: true,
+ cache_control_for_etags: @static_cache_disabled,
+ headers: %{
+ "cache-control" => @static_cache_disabled
+ }
+ )
+
+ plug(Pleroma.Web.Plugs.FrontendStatic,
+ at: "/",
+ frontend_type: :primary,
+ only: ["index.html"],
+ gzip: true,
+ cache_control_for_etags: @static_cache_disabled,
+ headers: %{
+ "cache-control" => @static_cache_disabled
+ }
+ )
+
plug(Pleroma.Web.Plugs.FrontendStatic,
at: "/",
frontend_type: :primary,
@@ -62,9 +97,9 @@ defmodule Pleroma.Web.Endpoint do
at: "/pleroma/admin",
frontend_type: :admin,
gzip: true,
- cache_control_for_etags: @static_cache_control,
+ cache_control_for_etags: @static_cache_disabled,
headers: %{
- "cache-control" => @static_cache_control
+ "cache-control" => @static_cache_disabled
}
)
@@ -79,9 +114,9 @@ defmodule Pleroma.Web.Endpoint do
only: Pleroma.Constants.static_only_files(),
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
gzip: true,
- cache_control_for_etags: @static_cache_control,
+ cache_control_for_etags: @static_cache_disabled,
headers: %{
- "cache-control" => @static_cache_control
+ "cache-control" => @static_cache_disabled
}
)
@@ -101,13 +136,10 @@ defmodule Pleroma.Web.Endpoint do
plug(Plug.Logger, log: :debug)
plug(Plug.Parsers,
- parsers: [
- :urlencoded,
- {:multipart, length: {Config, :get, [[:instance, :upload_limit]]}},
- :json
- ],
+ parsers: [:urlencoded, Pleroma.Web.Multipart, :json],
pass: ["*/*"],
json_decoder: Jason,
+ # Note: this is compile-time only, won't work for database-config
length: Config.get([:instance, :upload_limit]),
body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []}
)
@@ -141,47 +173,6 @@ defmodule Pleroma.Web.Endpoint do
plug(Pleroma.Web.Plugs.RemoteIp)
- defmodule Instrumenter do
- use Prometheus.PhoenixInstrumenter
- end
-
- defmodule PipelineInstrumenter do
- use Prometheus.PlugPipelineInstrumenter
- end
-
- defmodule MetricsExporter do
- use Prometheus.PlugExporter
- end
-
- defmodule MetricsExporterCaller do
- @behaviour Plug
-
- def init(opts), do: opts
-
- def call(conn, opts) do
- prometheus_config = Application.get_env(:prometheus, MetricsExporter, [])
- ip_whitelist = List.wrap(prometheus_config[:ip_whitelist])
-
- cond do
- !prometheus_config[:enabled] ->
- conn
-
- ip_whitelist != [] and
- !Enum.find(ip_whitelist, fn ip ->
- Pleroma.Helpers.InetHelper.parse_address(ip) == {:ok, conn.remote_ip}
- end) ->
- conn
-
- true ->
- MetricsExporter.call(conn, opts)
- end
- end
- end
-
- plug(PipelineInstrumenter)
-
- plug(MetricsExporterCaller)
-
plug(Pleroma.Web.Router)
@doc """
diff --git a/lib/pleroma/web/fallback/redirect_controller.ex b/lib/pleroma/web/fallback/redirect_controller.ex
index 1a86f7a53..4a0885fab 100644
--- a/lib/pleroma/web/fallback/redirect_controller.ex
+++ b/lib/pleroma/web/fallback/redirect_controller.ex
@@ -17,10 +17,28 @@ def api_not_implemented(conn, _params) do
|> json(%{error: "Not implemented"})
end
+ def add_generated_metadata(page_content, extra \\ "") do
+    title = "<title>#{Pleroma.Config.get([:instance, :name])}</title>"
+    favicon = "<link rel=\"icon\" href=\"#{Pleroma.Config.get([:instance, :favicon])}\">"
+    manifest = "<link rel=\"manifest\" href=\"/manifest.json\">"
+
+ page_content
+ |> String.replace(
+      "<!--server-generated-meta-->",
+ title <> favicon <> manifest <> extra
+ )
+ end
+
def redirector(conn, _params, code \\ 200) do
+ {:ok, index_content} = File.read(index_file_path())
+
+ response =
+ index_content
+ |> add_generated_metadata()
+
conn
|> put_resp_content_type("text/html")
- |> send_file(code, index_file_path())
+ |> send_resp(code, response)
end
def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id} = params) do
@@ -34,14 +52,12 @@ def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id}
def redirector_with_meta(conn, params) do
{:ok, index_content} = File.read(index_file_path())
-
tags = build_tags(conn, params)
preloads = preload_data(conn, params)
-    title = "<title>#{Pleroma.Config.get([:instance, :name])}</title>"
response =
index_content
-      |> String.replace("<!--server-generated-meta-->", tags <> preloads <> title)
+ |> add_generated_metadata(tags <> preloads)
conn
|> put_resp_content_type("text/html")
@@ -55,11 +71,10 @@ def redirector_with_preload(conn, %{"path" => ["pleroma", "admin"]}) do
def redirector_with_preload(conn, params) do
{:ok, index_content} = File.read(index_file_path())
preloads = preload_data(conn, params)
-    title = "<title>#{Pleroma.Config.get([:instance, :name])}</title>"
response =
index_content
-      |> String.replace("<!--server-generated-meta-->", preloads <> title)
+ |> add_generated_metadata(preloads)
conn
|> put_resp_content_type("text/html")
diff --git a/lib/pleroma/web/federator.ex b/lib/pleroma/web/federator.ex
index e7feefc07..1f2c3835a 100644
--- a/lib/pleroma/web/federator.ex
+++ b/lib/pleroma/web/federator.ex
@@ -6,10 +6,9 @@ defmodule Pleroma.Web.Federator do
alias Pleroma.Activity
alias Pleroma.Object.Containment
alias Pleroma.User
- alias Pleroma.Web.ActivityPub.ActivityPub
+ alias Pleroma.Web.ActivityPub.Publisher
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.Utils
- alias Pleroma.Web.Federator.Publisher
alias Pleroma.Workers.PublisherWorker
alias Pleroma.Workers.ReceiverWorker
@@ -36,6 +35,17 @@ def allowed_thread_distance?(distance) do
end
# Client API
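+  # Deliveries that include the raw request headers are enqueued with a 20 second
+  # timeout and a reduced priority; Delete activities are deprioritized even further,
+  # since they are the least time-sensitive.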
+ def incoming_ap_doc(%{params: params, req_headers: req_headers}) do
+ ReceiverWorker.enqueue(
+ "incoming_ap_doc",
+ %{"req_headers" => req_headers, "params" => params, "timeout" => :timer.seconds(20)},
+ priority: 2
+ )
+ end
+
+ def incoming_ap_doc(%{"type" => "Delete"} = params) do
+ ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3)
+ end
def incoming_ap_doc(params) do
ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params})
@@ -47,24 +57,25 @@ def publish(%{id: "pleroma:fakeid"} = activity) do
end
@impl true
- def publish(activity) do
- PublisherWorker.enqueue("publish", %{"activity_id" => activity.id})
+ def publish(%Pleroma.Activity{data: %{"type" => type}} = activity) do
+ PublisherWorker.enqueue("publish", %{"activity_id" => activity.id},
+ priority: publish_priority(type)
+ )
end
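+
+  # Deletes are the least time-critical activities, so they are published at a lower
+  # priority (a higher Oban priority number) than everything else.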
+ defp publish_priority("Delete"), do: 3
+ defp publish_priority(_), do: 0
+
# Job Worker Callbacks
- @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
- def perform(:publish_one, module, params) do
- apply(module, :publish_one, [params])
- end
+ @spec perform(atom(), any()) :: {:ok, any()} | {:error, any()}
+ def perform(:publish_one, params), do: Publisher.publish_one(params)
def perform(:publish, activity) do
Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)
- with %User{} = actor <- User.get_cached_by_ap_id(activity.data["actor"]),
- {:ok, actor} <- User.ensure_keys_present(actor) do
- Publisher.publish(actor, activity)
- end
+ %User{} = actor = User.get_cached_by_ap_id(activity.data["actor"])
+ Publisher.publish(actor, activity)
end
def perform(:incoming_ap_doc, params) do
@@ -77,7 +88,7 @@ def perform(:incoming_ap_doc, params) do
# NOTE: we use the actor ID to do the containment, this is fine because an
# actor shouldn't be acting on objects outside their own AP server.
- with {_, {:ok, _user}} <- {:actor, ap_enabled_actor(actor)},
+ with {_, {:ok, _user}} <- {:actor, User.get_or_fetch_by_ap_id(actor)},
nil <- Activity.normalize(params["id"]),
{_, :ok} <-
{:correct_origin?, Containment.contain_origin_from_id(actor, params)},
@@ -107,14 +118,4 @@ def perform(:incoming_ap_doc, params) do
{:error, e}
end
end
-
- def ap_enabled_actor(id) do
- user = User.get_cached_by_ap_id(id)
-
- if User.ap_enabled?(user) do
- {:ok, user}
- else
- ActivityPub.make_user_from_ap_id(id)
- end
- end
end
diff --git a/lib/pleroma/web/federator/publisher.ex b/lib/pleroma/web/federator/publisher.ex
deleted file mode 100644
index a45796e9d..000000000
--- a/lib/pleroma/web/federator/publisher.ex
+++ /dev/null
@@ -1,109 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.Federator.Publisher do
- alias Pleroma.Activity
- alias Pleroma.Config
- alias Pleroma.User
- alias Pleroma.Workers.PublisherWorker
-
- require Logger
-
- @moduledoc """
- Defines the contract used by federation implementations to publish messages to
- their peers.
- """
-
- @doc """
- Determine whether an activity can be relayed using the federation module.
- """
- @callback is_representable?(Pleroma.Activity.t()) :: boolean()
-
- @doc """
- Relays an activity to a specified peer, determined by the parameters. The
- parameters used are controlled by the federation module.
- """
- @callback publish_one(Map.t()) :: {:ok, Map.t()} | {:error, any()}
-
- @doc """
- Enqueue publishing a single activity.
- """
- @spec enqueue_one(module(), Map.t()) :: :ok
- def enqueue_one(module, %{} = params) do
- PublisherWorker.enqueue(
- "publish_one",
- %{"module" => to_string(module), "params" => params}
- )
- end
-
- @doc """
- Relays an activity to all specified peers.
- """
- @callback publish(User.t(), Activity.t()) :: :ok | {:error, any()}
-
- @spec publish(User.t(), Activity.t()) :: :ok
- def publish(%User{} = user, %Activity{} = activity) do
- Config.get([:instance, :federation_publisher_modules])
- |> Enum.each(fn module ->
- if module.is_representable?(activity) do
- Logger.debug("Publishing #{activity.data["id"]} using #{inspect(module)}")
- module.publish(user, activity)
- end
- end)
-
- :ok
- end
-
- @doc """
- Gathers links used by an outgoing federation module for WebFinger output.
- """
- @callback gather_webfinger_links(User.t()) :: list()
-
- @spec gather_webfinger_links(User.t()) :: list()
- def gather_webfinger_links(%User{} = user) do
- Config.get([:instance, :federation_publisher_modules])
- |> Enum.reduce([], fn module, links ->
- links ++ module.gather_webfinger_links(user)
- end)
- end
-
- @doc """
- Gathers nodeinfo protocol names supported by the federation module.
- """
- @callback gather_nodeinfo_protocol_names() :: list()
-
- @spec gather_nodeinfo_protocol_names() :: list()
- def gather_nodeinfo_protocol_names do
- Config.get([:instance, :federation_publisher_modules])
- |> Enum.reduce([], fn module, links ->
- links ++ module.gather_nodeinfo_protocol_names()
- end)
- end
-
- @doc """
- Gathers a set of remote users given an IR envelope.
- """
- def remote_users(%User{id: user_id}, %{data: %{"to" => to} = data}) do
- cc = Map.get(data, "cc", [])
-
- bcc =
- data
- |> Map.get("bcc", [])
- |> Enum.reduce([], fn ap_id, bcc ->
- case Pleroma.List.get_by_ap_id(ap_id) do
- %Pleroma.List{user_id: ^user_id} = list ->
- {:ok, following} = Pleroma.List.get_following(list)
- bcc ++ Enum.map(following, & &1.ap_id)
-
- _ ->
- bcc
- end
- end)
-
- [to, cc, bcc]
- |> Enum.concat()
- |> Enum.map(&User.get_cached_by_ap_id/1)
- |> Enum.filter(fn user -> user && !user.local end)
- end
-end
diff --git a/lib/pleroma/web/feed/feed_view.ex b/lib/pleroma/web/feed/feed_view.ex
index 35a5f9482..e1ee33d62 100644
--- a/lib/pleroma/web/feed/feed_view.ex
+++ b/lib/pleroma/web/feed/feed_view.ex
@@ -6,7 +6,6 @@ defmodule Pleroma.Web.Feed.FeedView do
use Phoenix.HTML
use Pleroma.Web, :view
- alias Pleroma.Formatter
alias Pleroma.Object
alias Pleroma.User
alias Pleroma.Web.Gettext
@@ -14,14 +13,8 @@ defmodule Pleroma.Web.Feed.FeedView do
require Pleroma.Constants
- @spec pub_date(String.t() | DateTime.t()) :: String.t()
- def pub_date(date) when is_binary(date) do
- date
- |> Timex.parse!("{ISO:Extended}")
- |> pub_date
- end
-
- def pub_date(%DateTime{} = date), do: Timex.format!(date, "{RFC822}")
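+  # Fixed English day and month abbreviations, used to build RFC 2822 dates for RSS
+  # feeds without relying on locale-aware formatting.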
+ @days ~w(Mon Tue Wed Thu Fri Sat Sun)
+ @months ~w(Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec)
def prepare_activity(activity, opts \\ []) do
object = Object.normalize(activity, fetch: false)
@@ -41,13 +34,18 @@ def prepare_activity(activity, opts \\ []) do
def most_recent_update(activities) do
with %{updated_at: updated_at} <- List.first(activities) do
- NaiveDateTime.to_iso8601(updated_at)
+ to_rfc3339(updated_at)
end
end
- def most_recent_update(activities, user) do
+ def most_recent_update(activities, user, :atom) do
(List.first(activities) || user).updated_at
- |> NaiveDateTime.to_iso8601()
+ |> to_rfc3339()
+ end
+
+ def most_recent_update(activities, user, :rss) do
+ (List.first(activities) || user).updated_at
+ |> to_rfc2822()
end
def feed_logo do
@@ -61,6 +59,10 @@ def feed_logo do
|> MediaProxy.url()
end
+ def email(user) do
+ user.nickname <> "@" <> Pleroma.Web.Endpoint.host()
+ end
+
def logo(user) do
user
|> User.avatar_url()
@@ -69,18 +71,35 @@ def logo(user) do
def last_activity(activities), do: List.last(activities)
- def activity_title(%{"content" => content}, opts \\ %{}) do
- content
- |> Pleroma.Web.Metadata.Utils.scrub_html()
- |> Pleroma.Emoji.Formatter.demojify()
- |> Formatter.truncate(opts[:max_length], opts[:omission])
- |> escape()
+ def activity_title(%{"content" => content} = data, opts \\ %{}) do
+ summary = Map.get(data, "summary", "")
+
+ title =
+ cond do
+ summary != "" -> summary
+ content != "" -> activity_content(data)
+ true -> "a post"
+ end
+
+ title
+ |> Pleroma.Web.Metadata.Utils.scrub_html_and_truncate(opts[:max_length], opts[:omission])
+ |> HtmlEntities.encode()
+ end
+
+ def activity_description(data) do
+ content = activity_content(data)
+ summary = data["summary"]
+
+ cond do
+ content != "" -> escape(content)
+ summary != "" -> escape(summary)
+ true -> escape(data["type"])
+ end
end
def activity_content(%{"content" => content}) do
content
|> String.replace(~r/[\n\r]/, "")
- |> escape()
end
def activity_content(_), do: ""
@@ -112,4 +131,60 @@ def escape(html) do
|> html_escape()
|> safe_to_string()
end
+
+ @spec to_rfc3339(String.t() | NaiveDateTime.t()) :: String.t()
+ def to_rfc3339(date) when is_binary(date) do
+ date
+ |> Timex.parse!("{ISO:Extended}")
+ |> to_rfc3339()
+ end
+
+ def to_rfc3339(nd) do
+ nd
+ |> Timex.to_datetime()
+ |> Timex.format!("{RFC3339}")
+ end
+
+ @spec to_rfc2822(String.t() | DateTime.t() | NaiveDateTime.t()) :: String.t()
+ def to_rfc2822(datestr) when is_binary(datestr) do
+ datestr
+ |> Timex.parse!("{ISO:Extended}")
+ |> to_rfc2822()
+ end
+
+ def to_rfc2822(%DateTime{} = date) do
+ date
+ |> DateTime.to_naive()
+ |> NaiveDateTime.to_erl()
+ |> rfc2822_from_erl()
+ end
+
+ def to_rfc2822(nd) do
+ nd
+ |> Timex.to_datetime()
+ |> DateTime.to_naive()
+ |> NaiveDateTime.to_erl()
+ |> rfc2822_from_erl()
+ end
+
+ @doc """
+ Builds a RFC2822 timestamp from an Erlang timestamp
+ [RFC2822 3.3 - Date and Time Specification](https://tools.ietf.org/html/rfc2822#section-3.3)
+ This function always assumes the Erlang timestamp is in Universal time, not Local time
+ """
+ def rfc2822_from_erl({{year, month, day} = date, {hour, minute, second}}) do
+ day_name = Enum.at(@days, :calendar.day_of_the_week(date) - 1)
+ month_name = Enum.at(@months, month - 1)
+
+ date_part = "#{day_name}, #{day} #{month_name} #{year}"
+ time_part = "#{pad(hour)}:#{pad(minute)}:#{pad(second)}"
+
+ date_part <> " " <> time_part <> " +0000"
+ end
+
+ defp pad(num) do
+ num
+ |> Integer.to_string()
+ |> String.pad_leading(2, "0")
+ end
end
diff --git a/lib/pleroma/web/gettext.ex b/lib/pleroma/web/gettext.ex
index 5ef49d841..1fa3f9768 100644
--- a/lib/pleroma/web/gettext.ex
+++ b/lib/pleroma/web/gettext.ex
@@ -85,12 +85,12 @@ def get_locales do
Process.get({Pleroma.Web.Gettext, :locales}, [])
end
- def is_locale_list(locales) do
+ def locale_list?(locales) do
Enum.all?(locales, &is_binary/1)
end
def put_locales(locales) do
- if is_locale_list(locales) do
+ if locale_list?(locales) do
Process.put({Pleroma.Web.Gettext, :locales}, Enum.uniq(locales))
Gettext.put_locale(Enum.at(locales, 0, Gettext.get_locale()))
:ok
diff --git a/lib/pleroma/web/mastodon_api/controllers/account_controller.ex b/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
index 50c12a1b1..47e6f0a64 100644
--- a/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
@@ -30,7 +30,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
alias Pleroma.Web.TwitterAPI.TwitterAPI
alias Pleroma.Web.Utils.Params
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
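+  # With replace_params: false the validated params and body are kept under
+  # conn.private.open_api_spex rather than replacing conn.params/conn.body_params,
+  # which is why the actions below pattern-match on conn.private.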
plug(:skip_auth when action in [:create, :lookup])
@@ -72,25 +72,30 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
%{scopes: ["follow", "write:blocks"]} when action in [:block, :unblock]
)
- plug(OAuthScopesPlug, %{scopes: ["read:follows"]} when action == :relationships)
+ plug(
+ OAuthScopesPlug,
+ %{scopes: ["read:follows"]} when action in [:relationships, :familiar_followers]
+ )
plug(
OAuthScopesPlug,
- %{scopes: ["follow", "write:follows"]} when action in [:follow_by_uri, :follow, :unfollow]
+ %{scopes: ["follow", "write:follows"]}
+ when action in [:follow_by_uri, :follow, :unfollow, :remove_from_followers]
)
plug(OAuthScopesPlug, %{scopes: ["follow", "read:mutes"]} when action == :mutes)
plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action in [:mute, :unmute])
- @relationship_actions [:follow, :unfollow]
+ @relationship_actions [:follow, :unfollow, :remove_from_followers]
@needs_account ~W(
- followers following lists follow unfollow mute unmute block unblock note endorse unendorse
+ followers following lists follow unfollow mute unmute block unblock
+ note endorse unendorse remove_from_followers
)a
plug(
RateLimiter,
- [name: :relation_id_action, params: [:id, :uri]] when action in @relationship_actions
+ [name: :relation_id_action, params: ["id", "uri"]] when action in @relationship_actions
)
plug(RateLimiter, [name: :relations_actions] when action in @relationship_actions)
@@ -102,7 +107,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.AccountOperation
@doc "POST /api/v1/accounts"
- def create(%{assigns: %{app: app}, body_params: params} = conn, _params) do
+ def create(
+ %{assigns: %{app: app}, private: %{open_api_spex: %{body_params: params}}} = conn,
+ _params
+ ) do
with :ok <- validate_email_param(params),
:ok <- TwitterAPI.validate_captcha(app, params),
{:ok, user} <- TwitterAPI.register_user(params),
@@ -166,7 +174,10 @@ def verify_credentials(%{assigns: %{user: user}} = conn, _) do
end
@doc "PATCH /api/v1/accounts/update_credentials"
- def update_credentials(%{assigns: %{user: user}, body_params: params} = conn, _params) do
+ def update_credentials(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: params}}} = conn,
+ _params
+ ) do
params =
params
|> Enum.filter(fn {_, value} -> not is_nil(value) end)
@@ -233,7 +244,7 @@ def update_credentials(%{assigns: %{user: user}, body_params: params} = conn, _p
# So we first build the normal local changeset, then apply it to the
# user data, but don't persist it. With this, we generate the object
# data for our update activity. We feed this and the changeset as meta
- # inforation into the pipeline, where they will be properly updated and
+ # information into the pipeline, where they will be properly updated and
# federated.
with changeset <- User.update_changeset(user, user_params),
{:ok, unpersisted_user} <- Ecto.Changeset.apply_action(changeset, :update),
@@ -252,23 +263,45 @@ def update_credentials(%{assigns: %{user: user}, body_params: params} = conn, _p
with_pleroma_settings: true
)
else
- _e -> render_error(conn, :forbidden, "Invalid request")
+ {:error, %Ecto.Changeset{errors: [avatar: {"file is too large", _}]}} ->
+ render_error(conn, :request_entity_too_large, "File is too large")
+
+ {:error, %Ecto.Changeset{errors: [banner: {"file is too large", _}]}} ->
+ render_error(conn, :request_entity_too_large, "File is too large")
+
+ {:error, %Ecto.Changeset{errors: [background: {"file is too large", _}]}} ->
+ render_error(conn, :request_entity_too_large, "File is too large")
+
+ {:error, %Ecto.Changeset{errors: [{:bio, {_, _}} | _]}} ->
+ render_error(conn, :request_entity_too_large, "Bio is too long")
+
+ {:error, %Ecto.Changeset{errors: [{:name, {_, _}} | _]}} ->
+ render_error(conn, :request_entity_too_large, "Name is too long")
+
+ {:error, %Ecto.Changeset{errors: [{:fields, {"invalid", _}} | _]}} ->
+ render_error(conn, :request_entity_too_large, "One or more field entries are too long")
+
+ {:error, %Ecto.Changeset{errors: [{:fields, {_, _}} | _]}} ->
+ render_error(conn, :request_entity_too_large, "Too many field entries")
+
+ _e ->
+ render_error(conn, :forbidden, "Invalid request")
end
end
defp normalize_fields_attributes(fields) do
- if Enum.all?(fields, &is_tuple/1) do
- Enum.map(fields, fn {_, v} -> v end)
- else
- Enum.map(fields, fn
- %{} = field -> %{"name" => field.name, "value" => field.value}
- field -> field
- end)
- end
+ if(Enum.all?(fields, &is_tuple/1), do: Enum.map(fields, fn {_, v} -> v end), else: fields)
+ |> Enum.map(fn
+ %{} = field -> %{"name" => field.name, "value" => field.value}
+ field -> field
+ end)
end
@doc "GET /api/v1/accounts/relationships"
- def relationships(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def relationships(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
targets = User.get_all_by_ids(List.wrap(id))
render(conn, "relationships.json", user: user, targets: targets)
@@ -278,7 +311,13 @@ def relationships(%{assigns: %{user: user}} = conn, %{id: id}) do
def relationships(%{assigns: %{user: _user}} = conn, _), do: json(conn, [])
@doc "GET /api/v1/accounts/:id"
- def show(%{assigns: %{user: for_user}} = conn, %{id: nickname_or_id} = params) do
+ def show(
+ %{
+ assigns: %{user: for_user},
+ private: %{open_api_spex: %{params: %{id: nickname_or_id} = params}}
+ } = conn,
+ _params
+ ) do
with %User{} = user <- User.get_cached_by_nickname_or_id(nickname_or_id, for: for_user),
:visible <- User.visible_for(user, for_user) do
render(conn, "show.json",
@@ -292,7 +331,10 @@ def show(%{assigns: %{user: for_user}} = conn, %{id: nickname_or_id} = params) d
end
@doc "GET /api/v1/accounts/:id/statuses"
- def statuses(%{assigns: %{user: reading_user}} = conn, params) do
+ def statuses(
+ %{assigns: %{user: reading_user}, private: %{open_api_spex: %{params: params}}} = conn,
+ _params
+ ) do
with %User{} = user <- User.get_cached_by_nickname_or_id(params.id, for: reading_user),
:visible <- User.visible_for(user, reading_user) do
params =
@@ -327,7 +369,11 @@ defp user_visibility_error(conn, error) do
end
@doc "GET /api/v1/accounts/:id/followers"
- def followers(%{assigns: %{user: for_user, account: user}} = conn, params) do
+ def followers(
+ %{assigns: %{user: for_user, account: user}, private: %{open_api_spex: %{params: params}}} =
+ conn,
+ _params
+ ) do
params =
params
|> Enum.map(fn {key, value} -> {to_string(key), value} end)
@@ -352,7 +398,11 @@ def followers(%{assigns: %{user: for_user, account: user}} = conn, params) do
end
@doc "GET /api/v1/accounts/:id/following"
- def following(%{assigns: %{user: for_user, account: user}} = conn, params) do
+ def following(
+ %{assigns: %{user: for_user, account: user}, private: %{open_api_spex: %{params: params}}} =
+ conn,
+ _params
+ ) do
params =
params
|> Enum.map(fn {key, value} -> {to_string(key), value} end)
@@ -390,7 +440,13 @@ def follow(%{assigns: %{user: %{id: id}, account: %{id: id}}}, _params) do
{:error, "Can not follow yourself"}
end
- def follow(%{body_params: params, assigns: %{user: follower, account: followed}} = conn, _) do
+ def follow(
+ %{
+ assigns: %{user: follower, account: followed},
+ private: %{open_api_spex: %{body_params: params}}
+ } = conn,
+ _
+ ) do
with {:ok, follower} <- MastodonAPI.follow(follower, followed, params) do
render(conn, "relationship.json", user: follower, target: followed)
else
@@ -410,7 +466,17 @@ def unfollow(%{assigns: %{user: follower, account: followed}} = conn, _params) d
end
@doc "POST /api/v1/accounts/:id/mute"
- def mute(%{assigns: %{user: muter, account: muted}, body_params: params} = conn, _params) do
+ def mute(
+ %{
+ assigns: %{user: muter, account: muted},
+ private: %{open_api_spex: %{body_params: params}}
+ } = conn,
+ _params
+ ) do
+ params =
+ params
+ |> Map.put_new(:duration, Map.get(params, :expires_in, 0))
+
with {:ok, _user_relationships} <- User.mute(muter, muted, params) do
render(conn, "relationship.json", user: muter, target: muted)
else
@@ -447,7 +513,10 @@ def unblock(%{assigns: %{user: blocker, account: blocked}} = conn, _params) do
@doc "POST /api/v1/accounts/:id/note"
def note(
- %{assigns: %{user: noter, account: target}, body_params: %{comment: comment}} = conn,
+ %{
+ assigns: %{user: noter, account: target},
+ private: %{open_api_spex: %{body_params: %{comment: comment}}}
+ } = conn,
_params
) do
with {:ok, _user_note} <- UserNote.create(noter, target, comment) do
@@ -473,8 +542,22 @@ def unendorse(%{assigns: %{user: endorser, account: endorsed}} = conn, _params)
end
end
+ @doc "POST /api/v1/accounts/:id/remove_from_followers"
+ def remove_from_followers(%{assigns: %{user: %{id: id}, account: %{id: id}}}, _params) do
+ {:error, "Can not unfollow yourself"}
+ end
+
+ def remove_from_followers(%{assigns: %{user: followed, account: follower}} = conn, _params) do
+ with {:ok, follower} <- CommonAPI.reject_follow_request(follower, followed) do
+ render(conn, "relationship.json", user: followed, target: follower)
+ else
+ nil ->
+ render_error(conn, :not_found, "Record not found")
+ end
+ end
+
@doc "POST /api/v1/follows"
- def follow_by_uri(%{body_params: %{uri: uri}} = conn, _) do
+ def follow_by_uri(%{private: %{open_api_spex: %{body_params: %{uri: uri}}}} = conn, _) do
case User.get_cached_by_nickname(uri) do
%User{} = user ->
conn
@@ -491,7 +574,25 @@ def mutes(%{assigns: %{user: user}} = conn, params) do
users =
user
|> User.muted_users_relation(_restrict_deactivated = true)
- |> Pleroma.Pagination.fetch_paginated(Map.put(params, :skip_order, true))
+ |> Pleroma.Pagination.fetch_paginated(params)
+
+ conn
+ |> add_link_headers(users)
+ |> render("index.json",
+ users: users,
+ for: user,
+ as: :user,
+ embed_relationships: embed_relationships?(params),
+ mutes: true
+ )
+ end
+
+ @doc "GET /api/v1/blocks"
+ def blocks(%{assigns: %{user: user}} = conn, params) do
+ users =
+ user
+ |> User.blocked_users_relation(_restrict_deactivated = true)
+ |> Pleroma.Pagination.fetch_paginated(params)
conn
|> add_link_headers(users)
@@ -503,20 +604,8 @@ def mutes(%{assigns: %{user: user}} = conn, params) do
)
end
- @doc "GET /api/v1/blocks"
- def blocks(%{assigns: %{user: user}} = conn, params) do
- users =
- user
- |> User.blocked_users_relation(_restrict_deactivated = true)
- |> Pleroma.Pagination.fetch_paginated(Map.put(params, :skip_order, true))
-
- conn
- |> add_link_headers(users)
- |> render("index.json", users: users, for: user, as: :user)
- end
-
@doc "GET /api/v1/accounts/lookup"
- def lookup(conn, %{acct: nickname} = _params) do
+ def lookup(%{private: %{open_api_spex: %{params: %{acct: nickname}}}} = conn, _params) do
with %User{} = user <- User.get_by_nickname(nickname) do
render(conn, "show.json",
user: user,
@@ -543,6 +632,35 @@ def endorsements(%{assigns: %{user: user}} = conn, params) do
)
end
+ @doc "GET /api/v1/accounts/familiar_followers"
+ def familiar_followers(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _id
+ ) do
+ users =
+ User.get_all_by_ids(List.wrap(id))
+ |> Enum.map(&%{id: &1.id, accounts: get_familiar_followers(&1, user)})
+
+ conn
+ |> render("familiar_followers.json",
+ for: user,
+ users: users,
+ as: :user
+ )
+ end
+
+ defp get_familiar_followers(%{id: id} = user, %{id: id}) do
+ User.get_familiar_followers(user, user)
+ end
+
+ defp get_familiar_followers(%{hide_followers: true}, _current_user) do
+ []
+ end
+
+ defp get_familiar_followers(user, current_user) do
+ User.get_familiar_followers(user, current_user)
+ end
+
@doc "GET /api/v1/identity_proofs"
def identity_proofs(conn, params), do: MastodonAPIController.empty_array(conn, params)
end
diff --git a/lib/pleroma/web/mastodon_api/controllers/directory_controller.ex b/lib/pleroma/web/mastodon_api/controllers/directory_controller.ex
index 253f06cfb..f89425966 100644
--- a/lib/pleroma/web/mastodon_api/controllers/directory_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/directory_controller.ex
@@ -15,7 +15,7 @@ defmodule Pleroma.Web.MastodonAPI.DirectoryController do
plug(Pleroma.Web.ApiSpec.CastAndValidate)
- plug(:skip_auth when action == "index")
+ plug(:skip_auth when action == :index)
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.DirectoryOperation
diff --git a/lib/pleroma/web/mastodon_api/controllers/domain_block_controller.ex b/lib/pleroma/web/mastodon_api/controllers/domain_block_controller.ex
index b2e347ed9..4615794a1 100644
--- a/lib/pleroma/web/mastodon_api/controllers/domain_block_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/domain_block_controller.ex
@@ -8,7 +8,7 @@ defmodule Pleroma.Web.MastodonAPI.DomainBlockController do
alias Pleroma.User
alias Pleroma.Web.Plugs.OAuthScopesPlug
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.DomainBlockOperation
plug(
@@ -27,23 +27,31 @@ def index(%{assigns: %{user: user}} = conn, _) do
end
@doc "POST /api/v1/domain_blocks"
- def create(%{assigns: %{user: blocker}, body_params: %{domain: domain}} = conn, _params) do
+ def create(
+ %{assigns: %{user: blocker}, private: %{open_api_spex: %{body_params: %{domain: domain}}}} =
+ conn,
+ _params
+ ) do
User.block_domain(blocker, domain)
json(conn, %{})
end
- def create(%{assigns: %{user: blocker}} = conn, %{domain: domain}) do
+ def create(%{assigns: %{user: blocker}} = conn, %{"domain" => domain}) do
User.block_domain(blocker, domain)
json(conn, %{})
end
@doc "DELETE /api/v1/domain_blocks"
- def delete(%{assigns: %{user: blocker}, body_params: %{domain: domain}} = conn, _params) do
+ def delete(
+ %{assigns: %{user: blocker}, private: %{open_api_spex: %{body_params: %{domain: domain}}}} =
+ conn,
+ _params
+ ) do
User.unblock_domain(blocker, domain)
json(conn, %{})
end
- def delete(%{assigns: %{user: blocker}} = conn, %{domain: domain}) do
+ def delete(%{assigns: %{user: blocker}} = conn, %{"domain" => domain}) do
User.unblock_domain(blocker, domain)
json(conn, %{})
end
diff --git a/lib/pleroma/web/mastodon_api/controllers/follow_request_controller.ex b/lib/pleroma/web/mastodon_api/controllers/follow_request_controller.ex
index ba6d074cc..6eee55d1b 100644
--- a/lib/pleroma/web/mastodon_api/controllers/follow_request_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/follow_request_controller.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.MastodonAPI.FollowRequestController do
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Plugs.OAuthScopesPlug
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(:assign_follower when action != :index)
action_fallback(:errors)
@@ -44,7 +44,7 @@ def reject(%{assigns: %{user: followed, follower: follower}} = conn, _params) do
end
end
- defp assign_follower(%{params: %{id: id}} = conn, _) do
+ defp assign_follower(%{private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
case User.get_cached_by_id(id) do
%User{} = follower -> assign(conn, :follower, follower)
nil -> Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found}) |> halt()
diff --git a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex
index 6410e872c..b97b0e476 100644
--- a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Web.MastodonAPI.InstanceController do
plug(Pleroma.Web.ApiSpec.CastAndValidate)
- plug(:skip_auth when action in [:show, :peers])
+ plug(:skip_auth when action in [:show, :show2, :peers])
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.InstanceOperation
@@ -16,8 +16,18 @@ def show(conn, _params) do
render(conn, "show.json")
end
+ @doc "GET /api/v2/instance"
+ def show2(conn, _params) do
+ render(conn, "show2.json")
+ end
+
@doc "GET /api/v1/instance/peers"
def peers(conn, _params) do
json(conn, Pleroma.Stats.get_peers())
end
+
+ @doc "GET /api/v1/instance/rules"
+ def rules(conn, _params) do
+ render(conn, "rules.json")
+ end
end
diff --git a/lib/pleroma/web/mastodon_api/controllers/list_controller.ex b/lib/pleroma/web/mastodon_api/controllers/list_controller.ex
index 2117aae3a..3bfc365a5 100644
--- a/lib/pleroma/web/mastodon_api/controllers/list_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/list_controller.ex
@@ -11,7 +11,7 @@ defmodule Pleroma.Web.MastodonAPI.ListController do
@oauth_read_actions [:index, :show, :list_accounts]
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(:list_by_id_and_user when action not in [:index, :create])
plug(OAuthScopesPlug, %{scopes: ["read:lists"]} when action in @oauth_read_actions)
plug(OAuthScopesPlug, %{scopes: ["write:lists"]} when action not in @oauth_read_actions)
@@ -21,25 +21,33 @@ defmodule Pleroma.Web.MastodonAPI.ListController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.ListOperation
# GET /api/v1/lists
- def index(%{assigns: %{user: user}} = conn, opts) do
- lists = Pleroma.List.for_user(user, opts)
+ def index(%{assigns: %{user: user}, private: %{open_api_spex: %{params: params}}} = conn, _) do
+ lists = Pleroma.List.for_user(user, params)
render(conn, "index.json", lists: lists)
end
# POST /api/v1/lists
- def create(%{assigns: %{user: user}, body_params: %{title: title}} = conn, _) do
+ def create(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: %{title: title}}}} =
+ conn,
+ _
+ ) do
with {:ok, %Pleroma.List{} = list} <- Pleroma.List.create(title, user) do
render(conn, "show.json", list: list)
end
end
  # GET /api/v1/lists/:id
def show(%{assigns: %{list: list}} = conn, _) do
render(conn, "show.json", list: list)
end
# PUT /api/v1/lists/:id
- def update(%{assigns: %{list: list}, body_params: %{title: title}} = conn, _) do
+ def update(
+ %{assigns: %{list: list}, private: %{open_api_spex: %{body_params: %{title: title}}}} =
+ conn,
+ _
+ ) do
with {:ok, list} <- Pleroma.List.rename(list, title) do
render(conn, "show.json", list: list)
end
@@ -62,7 +70,13 @@ def list_accounts(%{assigns: %{user: user, list: list}} = conn, _) do
end
# POST /api/v1/lists/:id/accounts
- def add_to_list(%{assigns: %{list: list}, body_params: %{account_ids: account_ids}} = conn, _) do
+ def add_to_list(
+ %{
+ assigns: %{list: list},
+ private: %{open_api_spex: %{body_params: %{account_ids: account_ids}}}
+ } = conn,
+ _
+ ) do
Enum.each(account_ids, fn account_id ->
with %User{} = followed <- User.get_cached_by_id(account_id) do
Pleroma.List.follow(list, followed)
@@ -74,9 +88,22 @@ def add_to_list(%{assigns: %{list: list}, body_params: %{account_ids: account_id
# DELETE /api/v1/lists/:id/accounts
def remove_from_list(
- %{assigns: %{list: list}, params: %{account_ids: account_ids}} = conn,
+ %{
+ private: %{open_api_spex: %{params: %{account_ids: account_ids}}}
+ } = conn,
_
) do
+ do_remove_from_list(conn, account_ids)
+ end
+
+ def remove_from_list(
+ %{private: %{open_api_spex: %{body_params: %{account_ids: account_ids}}}} = conn,
+ _
+ ) do
+ do_remove_from_list(conn, account_ids)
+ end
+
+ defp do_remove_from_list(%{assigns: %{list: list}} = conn, account_ids) do
Enum.each(account_ids, fn account_id ->
with %User{} = followed <- User.get_cached_by_id(account_id) do
Pleroma.List.unfollow(list, followed)
@@ -86,11 +113,10 @@ def remove_from_list(
json(conn, %{})
end
- def remove_from_list(%{body_params: params} = conn, _) do
- remove_from_list(%{conn | params: params}, %{})
- end
-
- defp list_by_id_and_user(%{assigns: %{user: user}, params: %{id: id}} = conn, _) do
+ defp list_by_id_and_user(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
case Pleroma.List.get(id, user) do
%Pleroma.List{} = list -> assign(conn, :list, list)
nil -> conn |> render_error(:not_found, "List not found") |> halt()
diff --git a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
index 7d9a63cf4..056bad844 100644
--- a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
@@ -12,7 +12,7 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do
action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
plug(Majic.Plug, [pool: Pleroma.MajicPool] when action in [:create, :create2])
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(OAuthScopesPlug, %{scopes: ["read:media"]} when action == :show)
plug(OAuthScopesPlug, %{scopes: ["write:media"]} when action != :show)
@@ -20,7 +20,11 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.MediaOperation
@doc "POST /api/v1/media"
- def create(%{assigns: %{user: user}, body_params: %{file: file} = data} = conn, _) do
+ def create(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: %{file: file} = data}}} =
+ conn,
+ _
+ ) do
with {:ok, object} <-
ActivityPub.upload(
file,
@@ -36,7 +40,11 @@ def create(%{assigns: %{user: user}, body_params: %{file: file} = data} = conn,
def create(_conn, _data), do: {:error, :bad_request}
@doc "POST /api/v2/media"
- def create2(%{assigns: %{user: user}, body_params: %{file: file} = data} = conn, _) do
+ def create2(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: %{file: file} = data}}} =
+ conn,
+ _
+ ) do
with {:ok, object} <-
ActivityPub.upload(
file,
@@ -54,7 +62,15 @@ def create2(%{assigns: %{user: user}, body_params: %{file: file} = data} = conn,
def create2(_conn, _data), do: {:error, :bad_request}
@doc "PUT /api/v1/media/:id"
- def update(%{assigns: %{user: user}, body_params: %{description: description}} = conn, %{id: id}) do
+ def update(
+ %{
+ assigns: %{user: user},
+ private: %{
+ open_api_spex: %{body_params: %{description: description}, params: %{id: id}}
+ }
+ } = conn,
+ _
+ ) do
with %Object{} = object <- Object.get_by_id(id),
:ok <- Object.authorize_access(object, user),
{:ok, %Object{data: data}} <- Object.update_data(object, %{"name" => description}) do
@@ -67,7 +83,7 @@ def update(%{assigns: %{user: user}, body_params: %{description: description}} =
def update(conn, data), do: show(conn, data)
@doc "GET /api/v1/media/:id"
- def show(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def show(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with %Object{data: data, id: object_id} = object <- Object.get_by_id(id),
:ok <- Object.authorize_access(object, user) do
attachment_data = Map.put(data, "id", object_id)
diff --git a/lib/pleroma/web/mastodon_api/controllers/notification_controller.ex b/lib/pleroma/web/mastodon_api/controllers/notification_controller.ex
index 932bc6423..afd83b785 100644
--- a/lib/pleroma/web/mastodon_api/controllers/notification_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/notification_controller.ex
@@ -13,7 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationController do
@oauth_read_actions [:show, :index]
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
OAuthScopesPlug,
@@ -24,8 +24,21 @@ defmodule Pleroma.Web.MastodonAPI.NotificationController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.NotificationOperation
+ @default_notification_types ~w{
+ mention
+ follow
+ follow_request
+ reblog
+ favourite
+ move
+ pleroma:emoji_reaction
+ poll
+ update
+ status
+ }
+
# GET /api/v1/notifications
- def index(conn, %{account_id: account_id} = params) do
+ def index(%{private: %{open_api_spex: %{params: %{account_id: account_id} = params}}} = conn, _) do
case Pleroma.User.get_cached_by_id(account_id) do
%{ap_id: account_ap_id} ->
params =
@@ -33,7 +46,7 @@ def index(conn, %{account_id: account_id} = params) do
|> Map.delete(:account_id)
|> Map.put(:account_ap_id, account_ap_id)
- index(conn, params)
+ do_get_notifications(conn, params)
_ ->
conn
@@ -42,20 +55,14 @@ def index(conn, %{account_id: account_id} = params) do
end
end
- @default_notification_types ~w{
- mention
- follow
- follow_request
- reblog
- favourite
- move
- pleroma:emoji_reaction
- poll
- }
- def index(%{assigns: %{user: user}} = conn, params) do
+ def index(%{private: %{open_api_spex: %{params: params}}} = conn, _) do
+ do_get_notifications(conn, params)
+ end
+
+ defp do_get_notifications(%{assigns: %{user: user}} = conn, params) do
params =
Map.new(params, fn {k, v} -> {to_string(k), v} end)
- |> Map.put_new("include_types", @default_notification_types)
+ |> Map.put_new("types", Map.get(params, :include_types, @default_notification_types))
notifications = MastodonAPI.get_notifications(user, params)
@@ -68,7 +75,7 @@ def index(%{assigns: %{user: user}} = conn, params) do
end
# GET /api/v1/notifications/:id
- def show(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def show(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with {:ok, notification} <- Notification.get(user, id) do
render(conn, "show.json", notification: notification, for: user)
else
@@ -87,8 +94,20 @@ def clear(%{assigns: %{user: user}} = conn, _params) do
# POST /api/v1/notifications/:id/dismiss
- def dismiss(%{assigns: %{user: user}} = conn, %{id: id} = _params) do
- with {:ok, _notif} <- Notification.dismiss(user, id) do
+ def dismiss(%{private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
+ do_dismiss(conn, id)
+ end
+
+ # POST /api/v1/notifications/dismiss (deprecated)
+ def dismiss_via_body(
+ %{private: %{open_api_spex: %{body_params: %{id: id}}}} = conn,
+ _
+ ) do
+ do_dismiss(conn, id)
+ end
+
+ defp do_dismiss(%{assigns: %{user: user}} = conn, notification_id) do
+ with {:ok, _notif} <- Notification.dismiss(user, notification_id) do
json(conn, %{})
else
{:error, reason} ->
@@ -98,13 +117,11 @@ def dismiss(%{assigns: %{user: user}} = conn, %{id: id} = _params) do
end
end
- # POST /api/v1/notifications/dismiss (deprecated)
- def dismiss_via_body(%{body_params: params} = conn, _) do
- dismiss(conn, params)
- end
-
# DELETE /api/v1/notifications/destroy_multiple
- def destroy_multiple(%{assigns: %{user: user}} = conn, %{ids: ids} = _params) do
+ def destroy_multiple(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{ids: ids}}}} = conn,
+ _
+ ) do
Notification.destroy_multiple(user, ids)
json(conn, %{})
end
diff --git a/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex b/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex
index 002c210d2..b074ee405 100644
--- a/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex
@@ -15,7 +15,7 @@ defmodule Pleroma.Web.MastodonAPI.PollController do
action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
OAuthScopesPlug,
@@ -29,7 +29,7 @@ defmodule Pleroma.Web.MastodonAPI.PollController do
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@doc "GET /api/v1/polls/:id"
- def show(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def show(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with %Object{} = object <- Object.get_by_id_and_maybe_refetch(id, interval: 60),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user) do
@@ -41,7 +41,13 @@ def show(%{assigns: %{user: user}} = conn, %{id: id}) do
end
@doc "POST /api/v1/polls/:id/votes"
- def vote(%{assigns: %{user: user}, body_params: %{choices: choices}} = conn, %{id: id}) do
+ def vote(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: %{choices: choices}, params: %{id: id}}}
+ } = conn,
+ _
+ ) do
with %Object{data: %{"type" => "Question"}} = object <- Object.get_by_id(id),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user),
diff --git a/lib/pleroma/web/mastodon_api/controllers/scheduled_activity_controller.ex b/lib/pleroma/web/mastodon_api/controllers/scheduled_activity_controller.ex
index 0392fcef1..1b7095ec5 100644
--- a/lib/pleroma/web/mastodon_api/controllers/scheduled_activity_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/scheduled_activity_controller.ex
@@ -13,7 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityController do
@oauth_read_actions [:show, :index]
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(OAuthScopesPlug, %{scopes: ["read:statuses"]} when action in @oauth_read_actions)
plug(OAuthScopesPlug, %{scopes: ["write:statuses"]} when action not in @oauth_read_actions)
plug(:assign_scheduled_activity when action != :index)
@@ -23,7 +23,7 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.ScheduledActivityOperation
@doc "GET /api/v1/scheduled_statuses"
- def index(%{assigns: %{user: user}} = conn, params) do
+ def index(%{assigns: %{user: user}, private: %{open_api_spex: %{params: params}}} = conn, _) do
params = Map.new(params, fn {key, value} -> {to_string(key), value} end)
with scheduled_activities <- MastodonAPI.get_scheduled_activities(user, params) do
@@ -39,7 +39,13 @@ def show(%{assigns: %{scheduled_activity: scheduled_activity}} = conn, _params)
end
@doc "PUT /api/v1/scheduled_statuses/:id"
- def update(%{assigns: %{scheduled_activity: scheduled_activity}, body_params: params} = conn, _) do
+ def update(
+ %{
+ assigns: %{scheduled_activity: scheduled_activity},
+ private: %{open_api_spex: %{body_params: params}}
+ } = conn,
+ _
+ ) do
with {:ok, scheduled_activity} <- ScheduledActivity.update(scheduled_activity, params) do
render(conn, "show.json", scheduled_activity: scheduled_activity)
end
@@ -52,7 +58,10 @@ def delete(%{assigns: %{scheduled_activity: scheduled_activity}} = conn, _params
end
end
- defp assign_scheduled_activity(%{assigns: %{user: user}, params: %{id: id}} = conn, _) do
+ defp assign_scheduled_activity(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
case ScheduledActivity.get(user, id) do
%ScheduledActivity{} = activity -> assign(conn, :scheduled_activity, activity)
nil -> Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found}) |> halt()
diff --git a/lib/pleroma/web/mastodon_api/controllers/search_controller.ex b/lib/pleroma/web/mastodon_api/controllers/search_controller.ex
index 5e6e04734..628aa311b 100644
--- a/lib/pleroma/web/mastodon_api/controllers/search_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/search_controller.ex
@@ -5,7 +5,6 @@
defmodule Pleroma.Web.MastodonAPI.SearchController do
use Pleroma.Web, :controller
- alias Pleroma.Activity
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ControllerHelper
@@ -19,7 +18,7 @@ defmodule Pleroma.Web.MastodonAPI.SearchController do
@search_limit 40
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
# Note: Mastodon doesn't allow unauthenticated access (requires read:accounts / read:search)
plug(OAuthScopesPlug, %{scopes: ["read:search"], fallback: :proceed_unauthenticated})
@@ -30,7 +29,11 @@ defmodule Pleroma.Web.MastodonAPI.SearchController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.SearchOperation
- def account_search(%{assigns: %{user: user}} = conn, %{q: query} = params) do
+ def account_search(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{q: query} = params}}} =
+ conn,
+ _
+ ) do
accounts = User.search(query, search_options(params, user))
conn
@@ -45,7 +48,12 @@ def account_search(%{assigns: %{user: user}} = conn, %{q: query} = params) do
def search2(conn, params), do: do_search(:v2, conn, params)
def search(conn, params), do: do_search(:v1, conn, params)
- defp do_search(version, %{assigns: %{user: user}} = conn, %{q: query} = params) do
+ defp do_search(
+ version,
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{q: query} = params}}} =
+ conn,
+ _
+ ) do
query = String.trim(query)
options = search_options(params, user)
timeout = Keyword.get(Repo.config(), :timeout, 15_000)
@@ -100,7 +108,7 @@ defp resource_search(_, "accounts", query, options) do
end
defp resource_search(_, "statuses", query, options) do
- statuses = with_fallback(fn -> Activity.search(options[:for_user], query, options) end)
+ statuses = with_fallback(fn -> Pleroma.Search.search(query, options) end)
StatusView.render("index.json",
activities: statuses,
@@ -148,7 +156,7 @@ defp prepare_tags(query, options) do
tags
end
- Pleroma.Pagination.paginate(tags, options)
+ Pleroma.Pagination.paginate_list(tags, options)
end
defp add_joined_tag(tags) do
diff --git a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
index 42a95bdc5..83e1bee54 100644
--- a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
@@ -12,6 +12,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
alias Pleroma.Activity
alias Pleroma.Bookmark
+ alias Pleroma.BookmarkFolder
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.ScheduledActivity
@@ -25,7 +26,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
alias Pleroma.Web.Plugs.OAuthScopesPlug
alias Pleroma.Web.Plugs.RateLimiter
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(:skip_public_check when action in [:index, :show])
@@ -37,8 +38,9 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
when action in [
:index,
:show,
- :card,
- :context
+ :context,
+ :show_history,
+ :show_source
]
)
@@ -49,7 +51,8 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
:create,
:delete,
:reblog,
- :unreblog
+ :unreblog,
+ :update
]
)
@@ -107,7 +110,11 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
`ids` query param is required
"""
- def index(%{assigns: %{user: user}} = conn, %{ids: ids} = params) do
+ def index(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{ids: ids} = params}}} =
+ conn,
+ _
+ ) do
limit = 100
activities =
@@ -131,7 +138,9 @@ def index(%{assigns: %{user: user}} = conn, %{ids: ids} = params) do
def create(
%{
assigns: %{user: user},
- body_params: %{status: _, scheduled_at: scheduled_at} = params
+ private: %{
+ open_api_spex: %{body_params: %{status: _, scheduled_at: scheduled_at} = params}
+ }
} = conn,
_
)
@@ -153,7 +162,13 @@ def create(
else
{:far_enough, _} ->
params = Map.drop(params, [:scheduled_at])
- create(%Plug.Conn{conn | body_params: params}, %{})
+
+ put_in(
+ conn,
+ [Access.key(:private), Access.key(:open_api_spex), Access.key(:body_params)],
+ params
+ )
+ |> do_create
error ->
error
@@ -161,7 +176,35 @@ def create(
end
# Creates a regular status
- def create(%{assigns: %{user: user}, body_params: %{status: _} = params} = conn, _) do
+ def create(
+ %{
+ private: %{open_api_spex: %{body_params: %{status: _}}}
+ } = conn,
+ _
+ ) do
+ do_create(conn)
+ end
+
+ def create(
+ %{
+ assigns: %{user: _user},
+ private: %{open_api_spex: %{body_params: %{media_ids: _} = params}}
+ } = conn,
+ _
+ ) do
+ params = Map.put(params, :status, "")
+
+ put_in(
+ conn,
+ [Access.key(:private), Access.key(:open_api_spex), Access.key(:body_params)],
+ params
+ )
+ |> do_create
+ end
+
+ defp do_create(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: params}}} = conn
+ ) do
params =
Map.put(params, :in_reply_to_status_id, params[:in_reply_to_id])
|> put_application(conn)
@@ -186,13 +229,74 @@ def create(%{assigns: %{user: user}, body_params: %{status: _} = params} = conn,
end
end
- def create(%{assigns: %{user: _user}, body_params: %{media_ids: _} = params} = conn, _) do
- params = Map.put(params, :status, "")
- create(%Plug.Conn{conn | body_params: params}, %{})
+ @doc "GET /api/v1/statuses/:id/history"
+ def show_history(
+ %{assigns: assigns, private: %{open_api_spex: %{params: %{id: id} = params}}} = conn,
+ _
+ ) do
+ with user = assigns[:user],
+ %Activity{} = activity <- Activity.get_by_id_with_object(id),
+ true <- Visibility.visible_for_user?(activity, user) do
+ try_render(conn, "history.json",
+ activity: activity,
+ for: user,
+ with_direct_conversation_id: true,
+ with_muted: Map.get(params, :with_muted, false)
+ )
+ else
+ _ -> {:error, :not_found}
+ end
+ end
+
+ @doc "GET /api/v1/statuses/:id/source"
+ def show_source(%{assigns: assigns, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
+ with user = assigns[:user],
+ %Activity{} = activity <- Activity.get_by_id_with_object(id),
+ true <- Visibility.visible_for_user?(activity, user) do
+ try_render(conn, "source.json",
+ activity: activity,
+ for: user
+ )
+ else
+ _ -> {:error, :not_found}
+ end
+ end
+
+ @doc "PUT /api/v1/statuses/:id"
+ def update(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: body_params, params: %{id: id} = params}}
+ } = conn,
+ _
+ ) do
+ with {_, %Activity{}} = {_, activity} <- {:activity, Activity.get_by_id_with_object(id)},
+ {_, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
+ {_, true} <- {:is_create, activity.data["type"] == "Create"},
+ actor <- Activity.user_actor(activity),
+ {_, true} <- {:own_status, actor.id == user.id},
+ changes <- body_params |> put_application(conn),
+ {_, {:ok, _update_activity}} <- {:pipeline, CommonAPI.update(user, activity, changes)},
+ {_, %Activity{}} = {_, activity} <- {:refetched, Activity.get_by_id_with_object(id)} do
+ try_render(conn, "show.json",
+ activity: activity,
+ for: user,
+ with_direct_conversation_id: true,
+ with_muted: Map.get(params, :with_muted, false)
+ )
+ else
+ {:own_status, _} -> {:error, :forbidden}
+ {:pipeline, _} -> {:error, :internal_server_error}
+ _ -> {:error, :not_found}
+ end
end
@doc "GET /api/v1/statuses/:id"
- def show(%{assigns: %{user: user}} = conn, %{id: id} = params) do
+ def show(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id} = params}}} =
+ conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id_with_object(id),
true <- Visibility.visible_for_user?(activity, user) do
try_render(conn, "show.json",
@@ -207,7 +311,7 @@ def show(%{assigns: %{user: user}} = conn, %{id: id} = params) do
end
@doc "DELETE /api/v1/statuses/:id"
- def delete(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def delete(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with %Activity{} = activity <- Activity.get_by_id_with_object(id),
{:ok, %Activity{}} <- CommonAPI.delete(id, user) do
try_render(conn, "show.json",
@@ -222,7 +326,13 @@ def delete(%{assigns: %{user: user}} = conn, %{id: id}) do
end
@doc "POST /api/v1/statuses/:id/reblog"
- def reblog(%{assigns: %{user: user}, body_params: params} = conn, %{id: ap_id_or_id}) do
+ def reblog(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: params, params: %{id: ap_id_or_id}}}
+ } = conn,
+ _
+ ) do
with {:ok, announce} <- CommonAPI.repeat(ap_id_or_id, user, params),
%Activity{} = announce <- Activity.normalize(announce.data) do
try_render(conn, "show.json", %{activity: announce, for: user, as: :activity})
@@ -230,7 +340,11 @@ def reblog(%{assigns: %{user: user}, body_params: params} = conn, %{id: ap_id_or
end
@doc "POST /api/v1/statuses/:id/unreblog"
- def unreblog(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
+ def unreblog(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: activity_id}}}} =
+ conn,
+ _
+ ) do
with {:ok, _unannounce} <- CommonAPI.unrepeat(activity_id, user),
%Activity{} = activity <- Activity.get_by_id(activity_id) do
try_render(conn, "show.json", %{activity: activity, for: user, as: :activity})
@@ -238,7 +352,11 @@ def unreblog(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
end
@doc "POST /api/v1/statuses/:id/favourite"
- def favourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
+ def favourite(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: activity_id}}}} =
+ conn,
+ _
+ ) do
with {:ok, _fav} <- CommonAPI.favorite(user, activity_id),
%Activity{} = activity <- Activity.get_by_id(activity_id) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
@@ -246,7 +364,11 @@ def favourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
end
@doc "POST /api/v1/statuses/:id/unfavourite"
- def unfavourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
+ def unfavourite(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: activity_id}}}} =
+ conn,
+ _
+ ) do
with {:ok, _unfav} <- CommonAPI.unfavorite(activity_id, user),
%Activity{} = activity <- Activity.get_by_id(activity_id) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
@@ -254,7 +376,11 @@ def unfavourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
end
@doc "POST /api/v1/statuses/:id/pin"
- def pin(%{assigns: %{user: user}} = conn, %{id: ap_id_or_id}) do
+ def pin(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: ap_id_or_id}}}} =
+ conn,
+ _
+ ) do
with {:ok, activity} <- CommonAPI.pin(ap_id_or_id, user) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
else
@@ -273,24 +399,43 @@ def pin(%{assigns: %{user: user}} = conn, %{id: ap_id_or_id}) do
end
@doc "POST /api/v1/statuses/:id/unpin"
- def unpin(%{assigns: %{user: user}} = conn, %{id: ap_id_or_id}) do
+ def unpin(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: ap_id_or_id}}}} =
+ conn,
+ _
+ ) do
with {:ok, activity} <- CommonAPI.unpin(ap_id_or_id, user) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
end
end
@doc "POST /api/v1/statuses/:id/bookmark"
- def bookmark(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def bookmark(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: body_params, params: %{id: id}}}
+ } = conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id_with_object(id),
%User{} = user <- User.get_cached_by_nickname(user.nickname),
true <- Visibility.visible_for_user?(activity, user),
- {:ok, _bookmark} <- Bookmark.create(user.id, activity.id) do
+ folder_id <- Map.get(body_params, :folder_id, nil),
+ folder_id <-
+ if(folder_id && BookmarkFolder.belongs_to_user?(folder_id, user.id),
+ do: folder_id,
+ else: nil
+ ),
+ {:ok, _bookmark} <- Bookmark.create(user.id, activity.id, folder_id) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
end
end
@doc "POST /api/v1/statuses/:id/unbookmark"
- def unbookmark(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def unbookmark(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id_with_object(id),
%User{} = user <- User.get_cached_by_nickname(user.nickname),
true <- Visibility.visible_for_user?(activity, user),
@@ -300,7 +445,13 @@ def unbookmark(%{assigns: %{user: user}} = conn, %{id: id}) do
end
@doc "POST /api/v1/statuses/:id/mute"
- def mute_conversation(%{assigns: %{user: user}, body_params: params} = conn, %{id: id}) do
+ def mute_conversation(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: params, params: %{id: id}}}
+ } = conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id(id),
{:ok, activity} <- CommonAPI.add_mute(user, activity, params) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
@@ -308,27 +459,24 @@ def mute_conversation(%{assigns: %{user: user}, body_params: params} = conn, %{i
end
@doc "POST /api/v1/statuses/:id/unmute"
- def unmute_conversation(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def unmute_conversation(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{params: %{id: id}}}
+ } = conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id(id),
{:ok, activity} <- CommonAPI.remove_mute(user, activity) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
end
end
- @doc "GET /api/v1/statuses/:id/card"
- @deprecated "https://github.com/tootsuite/mastodon/pull/11213"
- def card(%{assigns: %{user: user}} = conn, %{id: status_id}) do
- with %Activity{} = activity <- Activity.get_by_id(status_id),
- true <- Visibility.visible_for_user?(activity, user) do
- data = Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- render(conn, "card.json", data)
- else
- _ -> render_error(conn, :not_found, "Record not found")
- end
- end
-
@doc "GET /api/v1/statuses/:id/favourited_by"
- def favourited_by(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def favourited_by(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
with true <- Pleroma.Config.get([:instance, :show_reactions]),
%Activity{} = activity <- Activity.get_by_id_with_object(id),
{:visible, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
@@ -349,7 +497,10 @@ def favourited_by(%{assigns: %{user: user}} = conn, %{id: id}) do
end
@doc "GET /api/v1/statuses/:id/reblogged_by"
- def reblogged_by(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def reblogged_by(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id_with_object(id),
{:visible, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
%Object{data: %{"announcements" => announces, "id" => ap_id}} <-
@@ -381,7 +532,10 @@ def reblogged_by(%{assigns: %{user: user}} = conn, %{id: id}) do
end
@doc "GET /api/v1/statuses/:id/context"
- def context(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def context(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
with %Activity{} = activity <- Activity.get_by_id(id) do
activities =
ActivityPub.fetch_activities_for_context(activity.data["context"], %{
@@ -395,7 +549,10 @@ def context(%{assigns: %{user: user}} = conn, %{id: id}) do
end
@doc "GET /api/v1/favourites"
- def favourites(%{assigns: %{user: %User{} = user}} = conn, params) do
+ def favourites(
+ %{assigns: %{user: %User{} = user}, private: %{open_api_spex: %{params: params}}} = conn,
+ _
+ ) do
activities = ActivityPub.fetch_favourites(user, params)
conn
@@ -408,12 +565,13 @@ def favourites(%{assigns: %{user: %User{} = user}} = conn, params) do
end
@doc "GET /api/v1/bookmarks"
- def bookmarks(%{assigns: %{user: user}} = conn, params) do
+ def bookmarks(%{assigns: %{user: user}, private: %{open_api_spex: %{params: params}}} = conn, _) do
user = User.get_cached_by_id(user.id)
+ folder_id = Map.get(params, :folder_id)
bookmarks =
user.id
- |> Bookmark.for_user_query()
+ |> Bookmark.for_user_query(folder_id)
|> Pleroma.Pagination.fetch_paginated(params)
activities =
diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex
index ba7239476..293c61b41 100644
--- a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex
@@ -112,6 +112,8 @@ def public(%{assigns: %{user: user}} = conn, params) do
|> Map.put(:muting_user, user)
|> Map.put(:reply_filtering_user, user)
|> Map.put(:instance, params[:instance])
+ # Restricts unfederated content to authenticated users
+ |> Map.put(:includes_local_public, not is_nil(user))
|> ActivityPub.fetch_public_activities()
conn
diff --git a/lib/pleroma/web/mastodon_api/mastodon_api.ex b/lib/pleroma/web/mastodon_api/mastodon_api.ex
index 5e32b9611..467dc2fac 100644
--- a/lib/pleroma/web/mastodon_api/mastodon_api.ex
+++ b/lib/pleroma/web/mastodon_api/mastodon_api.ex
@@ -61,11 +61,24 @@ def get_friends(user, params \\ %{}) do
end
def get_notifications(user, params \\ %{}) do
- options = cast_params(params)
+ options =
+ cast_params(params) |> Map.update(:include_types, [], fn include_types -> include_types end)
+
+ options =
+ if ("pleroma:report" not in options.include_types and
+ User.privileged?(user, :reports_manage_reports)) or
+ User.privileged?(user, :reports_manage_reports) do
+ options
+ else
+ options
+ |> Map.update(:exclude_types, ["pleroma:report"], fn current_exclude_types ->
+ current_exclude_types ++ ["pleroma:report"]
+ end)
+ end
user
|> Notification.for_user_query(options)
- |> restrict(:include_types, options)
+ |> restrict(:types, options)
|> restrict(:exclude_types, options)
|> restrict(:account_ap_id, options)
|> Pagination.fetch_paginated(params)
@@ -80,7 +93,7 @@ def get_scheduled_activities(user, params \\ %{}) do
defp cast_params(params) do
param_types = %{
exclude_types: {:array, :string},
- include_types: {:array, :string},
+ types: {:array, :string},
exclude_visibilities: {:array, :string},
reblogs: :boolean,
with_muted: :boolean,
@@ -92,7 +105,7 @@ defp cast_params(params) do
changeset.changes
end
- defp restrict(query, :include_types, %{include_types: mastodon_types = [_ | _]}) do
+ defp restrict(query, :types, %{types: mastodon_types = [_ | _]}) do
where(query, [n], n.type in ^mastodon_types)
end
diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex
index 988eedbb1..6976ca6e5 100644
--- a/lib/pleroma/web/mastodon_api/views/account_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/account_view.ex
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
+# Copyright © 2017-2023 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.AccountView do
@@ -193,7 +193,28 @@ def render("relationships.json", %{user: user, targets: targets} = opts) do
render_many(targets, AccountView, "relationship.json", render_opts)
end
+ def render("familiar_followers.json", %{users: users} = opts) do
+ opts =
+ opts
+ |> Map.merge(%{as: :user})
+ |> Map.delete(:users)
+
+ users
+ |> render_many(AccountView, "familiar_followers.json", opts)
+ end
+
+ def render("familiar_followers.json", %{user: %{id: id, accounts: accounts}} = opts) do
+ accounts =
+ accounts
+ |> render_many(AccountView, "show.json", opts)
+ |> Enum.filter(&Enum.any?/1)
+
+ %{id: id, accounts: accounts}
+ end
+
defp do_render("show.json", %{user: user} = opts) do
+ self = opts[:for] == user
+
user = User.sanitize_html(user, User.html_filter_policy(opts[:for]))
display_name = user.name || user.nickname
@@ -203,16 +224,16 @@ defp do_render("show.json", %{user: user} = opts) do
header_static = User.banner_url(user) |> MediaProxy.preview_url(static: true)
following_count =
- if !user.hide_follows_count or !user.hide_follows or opts[:for] == user,
+ if !user.hide_follows_count or !user.hide_follows or self,
do: user.following_count,
else: 0
followers_count =
- if !user.hide_followers_count or !user.hide_followers or opts[:for] == user,
+ if !user.hide_followers_count or !user.hide_followers or self,
do: user.follower_count,
else: 0
- bot = user.actor_type == "Service"
+ bot = bot?(user)
emojis =
Enum.map(user.emoji, fn {shortcode, raw_url} ->
@@ -249,6 +270,10 @@ defp do_render("show.json", %{user: user} = opts) do
nil
end
+ last_status_at =
+ user.last_status_at &&
+ user.last_status_at |> NaiveDateTime.to_date() |> Date.to_iso8601()
+
%{
id: to_string(user.id),
username: username_from_nickname(user.nickname),
@@ -277,7 +302,7 @@ defp do_render("show.json", %{user: user} = opts) do
actor_type: user.actor_type
}
},
- last_status_at: user.last_status_at,
+ last_status_at: last_status_at,
# Pleroma extensions
# Note: it's insecure to output :email but fully-qualified nickname may serve as safe stub
@@ -311,6 +336,7 @@ defp do_render("show.json", %{user: user} = opts) do
|> maybe_put_unread_conversation_count(user, opts[:for])
|> maybe_put_unread_notification_count(user, opts[:for])
|> maybe_put_email_address(user, opts[:for])
+ |> maybe_put_mute_expires_at(user, opts[:for], opts)
|> maybe_show_birthday(user, opts[:for])
end
@@ -369,19 +395,22 @@ defp maybe_put_chat_token(data, %User{id: id}, %User{id: id}, %{
defp maybe_put_chat_token(data, _, _, _), do: data
defp maybe_put_role(data, %User{show_role: true} = user, _) do
- data
- |> Kernel.put_in([:pleroma, :is_admin], user.is_admin)
- |> Kernel.put_in([:pleroma, :is_moderator], user.is_moderator)
+ put_role(data, user)
end
defp maybe_put_role(data, %User{id: user_id} = user, %User{id: user_id}) do
- data
- |> Kernel.put_in([:pleroma, :is_admin], user.is_admin)
- |> Kernel.put_in([:pleroma, :is_moderator], user.is_moderator)
+ put_role(data, user)
end
defp maybe_put_role(data, _, _), do: data
+ defp put_role(data, user) do
+ data
+ |> Kernel.put_in([:pleroma, :is_admin], user.is_admin)
+ |> Kernel.put_in([:pleroma, :is_moderator], user.is_moderator)
+ |> Kernel.put_in([:pleroma, :privileges], User.privileges(user))
+ end
+
defp maybe_put_notification_settings(data, %User{id: user_id} = user, %User{id: user_id}) do
Kernel.put_in(
data,
@@ -398,12 +427,12 @@ defp maybe_put_allow_following_move(data, %User{id: user_id} = user, %User{id: u
defp maybe_put_allow_following_move(data, _, _), do: data
- defp maybe_put_activation_status(data, user, %User{is_admin: true}) do
- Kernel.put_in(data, [:pleroma, :deactivated], !user.is_active)
+ defp maybe_put_activation_status(data, user, user_for) do
+ if User.privileged?(user_for, :users_manage_activation_state),
+ do: Kernel.put_in(data, [:pleroma, :deactivated], !user.is_active),
+ else: data
end
- defp maybe_put_activation_status(data, _, _), do: data
-
defp maybe_put_unread_conversation_count(data, %User{id: user_id} = user, %User{id: user_id}) do
data
|> Kernel.put_in(
@@ -434,6 +463,16 @@ defp maybe_put_email_address(data, %User{id: user_id}, %User{id: user_id} = user
defp maybe_put_email_address(data, _, _), do: data
+ defp maybe_put_mute_expires_at(data, %User{} = user, target, %{mutes: true}) do
+ Map.put(
+ data,
+ :mute_expires_at,
+ UserRelationship.get_mute_expire_date(target, user)
+ )
+ end
+
+ defp maybe_put_mute_expires_at(data, _, _, _), do: data
+
defp maybe_show_birthday(data, %User{id: user_id} = user, %User{id: user_id}) do
data
|> Kernel.put_in([:pleroma, :birthday], user.birthday)
@@ -450,4 +489,12 @@ defp maybe_show_birthday(data, _, _) do
defp image_url(%{"url" => [%{"href" => href} | _]}), do: href
defp image_url(_), do: nil
+
+ defp bot?(user) do
+ # Because older and/or Mastodon clients may not recognize a Group actor properly,
+ # and currently the group actor can only boost things, we should let these clients
+ # think groups are bots.
+ # See https://git.pleroma.social/pleroma/pleroma-meta/-/issues/14
+ user.actor_type == "Service" || user.actor_type == "Group"
+ end
end
diff --git a/lib/pleroma/web/mastodon_api/views/instance_view.ex b/lib/pleroma/web/mastodon_api/views/instance_view.ex
index ee52475d5..99fc6d0c3 100644
--- a/lib/pleroma/web/mastodon_api/views/instance_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/instance_view.ex
@@ -13,11 +13,11 @@ defmodule Pleroma.Web.MastodonAPI.InstanceView do
def render("show.json", _) do
instance = Config.get(:instance)
- %{
- uri: Pleroma.Web.Endpoint.url(),
- title: Keyword.get(instance, :name),
+ common_information(instance)
+ |> Map.merge(%{
+ uri: Pleroma.Web.WebFinger.host(),
description: Keyword.get(instance, :description),
- version: "#{@mastodon_api_level} (compatible; #{Pleroma.Application.named_version()})",
+ short_description: Keyword.get(instance, :short_description),
email: Keyword.get(instance, :email),
urls: %{
streaming_api: Pleroma.Web.Endpoint.websocket_url()
@@ -26,9 +26,10 @@ def render("show.json", _) do
thumbnail:
URI.merge(Pleroma.Web.Endpoint.url(), Keyword.get(instance, :instance_thumbnail))
|> to_string,
- languages: ["en"],
registrations: Keyword.get(instance, :registrations_open),
approval_required: Keyword.get(instance, :account_approval_required),
+ contact_account: contact_account(Keyword.get(instance, :contact_username)),
+ configuration: configuration(),
# Extra (not present in Mastodon):
max_toot_chars: Keyword.get(instance, :limit),
max_media_attachments: Keyword.get(instance, :max_media_attachments),
@@ -40,20 +41,61 @@ def render("show.json", _) do
background_image: Pleroma.Web.Endpoint.url() <> Keyword.get(instance, :background_image),
shout_limit: Config.get([:shout, :limit]),
description_limit: Keyword.get(instance, :description_limit),
- pleroma: %{
- metadata: %{
- account_activation_required: Keyword.get(instance, :account_activation_required),
- features: features(),
- federation: federation(),
- fields_limits: fields_limits(),
- post_formats: Config.get([:instance, :allowed_post_formats]),
- privileged_staff: Config.get([:instance, :privileged_staff]),
- birthday_required: Config.get([:instance, :birthday_required]),
- birthday_min_age: Config.get([:instance, :birthday_min_age])
- },
- stats: %{mau: Pleroma.User.active_user_count()},
- vapid_public_key: Keyword.get(Pleroma.Web.Push.vapid_config(), :public_key)
- }
+ chat_limit: Keyword.get(instance, :chat_limit),
+ pleroma: pleroma_configuration(instance)
+ })
+ end
+
+ def render("show2.json", _) do
+ instance = Config.get(:instance)
+
+ common_information(instance)
+ |> Map.merge(%{
+ domain: Pleroma.Web.WebFinger.host(),
+ source_url: Pleroma.Application.repository(),
+ description: Keyword.get(instance, :short_description),
+ usage: %{users: %{active_month: Pleroma.User.active_user_count()}},
+ thumbnail: %{
+ url:
+ URI.merge(Pleroma.Web.Endpoint.url(), Keyword.get(instance, :instance_thumbnail))
+ |> to_string
+ },
+ configuration: configuration2(),
+ registrations: %{
+ enabled: Keyword.get(instance, :registrations_open),
+ approval_required: Keyword.get(instance, :account_approval_required),
+ message: nil,
+ url: nil
+ },
+ contact: %{
+ email: Keyword.get(instance, :email),
+ account: contact_account(Keyword.get(instance, :contact_username))
+ },
+ # Extra (not present in Mastodon):
+ pleroma: pleroma_configuration2(instance)
+ })
+ end
+
+ def render("rules.json", _) do
+ Pleroma.Rule.query()
+ |> Pleroma.Repo.all()
+ |> render_many(__MODULE__, "rule.json", as: :rule)
+ end
+
+ def render("rule.json", %{rule: rule}) do
+ %{
+ id: to_string(rule.id),
+ text: rule.text,
+ hint: rule.hint || ""
+ }
+ end
+
+ defp common_information(instance) do
+ %{
+ languages: Keyword.get(instance, :languages, ["en"]),
+ rules: render(__MODULE__, "rules.json"),
+ title: Keyword.get(instance, :name),
+ version: "#{@mastodon_api_level} (compatible; #{Pleroma.Application.named_version()})"
}
end
@@ -68,6 +110,8 @@ def features do
"shareable_emoji_packs",
"multifetch",
"pleroma:api/v1/notifications:include_types_filter",
+ "editing",
+ "quote_posting",
if Config.get([:activitypub, :blockers_visible]) do
"blockers_visible"
end,
@@ -91,13 +135,17 @@ def features do
"safe_dm_mentions"
end,
"pleroma_emoji_reactions",
+ "pleroma_custom_emoji_reactions",
"pleroma_chat_messages",
if Config.get([:instance, :show_reactions]) do
"exposable_reactions"
end,
if Config.get([:instance, :profile_directory]) do
"profile_directory"
- end
+ end,
+ "pleroma:get:main/ostatus",
+ "pleroma:group_actors",
+ "pleroma:bookmark_folders"
]
|> Enum.filter(& &1)
end
@@ -129,7 +177,7 @@ def federation do
|> Map.put(:enabled, Config.get([:instance, :federating]))
end
- def fields_limits do
+ defp fields_limits do
%{
max_fields: Config.get([:instance, :max_account_fields]),
max_remote_fields: Config.get([:instance, :max_remote_account_fields]),
@@ -137,4 +185,94 @@ def fields_limits do
value_length: Config.get([:instance, :account_field_value_length])
}
end
+
+ defp contact_account(nil), do: nil
+
+ defp contact_account("@" <> username) do
+ contact_account(username)
+ end
+
+ defp contact_account(username) do
+ user = Pleroma.User.get_cached_by_nickname(username)
+
+ if user do
+ Pleroma.Web.MastodonAPI.AccountView.render("show.json", %{user: user, for: nil})
+ else
+ nil
+ end
+ end
+
+ defp configuration do
+ %{
+ accounts: %{
+ max_featured_tags: 0
+ },
+ statuses: %{
+ max_characters: Config.get([:instance, :limit]),
+ max_media_attachments: Config.get([:instance, :max_media_attachments])
+ },
+ media_attachments: %{
+ image_size_limit: Config.get([:instance, :upload_limit]),
+ video_size_limit: Config.get([:instance, :upload_limit]),
+ supported_mime_types: ["application/octet-stream"]
+ },
+ polls: %{
+ max_options: Config.get([:instance, :poll_limits, :max_options]),
+ max_characters_per_option: Config.get([:instance, :poll_limits, :max_option_chars]),
+ min_expiration: Config.get([:instance, :poll_limits, :min_expiration]),
+ max_expiration: Config.get([:instance, :poll_limits, :max_expiration])
+ }
+ }
+ end
+
+ defp configuration2 do
+ configuration()
+ |> put_in([:accounts, :max_pinned_statuses], Config.get([:instance, :max_pinned_statuses], 0))
+ |> put_in([:statuses, :characters_reserved_per_url], 0)
+ |> Map.merge(%{
+ urls: %{
+ streaming: Pleroma.Web.Endpoint.websocket_url(),
+ status: Config.get([:instance, :status_page])
+ },
+ vapid: %{
+ public_key: Keyword.get(Pleroma.Web.Push.vapid_config(), :public_key)
+ }
+ })
+ end
+
+ defp pleroma_configuration(instance) do
+ %{
+ metadata: %{
+ account_activation_required: Keyword.get(instance, :account_activation_required),
+ features: features(),
+ federation: federation(),
+ fields_limits: fields_limits(),
+ post_formats: Config.get([:instance, :allowed_post_formats]),
+ birthday_required: Config.get([:instance, :birthday_required]),
+ birthday_min_age: Config.get([:instance, :birthday_min_age])
+ },
+ stats: %{mau: Pleroma.User.active_user_count()},
+ vapid_public_key: Keyword.get(Pleroma.Web.Push.vapid_config(), :public_key)
+ }
+ end
+
+ defp pleroma_configuration2(instance) do
+ configuration = pleroma_configuration(instance)
+
+ configuration
+ |> Map.merge(%{
+ metadata:
+ configuration.metadata
+ |> Map.merge(%{
+ avatar_upload_limit: Keyword.get(instance, :avatar_upload_limit),
+ background_upload_limit: Keyword.get(instance, :background_upload_limit),
+ banner_upload_limit: Keyword.get(instance, :banner_upload_limit),
+ background_image:
+ Pleroma.Web.Endpoint.url() <> Keyword.get(instance, :background_image),
+ chat_limit: Keyword.get(instance, :chat_limit),
+ description_limit: Keyword.get(instance, :description_limit),
+ shout_limit: Config.get([:shout, :limit])
+ })
+ })
+ end
end
diff --git a/lib/pleroma/web/mastodon_api/views/notification_view.ex b/lib/pleroma/web/mastodon_api/views/notification_view.ex
index 0dc7f3beb..3f2478719 100644
--- a/lib/pleroma/web/mastodon_api/views/notification_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/notification_view.ex
@@ -17,9 +17,14 @@ defmodule Pleroma.Web.MastodonAPI.NotificationView do
alias Pleroma.Web.MastodonAPI.AccountView
alias Pleroma.Web.MastodonAPI.NotificationView
alias Pleroma.Web.MastodonAPI.StatusView
+ alias Pleroma.Web.MediaProxy
alias Pleroma.Web.PleromaAPI.Chat.MessageReferenceView
- @parent_types ~w{Like Announce EmojiReact}
+ defp object_id_for(%{data: %{"object" => %{"id" => id}}}) when is_binary(id), do: id
+
+ defp object_id_for(%{data: %{"object" => id}}) when is_binary(id), do: id
+
+ @parent_types ~w{Like Announce EmojiReact Update}
def render("index.json", %{notifications: notifications, for: reading_user} = opts) do
activities = Enum.map(notifications, & &1.activity)
@@ -30,7 +35,7 @@ def render("index.json", %{notifications: notifications, for: reading_user} = op
%{data: %{"type" => type}} ->
type in @parent_types
end)
- |> Enum.map(& &1.data["object"])
+ |> Enum.map(&object_id_for/1)
|> Activity.create_by_object_ap_id()
|> Activity.with_preloaded_object(:left)
|> Pleroma.Repo.all()
@@ -78,9 +83,9 @@ def render(
parent_activity_fn = fn ->
if opts[:parent_activities] do
- Activity.Queries.find_by_object_ap_id(opts[:parent_activities], activity.data["object"])
+ Activity.Queries.find_by_object_ap_id(opts[:parent_activities], object_id_for(activity))
else
- Activity.get_create_by_object_ap_id(activity.data["object"])
+ Activity.get_create_by_object_ap_id(object_id_for(activity))
end
end
@@ -103,12 +108,18 @@ def render(
"mention" ->
put_status(response, activity, reading_user, status_render_opts)
+ "status" ->
+ put_status(response, activity, reading_user, status_render_opts)
+
"favourite" ->
put_status(response, parent_activity_fn.(), reading_user, status_render_opts)
"reblog" ->
put_status(response, parent_activity_fn.(), reading_user, status_render_opts)
+ "update" ->
+ put_status(response, parent_activity_fn.(), reading_user, status_render_opts)
+
"move" ->
put_target(response, activity, reading_user, %{})
@@ -138,7 +149,9 @@ defp put_report(response, activity) do
end
defp put_emoji(response, activity) do
- Map.put(response, :emoji, activity.data["content"])
+ response
+ |> Map.put(:emoji, activity.data["content"])
+ |> Map.put(:emoji_url, MediaProxy.url(Pleroma.Emoji.emoji_url(activity.data)))
end
defp put_chat_message(response, activity, reading_user, opts) do
diff --git a/lib/pleroma/web/mastodon_api/views/poll_view.ex b/lib/pleroma/web/mastodon_api/views/poll_view.ex
index 34e23873e..1e3c9f36d 100644
--- a/lib/pleroma/web/mastodon_api/views/poll_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/poll_view.ex
@@ -21,7 +21,10 @@ def render("show.json", %{object: object, multiple: multiple, options: options}
votes_count: votes_count,
voters_count: voters_count(object),
options: options,
- emojis: Pleroma.Web.MastodonAPI.StatusView.build_emojis(object.data["emoji"])
+ emojis: Pleroma.Web.MastodonAPI.StatusView.build_emojis(object.data["emoji"]),
+ pleroma: %{
+ non_anonymous: object.data["nonAnonymous"] || false
+ }
}
if params[:for] do
diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex
index 1ebfd6740..c945290c1 100644
--- a/lib/pleroma/web/mastodon_api/views/status_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/status_view.ex
@@ -21,6 +21,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
alias Pleroma.Web.MastodonAPI.StatusView
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.PleromaAPI.EmojiReactionController
+ alias Pleroma.Web.RichMedia.Card
import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
@@ -29,9 +30,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
# pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity ->
- spawn(fn ->
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
- end)
+ spawn(fn -> Card.get_by_activity(activity) end)
end)
end
@@ -57,11 +56,40 @@ defp get_replied_to_activities(activities) do
end)
end
- defp get_context_id(%{data: %{"context_id" => context_id}}) when not is_nil(context_id),
- do: context_id
+ defp get_quoted_activities([]), do: %{}
- defp get_context_id(%{data: %{"context" => context}}) when is_binary(context),
- do: Utils.context_to_conversation_id(context)
+ defp get_quoted_activities(activities) do
+ activities
+ |> Enum.map(fn
+ %{data: %{"type" => "Create"}} = activity ->
+ object = Object.normalize(activity, fetch: false)
+ object && object.data["quoteUrl"] != "" && object.data["quoteUrl"]
+
+ _ ->
+ nil
+ end)
+ |> Enum.filter(& &1)
+ |> Activity.create_by_object_ap_id_with_object()
+ |> Repo.all()
+ |> Enum.reduce(%{}, fn activity, acc ->
+ object = Object.normalize(activity, fetch: false)
+ if object, do: Map.put(acc, object.data["id"], activity), else: acc
+ end)
+ end
+
+  # DEPRECATED This field seems to be a leftover from the StatusNet era.
+  # If your application uses `pleroma.conversation_id`: this field is deprecated.
+  # It is currently stubbed by taking a CRC32 of the context and clearing the
+  # MSB, to avoid overflow exceptions with signed integers on the different
+  # clients using this field (mostly Java/Kotlin code; see Husky).
+  # This should be removed in a future version of Pleroma. Pleroma-FE currently
+  # depends on this field as well.
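+  # Note (illustrative, not part of the upstream comment): for a 32-bit CRC,
+  # clearing the MSB is equivalent to masking with 0x7FFF_FFFF, so the stubbed
+  # id always fits in 0..2_147_483_647.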
+ defp get_context_id(%{data: %{"context" => context}}) when is_binary(context) do
+ import Bitwise
+
+ :erlang.crc32(context)
+ |> band(bnot(0x8000_0000))
+ end
defp get_context_id(_), do: nil
@@ -84,11 +112,10 @@ def render("index.json", opts) do
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
activities = Enum.filter(opts.activities, & &1)
- # Start fetching rich media before doing anything else, so that later calls to get the cards
- # only block for timeout in the worst case, as opposed to
- # length(activities_with_links) * timeout
+ # Start prefetching rich media before doing anything else
fetch_rich_media_for_activities(activities)
replied_to_activities = get_replied_to_activities(activities)
+ quoted_activities = get_quoted_activities(activities)
parent_activities =
activities
@@ -121,6 +148,7 @@ def render("index.json", opts) do
opts =
opts
|> Map.put(:replied_to_activities, replied_to_activities)
+ |> Map.put(:quoted_activities, quoted_activities)
|> Map.put(:parent_activities, parent_activities)
|> Map.put(:relationships, relationships_opt)
@@ -153,7 +181,14 @@ def render(
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
- bookmarked = Activity.get_bookmark(reblogged_parent_activity, opts[:for]) != nil
+ bookmark = Activity.get_bookmark(reblogged_parent_activity, opts[:for])
+
+ bookmark_folder =
+ if bookmark != nil do
+ bookmark.folder_id
+ else
+ nil
+ end
mentions =
activity.recipients
@@ -182,7 +217,7 @@ def render(
favourites_count: 0,
reblogged: reblogged?(reblogged_parent_activity, opts[:for]),
favourited: present?(favorited),
- bookmarked: present?(bookmarked),
+ bookmarked: present?(bookmark),
muted: false,
pinned: pinned?,
sensitive: false,
@@ -196,7 +231,8 @@ def render(
emojis: [],
pleroma: %{
local: activity.local,
- pinned_at: pinned_at
+ pinned_at: pinned_at,
+ bookmark_folder: bookmark_folder
}
}
end
@@ -233,7 +269,14 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
- bookmarked = Activity.get_bookmark(activity, opts[:for]) != nil
+ bookmark = Activity.get_bookmark(activity, opts[:for])
+
+ bookmark_folder =
+ if bookmark != nil do
+ bookmark.folder_id
+ else
+ nil
+ end
client_posted_this_activity = opts[:for] && user.id == opts[:for].id
@@ -258,10 +301,45 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
created_at = Utils.to_masto_date(object.data["published"])
- reply_to = get_reply_to(activity, opts)
+ edited_at =
+ with %{"updated" => updated} <- object.data,
+ date <- Utils.to_masto_date(updated),
+ true <- date != "" do
+ date
+ else
+ _ ->
+ nil
+ end
+ reply_to = get_reply_to(activity, opts)
reply_to_user = reply_to && CommonAPI.get_user(reply_to.data["actor"])
+ history_len =
+ 1 +
+ (Object.Updater.history_for(object.data)
+ |> Map.get("orderedItems")
+ |> length())
+
+ # See render("history.json", ...) for more details
+ # Here the implicit index of the current content is 0
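+      # (Illustrative example: an object with two stored past revisions has
+      # history_len == 3, so the current content gets chrono_order == 2.)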
+ chrono_order = history_len - 1
+
+ quote_activity = get_quote(activity, opts)
+
+ quote_id =
+ case quote_activity do
+ %Activity{id: id} -> id
+ _ -> nil
+ end
+
+ quote_post =
+ if visible_for_user?(quote_activity, opts[:for]) and opts[:show_quote] != false do
+ quote_rendering_opts = Map.merge(opts, %{activity: quote_activity, show_quote: false})
+ render("show.json", quote_rendering_opts)
+ else
+ nil
+ end
+
content =
object
|> render_content()
@@ -271,19 +349,23 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
- "mastoapi:content"
+ "mastoapi:content:#{chrono_order}"
)
content_plaintext =
content
|> Activity.HTML.get_cached_stripped_html_for_activity(
activity,
- "mastoapi:content"
+ "mastoapi:content:#{chrono_order}"
)
summary = object.data["summary"] || ""
- card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity))
+ card =
+ case Card.get_by_activity(activity) do
+ %Card{} = result -> render("card.json", result)
+ _ -> nil
+ end
url =
if user.local do
@@ -306,14 +388,14 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
end
emoji_reactions =
- object.data
- |> Map.get("reactions", [])
+ object
+ |> Object.get_emoji_reactions()
|> EmojiReactionController.filter_allowed_users(
opts[:for],
Map.get(opts, :with_muted, false)
)
- |> Stream.map(fn {emoji, users} ->
- build_emoji_map(emoji, users, opts[:for])
+ |> Stream.map(fn {emoji, users, url} ->
+ build_emoji_map(emoji, users, url, opts[:for])
end)
|> Enum.to_list()
@@ -344,14 +426,15 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
reblog: nil,
card: card,
content: content_html,
- text: opts[:with_source] && object.data["source"],
+ text: opts[:with_source] && get_source_text(object.data["source"]),
created_at: created_at,
+ edited_at: edited_at,
reblogs_count: announcement_count,
replies_count: object.data["repliesCount"] || 0,
favourites_count: like_count,
reblogged: reblogged?(activity, opts[:for]),
favourited: present?(favorited),
- bookmarked: present?(bookmarked),
+ bookmarked: present?(bookmark),
muted: muted,
pinned: pinned?,
sensitive: sensitive,
@@ -367,7 +450,12 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
pleroma: %{
local: activity.local,
conversation_id: get_context_id(activity),
+ context: object.data["context"],
in_reply_to_account_acct: reply_to_user && reply_to_user.nickname,
+ quote: quote_post,
+ quote_id: quote_id,
+ quote_url: object.data["quoteUrl"],
+ quote_visible: visible_for_user?(quote_activity, opts[:for]),
content: %{"text/plain" => content_plaintext},
spoiler_text: %{"text/plain" => summary},
expires_at: expires_at,
@@ -375,7 +463,9 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
thread_muted: thread_muted?,
emoji_reactions: emoji_reactions,
parent_visible: visible_for_user?(reply_to, opts[:for]),
- pinned_at: pinned_at
+ pinned_at: pinned_at,
+ quotes_count: object.data["quotesCount"] || 0,
+ bookmark_folder: bookmark_folder
}
}
end
@@ -384,37 +474,124 @@ def render("show.json", _) do
nil
end
- def render("card.json", %{rich_media: rich_media, page_url: page_url}) do
- page_url_data = URI.parse(page_url)
+ def render("history.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do
+ object = Object.normalize(activity, fetch: false)
- page_url_data =
- if is_binary(rich_media["url"]) do
- URI.merge(page_url_data, URI.parse(rich_media["url"]))
- else
- page_url_data
- end
+ hashtags = Object.hashtags(object)
+
+ user = CommonAPI.get_user(activity.data["actor"])
+
+ past_history =
+ Object.Updater.history_for(object.data)
+ |> Map.get("orderedItems")
+ |> Enum.map(&Map.put(&1, "id", object.data["id"]))
+ |> Enum.map(&%Object{data: &1, id: object.id})
+
+ history =
+ [object | past_history]
+      # Mastodon expects the original version to come first
+ |> Enum.reverse()
+ |> Enum.with_index()
+ |> Enum.map(fn {object, chrono_order} ->
+ %{
+ # The history is prepended every time there is a new edit.
+ # In chrono_order, the oldest item is always at 0, and so on.
+ # The chrono_order is an invariant kept between edits.
+ chrono_order: chrono_order,
+ object: object
+ }
+ end)
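+    # (Illustrative: with two past edits, the oldest revision gets chrono_order 0,
+    # the intermediate edit 1, and the current object 2, matching the
+    # "mastoapi:content:#{chrono_order}" cache keys used for each rendered item.)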
+
+ individual_opts =
+ opts
+ |> Map.put(:as, :item)
+ |> Map.put(:user, user)
+ |> Map.put(:hashtags, hashtags)
+
+ render_many(history, StatusView, "history_item.json", individual_opts)
+ end
+
+ def render(
+ "history_item.json",
+ %{
+ activity: activity,
+ user: user,
+ item: %{object: object, chrono_order: chrono_order},
+ hashtags: hashtags
+ } = opts
+ ) do
+ sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
+
+ attachment_data = object.data["attachment"] || []
+ attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
+
+ created_at = Utils.to_masto_date(object.data["updated"] || object.data["published"])
+
+ content =
+ object
+ |> render_content()
+
+ content_html =
+ content
+ |> Activity.HTML.get_cached_scrubbed_html_for_activity(
+ User.html_filter_policy(opts[:for]),
+ activity,
+ "mastoapi:content:#{chrono_order}"
+ )
+
+ summary = object.data["summary"] || ""
+
+ %{
+ account:
+ AccountView.render("show.json", %{
+ user: user,
+ for: opts[:for]
+ }),
+ content: content_html,
+ sensitive: sensitive,
+ spoiler_text: summary,
+ created_at: created_at,
+ media_attachments: attachments,
+ emojis: build_emojis(object.data["emoji"]),
+ poll: render(PollView, "show.json", object: object, for: opts[:for])
+ }
+ end
+
+ def render("source.json", %{activity: %{data: %{"object" => _object}} = activity} = _opts) do
+ object = Object.normalize(activity, fetch: false)
+
+ %{
+ id: activity.id,
+ text: get_source_text(Map.get(object.data, "source", "")),
+ spoiler_text: Map.get(object.data, "summary", ""),
+ content_type: get_source_content_type(object.data["source"])
+ }
+ end
+
+ def render("card.json", %Card{fields: rich_media}) do
+ page_url_data = URI.parse(rich_media["url"])
page_url = page_url_data |> to_string
- image_url_data =
- if is_binary(rich_media["image"]) do
- URI.parse(rich_media["image"])
- else
- nil
- end
-
- image_url = build_image_url(image_url_data, page_url_data)
+ image_url = proxied_url(rich_media["image"], page_url_data)
+ audio_url = proxied_url(rich_media["audio"], page_url_data)
+ video_url = proxied_url(rich_media["video"], page_url_data)
%{
type: "link",
provider_name: page_url_data.host,
provider_url: page_url_data.scheme <> "://" <> page_url_data.host,
url: page_url,
- image: image_url |> MediaProxy.url(),
+ image: image_url,
+ image_description: rich_media["image:alt"] || "",
title: rich_media["title"] || "",
description: rich_media["description"] || "",
pleroma: %{
- opengraph: rich_media
+ opengraph:
+ rich_media
+ |> Maps.put_if_present("image", image_url)
+ |> Maps.put_if_present("audio", audio_url)
+ |> Maps.put_if_present("video", video_url)
}
}
end
@@ -436,10 +613,19 @@ def render("attachment.json", %{attachment: attachment}) do
true -> "unknown"
end
-    <<hash_id::signed-32, _rest::binary>> = :crypto.hash(:md5, href)
+ attachment_id =
+ with {_, ap_id} when is_binary(ap_id) <- {:ap_id, attachment["id"]},
+ {_, %Object{data: _object_data, id: object_id}} <-
+ {:object, Object.get_by_ap_id(ap_id)} do
+ to_string(object_id)
+ else
+ _ ->
+          <<hash_id::signed-32, _rest::binary>> = :crypto.hash(:md5, href)
+ to_string(attachment["id"] || hash_id)
+ end
%{
- id: to_string(attachment["id"] || hash_id),
+ id: attachment_id,
url: href,
remote_url: href,
preview_url: href_preview,
@@ -500,6 +686,25 @@ def get_reply_to(%{data: %{"object" => _object}} = activity, _) do
end
end
+ def get_quote(activity, %{quoted_activities: quoted_activities}) do
+ object = Object.normalize(activity, fetch: false)
+
+ with nil <- quoted_activities[object.data["quoteUrl"]] do
+ # For when a quote post is inside an Announce
+ Activity.get_create_by_object_ap_id_with_object(object.data["quoteUrl"])
+ end
+ end
+
+ def get_quote(%{data: %{"object" => _object}} = activity, _) do
+ object = Object.normalize(activity, fetch: false)
+
+ if object.data["quoteUrl"] && object.data["quoteUrl"] != "" do
+ Activity.get_create_by_object_ap_id(object.data["quoteUrl"])
+ else
+ nil
+ end
+ end
+
def render_content(%{data: %{"name" => name}} = object) when not is_nil(name) and name != "" do
url = object.data["url"] || object.data["id"]
@@ -569,11 +774,13 @@ defp pin_data(%Object{data: %{"id" => object_id}}, %User{pinned_objects: pinned_
end
end
- defp build_emoji_map(emoji, users, current_user) do
+ defp build_emoji_map(emoji, users, url, current_user) do
%{
- name: emoji,
+ name: Pleroma.Web.PleromaAPI.EmojiReactionView.emoji_name(emoji, url),
count: length(users),
- me: !!(current_user && current_user.ap_id in users)
+ url: MediaProxy.url(url),
+ me: !!(current_user && current_user.ap_id in users),
+ account_ids: Enum.map(users, fn user -> User.get_cached_by_ap_id(user).id end)
}
end
@@ -600,5 +807,31 @@ defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do
URI.merge(page_url_data, image_url_data) |> to_string
end
- defp build_image_url(_, _), do: nil
+ defp get_source_text(%{"content" => content} = _source) do
+ content
+ end
+
+ defp get_source_text(source) when is_binary(source) do
+ source
+ end
+
+ defp get_source_text(_) do
+ ""
+ end
+
+ defp get_source_content_type(%{"mediaType" => type} = _source) do
+ type
+ end
+
+ defp get_source_content_type(_source) do
+ Utils.get_content_type(nil)
+ end
+
+ defp proxied_url(url, page_url_data) do
+ if is_binary(url) do
+ build_image_url(URI.parse(url), page_url_data) |> MediaProxy.url()
+ else
+ nil
+ end
+ end
end
diff --git a/lib/pleroma/web/mastodon_api/websocket_handler.ex b/lib/pleroma/web/mastodon_api/websocket_handler.ex
index e62b8a135..bb27d806d 100644
--- a/lib/pleroma/web/mastodon_api/websocket_handler.ex
+++ b/lib/pleroma/web/mastodon_api/websocket_handler.ex
@@ -9,119 +9,128 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
alias Pleroma.User
alias Pleroma.Web.OAuth.Token
alias Pleroma.Web.Streamer
+ alias Pleroma.Web.StreamerView
- @behaviour :cowboy_websocket
+ @behaviour Phoenix.Socket.Transport
# Client ping period.
@tick :timer.seconds(30)
- # Cowboy timeout period.
- @timeout :timer.seconds(60)
- # Hibernate every X messages
- @hibernate_every 100
- def init(%{qs: qs} = req, state) do
- with params <- Enum.into(:cow_qs.parse_qs(qs), %{}),
- sec_websocket <- :cowboy_req.header("sec-websocket-protocol", req, nil),
- access_token <- Map.get(params, "access_token"),
- {:ok, user, oauth_token} <- authenticate_request(access_token, sec_websocket),
- {:ok, topic} <- Streamer.get_topic(params["stream"], user, oauth_token, params) do
- req =
- if sec_websocket do
- :cowboy_req.set_resp_header("sec-websocket-protocol", sec_websocket, req)
+ @impl Phoenix.Socket.Transport
+ def child_spec(_opts), do: :ignore
+
+  # This only prepares the connection; it does not yet run in the transport process
+ @impl Phoenix.Socket.Transport
+ def connect(%{params: params} = transport_info) do
+ with access_token <- Map.get(params, "access_token"),
+ {:ok, user, oauth_token} <- authenticate_request(access_token),
+ {:ok, topic} <-
+ Streamer.get_topic(params["stream"], user, oauth_token, params) do
+ topics =
+ if topic do
+ [topic]
else
- req
+ []
end
- {:cowboy_websocket, req, %{user: user, topic: topic, count: 0, timer: nil},
- %{idle_timeout: @timeout}}
+ state = %{
+ user: user,
+ topics: topics,
+ oauth_token: oauth_token,
+ count: 0,
+ timer: nil
+ }
+
+ {:ok, state}
else
{:error, :bad_topic} ->
- Logger.debug("#{__MODULE__} bad topic #{inspect(req)}")
- req = :cowboy_req.reply(404, req)
- {:ok, req, state}
+ Logger.debug("#{__MODULE__} bad topic #{inspect(transport_info)}")
+
+ {:error, :bad_topic}
{:error, :unauthorized} ->
- Logger.debug("#{__MODULE__} authentication error: #{inspect(req)}")
- req = :cowboy_req.reply(401, req)
- {:ok, req, state}
+ Logger.debug("#{__MODULE__} authentication error: #{inspect(transport_info)}")
+ {:error, :unauthorized}
end
end
- def websocket_init(state) do
- Logger.debug(
- "#{__MODULE__} accepted websocket connection for user #{(state.user || %{id: "anonymous"}).id}, topic #{state.topic}"
- )
+  # Subscriptions/links and messages cannot be created
+  # until the process is launched with init/1
+ @impl Phoenix.Socket.Transport
+ def init(state) do
+ Enum.each(state.topics, fn topic -> Streamer.add_socket(topic, state.oauth_token) end)
- Streamer.add_socket(state.topic, state.user)
- {:ok, %{state | timer: timer()}}
+ Process.send_after(self(), :ping, @tick)
+
+ {:ok, state}
end
- # Client's Pong frame.
- def websocket_handle(:pong, state) do
- if state.timer, do: Process.cancel_timer(state.timer)
- {:ok, %{state | timer: timer()}}
+ @impl Phoenix.Socket.Transport
+ def handle_in({text, [opcode: :text]}, state) do
+ with {:ok, %{} = event} <- Jason.decode(text) do
+ handle_client_event(event, state)
+ else
+ _ ->
+ Logger.error("#{__MODULE__} received non-JSON event: #{inspect(text)}")
+ {:ok, state}
+ end
end
- # We only receive pings for now
- def websocket_handle(:ping, state), do: {:ok, state}
-
- def websocket_handle(frame, state) do
+ def handle_in(frame, state) do
Logger.error("#{__MODULE__} received frame: #{inspect(frame)}")
{:ok, state}
end
- def websocket_info({:render_with_user, view, template, item}, state) do
+ @impl Phoenix.Socket.Transport
+ def handle_info({:render_with_user, view, template, item, topic}, state) do
user = %User{} = User.get_cached_by_ap_id(state.user.ap_id)
unless Streamer.filtered_by_user?(user, item) do
- websocket_info({:text, view.render(template, item, user)}, %{state | user: user})
+ message = view.render(template, item, user, topic)
+ {:push, {:text, message}, %{state | user: user}}
else
{:ok, state}
end
end
- def websocket_info({:text, message}, state) do
- # If the websocket processed X messages, force an hibernate/GC.
- # We don't hibernate at every message to balance CPU usage/latency with RAM usage.
- if state.count > @hibernate_every do
- {:reply, {:text, message}, %{state | count: 0}, :hibernate}
- else
- {:reply, {:text, message}, %{state | count: state.count + 1}}
- end
+ def handle_info({:text, text}, state) do
+ {:push, {:text, text}, state}
end
- # Ping tick. We don't re-queue a timer there, it is instead queued when :pong is received.
- # As we hibernate there, reset the count to 0.
- # If the client misses :pong, Cowboy will automatically timeout the connection after
- # `@idle_timeout`.
- def websocket_info(:tick, state) do
- {:reply, :ping, %{state | timer: nil, count: 0}, :hibernate}
+ def handle_info(:ping, state) do
+ Process.send_after(self(), :ping, @tick)
+
+ {:push, {:ping, ""}, state}
end
- # State can be `[]` only in case we terminate before switching to websocket,
- # we already log errors for these cases in `init/1`, so just do nothing here
- def terminate(_reason, _req, []), do: :ok
+ def handle_info(:close, state) do
+ {:stop, {:closed, 'connection closed by server'}, state}
+ end
- def terminate(reason, _req, state) do
+ def handle_info(msg, state) do
+ Logger.debug("#{__MODULE__} received info: #{inspect(msg)}")
+
+ {:ok, state}
+ end
+
+ @impl Phoenix.Socket.Transport
+ def terminate(reason, state) do
Logger.debug(
- "#{__MODULE__} terminating websocket connection for user #{(state.user || %{id: "anonymous"}).id}, topic #{state.topic || "?"}: #{inspect(reason)}"
+ "#{__MODULE__} terminating websocket connection for user #{(state.user || %{id: "anonymous"}).id}, topics #{state.topics || "?"}: #{inspect(reason)})"
)
- Streamer.remove_socket(state.topic)
+ Enum.each(state.topics, fn topic -> Streamer.remove_socket(topic) end)
:ok
end
# Public streams without authentication.
- defp authenticate_request(nil, nil) do
+ defp authenticate_request(nil) do
{:ok, nil, nil}
end
# Authenticated streams.
- defp authenticate_request(access_token, sec_websocket) do
- token = access_token || sec_websocket
-
- with true <- is_bitstring(token),
- oauth_token = %Token{user_id: user_id} <- Repo.get_by(Token, token: token),
+ defp authenticate_request(access_token) do
+ with oauth_token = %Token{user_id: user_id} <- Repo.get_by(Token, token: access_token),
user = %User{} <- User.get_cached_by_id(user_id) do
{:ok, user, oauth_token}
else
@@ -129,7 +138,110 @@ defp authenticate_request(access_token, sec_websocket) do
end
end
- defp timer do
- Process.send_after(self(), :tick, @tick)
+ defp handle_client_event(%{"type" => "subscribe", "stream" => _topic} = params, state) do
+ with {_, {:ok, topic}} <-
+ {:topic, Streamer.get_topic(params["stream"], state.user, state.oauth_token, params)},
+ {_, false} <- {:subscribed, topic in state.topics} do
+ Streamer.add_socket(topic, state.oauth_token)
+
+ message =
+ StreamerView.render("pleroma_respond.json", %{type: "subscribe", result: "success"})
+
+ {:reply, :ok, {:text, message}, %{state | topics: [topic | state.topics]}}
+ else
+ {:subscribed, true} ->
+ message =
+ StreamerView.render("pleroma_respond.json", %{type: "subscribe", result: "ignored"})
+
+ {:reply, :error, {:text, message}, state}
+
+ {:topic, {:error, error}} ->
+ message =
+ StreamerView.render("pleroma_respond.json", %{
+ type: "subscribe",
+ result: "error",
+ error: error
+ })
+
+ {:reply, :error, {:text, message}, state}
+ end
+ end
+
+ defp handle_client_event(%{"type" => "unsubscribe", "stream" => _topic} = params, state) do
+ with {_, {:ok, topic}} <-
+ {:topic, Streamer.get_topic(params["stream"], state.user, state.oauth_token, params)},
+ {_, true} <- {:subscribed, topic in state.topics} do
+ Streamer.remove_socket(topic)
+
+ message =
+ StreamerView.render("pleroma_respond.json", %{type: "unsubscribe", result: "success"})
+
+ {:reply, :ok, {:text, message}, %{state | topics: List.delete(state.topics, topic)}}
+ else
+ {:subscribed, false} ->
+ message =
+ StreamerView.render("pleroma_respond.json", %{type: "unsubscribe", result: "ignored"})
+
+ {:reply, :error, {:text, message}, state}
+
+ {:topic, {:error, error}} ->
+ message =
+ StreamerView.render("pleroma_respond.json", %{
+ type: "unsubscribe",
+ result: "error",
+ error: error
+ })
+
+ {:reply, :error, {:text, message}, state}
+ end
+ end
+
+ defp handle_client_event(
+ %{"type" => "pleroma:authenticate", "token" => access_token} = _params,
+ state
+ ) do
+ with {:auth, nil, nil} <- {:auth, state.user, state.oauth_token},
+ {:ok, user, oauth_token} <- authenticate_request(access_token) do
+ message =
+ StreamerView.render("pleroma_respond.json", %{
+ type: "pleroma:authenticate",
+ result: "success"
+ })
+
+ {:reply, :ok, {:text, message}, %{state | user: user, oauth_token: oauth_token}}
+ else
+ {:auth, _, _} ->
+ message =
+ StreamerView.render("pleroma_respond.json", %{
+ type: "pleroma:authenticate",
+ result: "error",
+ error: :already_authenticated
+ })
+
+ {:reply, :error, {:text, message}, state}
+
+ _ ->
+ message =
+ StreamerView.render("pleroma_respond.json", %{
+ type: "pleroma:authenticate",
+ result: "error",
+ error: :unauthorized
+ })
+
+ {:reply, :error, {:text, message}, state}
+ end
+ end
+
+ defp handle_client_event(params, state) do
+ Logger.error("#{__MODULE__} received unknown event: #{inspect(params)}")
+ {:ok, state}
+ end
+
+ def handle_error(conn, :unauthorized) do
+ Plug.Conn.send_resp(conn, 401, "Unauthorized")
+ end
+
+ def handle_error(conn, _reason) do
+ Plug.Conn.send_resp(conn, 404, "Not Found")
end
end
diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
index 3d6716d43..c11484ecb 100644
--- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
@@ -12,6 +12,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
alias Pleroma.Web.MediaProxy
alias Plug.Conn
+ plug(:sandbox)
+
def remote(conn, %{"sig" => sig64, "url" => url64}) do
with {_, true} <- {:enabled, MediaProxy.enabled?()},
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
@@ -54,7 +56,7 @@ defp handle_preview(conn, url) do
media_proxy_url = MediaProxy.url(url)
with {:ok, %{status: status} = head_response} when status in 200..299 <-
- Pleroma.HTTP.request("head", media_proxy_url, [], [], pool: :media) do
+ Pleroma.HTTP.request(:head, media_proxy_url, "", [], pool: :media) do
content_type = Tesla.get_header(head_response, "content-type")
content_length = Tesla.get_header(head_response, "content-length")
content_length = content_length && String.to_integer(content_length)
@@ -202,4 +204,9 @@ defp media_preview_proxy_config do
defp media_proxy_opts do
Config.get([:media_proxy, :proxy_opts], [])
end
+
+ defp sandbox(conn, _params) do
+ conn
+ |> merge_resp_headers([{"content-security-policy", "sandbox;"}])
+ end
end
diff --git a/lib/pleroma/web/metadata/providers/rel_me.ex b/lib/pleroma/web/metadata/providers/rel_me.ex
index f0bee85c8..eabd8cb00 100644
--- a/lib/pleroma/web/metadata/providers/rel_me.ex
+++ b/lib/pleroma/web/metadata/providers/rel_me.ex
@@ -8,12 +8,20 @@ defmodule Pleroma.Web.Metadata.Providers.RelMe do
@impl Provider
def build_tags(%{user: user}) do
- bio_tree = Floki.parse_fragment!(user.bio)
+ profile_tree =
+ user.bio
+ |> append_fields_tag(user.fields)
+ |> Floki.parse_fragment!()
- (Floki.attribute(bio_tree, "link[rel~=me]", "href") ++
- Floki.attribute(bio_tree, "a[rel~=me]", "href"))
+ (Floki.attribute(profile_tree, "link[rel~=me]", "href") ++
+ Floki.attribute(profile_tree, "a[rel~=me]", "href"))
|> Enum.map(fn link ->
{:link, [rel: "me", href: link], []}
end)
end
+
+ defp append_fields_tag(bio, fields) do
+ fields
+ |> Enum.reduce(bio, fn %{"value" => v}, res -> res <> v end)
+ end
end
diff --git a/lib/pleroma/web/metadata/providers/twitter_card.ex b/lib/pleroma/web/metadata/providers/twitter_card.ex
index bf0a12212..426022c65 100644
--- a/lib/pleroma/web/metadata/providers/twitter_card.ex
+++ b/lib/pleroma/web/metadata/providers/twitter_card.ex
@@ -20,12 +20,12 @@ def build_tags(%{activity_id: id, object: object, user: user}) do
[
title_tag(user),
- {:meta, [property: "twitter:description", content: scrubbed_content], []}
+ {:meta, [name: "twitter:description", content: scrubbed_content], []}
] ++
if attachments == [] or Metadata.activity_nsfw?(object) do
[
image_tag(user),
- {:meta, [property: "twitter:card", content: "summary"], []}
+ {:meta, [name: "twitter:card", content: "summary"], []}
]
else
attachments
@@ -37,20 +37,19 @@ def build_tags(%{user: user}) do
with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
[
title_tag(user),
- {:meta, [property: "twitter:description", content: truncated_bio], []},
+ {:meta, [name: "twitter:description", content: truncated_bio], []},
image_tag(user),
- {:meta, [property: "twitter:card", content: "summary"], []}
+ {:meta, [name: "twitter:card", content: "summary"], []}
]
end
end
defp title_tag(user) do
- {:meta, [property: "twitter:title", content: Utils.user_name_string(user)], []}
+ {:meta, [name: "twitter:title", content: Utils.user_name_string(user)], []}
end
def image_tag(user) do
- {:meta, [property: "twitter:image", content: MediaProxy.preview_url(User.avatar_url(user))],
- []}
+ {:meta, [name: "twitter:image", content: MediaProxy.preview_url(User.avatar_url(user))], []}
end
defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
@@ -60,10 +59,10 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
case Utils.fetch_media_type(@media_types, url["mediaType"]) do
"audio" ->
[
- {:meta, [property: "twitter:card", content: "player"], []},
- {:meta, [property: "twitter:player:width", content: "480"], []},
- {:meta, [property: "twitter:player:height", content: "80"], []},
- {:meta, [property: "twitter:player", content: player_url(id)], []}
+ {:meta, [name: "twitter:card", content: "player"], []},
+ {:meta, [name: "twitter:player:width", content: "480"], []},
+ {:meta, [name: "twitter:player:height", content: "80"], []},
+ {:meta, [name: "twitter:player", content: player_url(id)], []}
| acc
]
@@ -74,12 +73,13 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
# workaround.
"image" ->
[
- {:meta, [property: "twitter:card", content: "summary_large_image"], []},
+ {:meta, [name: "twitter:card", content: "summary_large_image"], []},
{:meta,
[
- property: "twitter:player",
+ name: "twitter:image",
content: MediaProxy.url(url["href"])
- ], []}
+ ], []},
+ {:meta, [name: "twitter:image:alt", content: truncate(attachment["name"])], []}
| acc
]
|> maybe_add_dimensions(url)
@@ -90,14 +90,14 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
width = url["width"] || 480
[
- {:meta, [property: "twitter:card", content: "player"], []},
- {:meta, [property: "twitter:player", content: player_url(id)], []},
- {:meta, [property: "twitter:player:width", content: "#{width}"], []},
- {:meta, [property: "twitter:player:height", content: "#{height}"], []},
- {:meta, [property: "twitter:player:stream", content: MediaProxy.url(url["href"])],
+ {:meta, [name: "twitter:card", content: "player"], []},
+ {:meta, [name: "twitter:player", content: player_url(id)], []},
+ {:meta, [name: "twitter:player:width", content: "#{width}"], []},
+ {:meta, [name: "twitter:player:height", content: "#{height}"], []},
+ {:meta, [name: "twitter:player:stream", content: MediaProxy.url(url["href"])],
[]},
- {:meta,
- [property: "twitter:player:stream:content_type", content: url["mediaType"]], []}
+ {:meta, [name: "twitter:player:stream:content_type", content: url["mediaType"]],
+ []}
| acc
]
@@ -123,12 +123,20 @@ defp maybe_add_dimensions(metadata, url) do
!is_nil(url["height"]) && !is_nil(url["width"]) ->
metadata ++
[
- {:meta, [property: "twitter:player:width", content: "#{url["width"]}"], []},
- {:meta, [property: "twitter:player:height", content: "#{url["height"]}"], []}
+ {:meta, [name: "twitter:player:width", content: "#{url["width"]}"], []},
+ {:meta, [name: "twitter:player:height", content: "#{url["height"]}"], []}
]
true ->
metadata
end
end
+
+ defp truncate(nil), do: ""
+
+ defp truncate(text) do
+ # truncate to 420 characters
+ # see https://developer.twitter.com/en/docs/twitter-for-websites/cards/overview/markup
+ Pleroma.Formatter.truncate(text, 420)
+ end
end
diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex
index 8052eaa44..80a8be9a2 100644
--- a/lib/pleroma/web/metadata/utils.ex
+++ b/lib/pleroma/web/metadata/utils.ex
@@ -8,8 +8,8 @@ defmodule Pleroma.Web.Metadata.Utils do
alias Pleroma.Formatter
alias Pleroma.HTML
- def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do
- content
+ defp scrub_html_and_truncate_object_field(field, object) do
+ field
# html content comes from DB already encoded, decode first and scrub after
|> HtmlEntities.decode()
|> String.replace(~r/<br\s?\/?>/, " ")
@@ -19,12 +19,24 @@ def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do
|> Formatter.truncate()
end
- def scrub_html_and_truncate(content, max_length \\ 200) when is_binary(content) do
+ def scrub_html_and_truncate(%{data: %{"summary" => summary}} = object)
+ when is_binary(summary) and summary != "" do
+ summary
+ |> scrub_html_and_truncate_object_field(object)
+ end
+
+ def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do
+ content
+ |> scrub_html_and_truncate_object_field(object)
+ end
+
+ def scrub_html_and_truncate(content, max_length \\ 200, omission \\ "...")
+ when is_binary(content) do
content
|> scrub_html
|> Emoji.Formatter.demojify()
|> HtmlEntities.decode()
- |> Formatter.truncate(max_length)
+ |> Formatter.truncate(max_length, omission)
end
def scrub_html(content) when is_binary(content) do
diff --git a/lib/pleroma/web/multipart.ex b/lib/pleroma/web/multipart.ex
new file mode 100644
index 000000000..e24bb14c2
--- /dev/null
+++ b/lib/pleroma/web/multipart.ex
@@ -0,0 +1,22 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+#
+defmodule Pleroma.Web.Multipart do
+ @multipart Plug.Parsers.MULTIPART
+
+ def init(opts) do
+ opts
+ end
+
+ def parse(conn, "multipart", subtype, headers, opts) do
+ length = Pleroma.Config.get([:instance, :upload_limit])
+ opts = @multipart.init([length: length] ++ opts)
+ @multipart.parse(conn, "multipart", subtype, headers, opts)
+ end
+
+ def parse(conn, _type, _subtype, _headers, _opts) do
+ {:next, conn}
+ end
+end
diff --git a/lib/pleroma/web/nodeinfo/nodeinfo.ex b/lib/pleroma/web/nodeinfo/nodeinfo.ex
index 62d445f34..4d5a9a57f 100644
--- a/lib/pleroma/web/nodeinfo/nodeinfo.ex
+++ b/lib/pleroma/web/nodeinfo/nodeinfo.ex
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.Nodeinfo.Nodeinfo do
alias Pleroma.Config
alias Pleroma.Stats
alias Pleroma.User
- alias Pleroma.Web.Federator.Publisher
+ alias Pleroma.Web.ActivityPub.Publisher
alias Pleroma.Web.MastodonAPI.InstanceView
# returns a nodeinfo 2.0 map, since 2.1 just adds a repository field
@@ -49,6 +49,10 @@ def get_nodeinfo("2.0") do
enabled: false
},
staffAccounts: staff_accounts,
+ roles: %{
+ admin: Config.get([:instance, :admin_privileges]),
+ moderator: Config.get([:instance, :moderator_privileges])
+ },
federation: federation,
pollLimits: Config.get([:instance, :poll_limits]),
postFormats: Config.get([:instance, :allowed_post_formats]),
@@ -69,8 +73,7 @@ def get_nodeinfo("2.0") do
mailerEnabled: Config.get([Pleroma.Emails.Mailer, :enabled], false),
features: features,
restrictedNicknames: Config.get([Pleroma.User, :restricted_nicknames]),
- skipThreadContainment: Config.get([:instance, :skip_thread_containment], false),
- privilegedStaff: Config.get([:instance, :privileged_staff])
+ skipThreadContainment: Config.get([:instance, :skip_thread_containment], false)
}
}
end
diff --git a/lib/pleroma/web/o_auth/authorization.ex b/lib/pleroma/web/o_auth/authorization.ex
index 593d2d66f..22e5bfc53 100644
--- a/lib/pleroma/web/o_auth/authorization.ex
+++ b/lib/pleroma/web/o_auth/authorization.ex
@@ -28,7 +28,7 @@ defmodule Pleroma.Web.OAuth.Authorization do
end
@spec create_authorization(App.t(), User.t() | %{}, [String.t()] | nil) ::
- {:ok, Authorization.t()} | {:error, Changeset.t()}
+ {:ok, Authorization.t()} | {:error, Ecto.Changeset.t()}
def create_authorization(%App{} = app, %User{} = user, scopes \\ nil) do
%{
scopes: scopes || app.scopes,
@@ -39,7 +39,7 @@ def create_authorization(%App{} = app, %User{} = user, scopes \\ nil) do
|> Repo.insert()
end
- @spec create_changeset(map()) :: Changeset.t()
+ @spec create_changeset(map()) :: Ecto.Changeset.t()
def create_changeset(attrs \\ %{}) do
%Authorization{}
|> cast(attrs, [:user_id, :app_id, :scopes, :valid_until])
@@ -58,7 +58,7 @@ defp add_lifetime(changeset) do
put_change(changeset, :valid_until, NaiveDateTime.add(NaiveDateTime.utc_now(), lifespan))
end
- @spec use_changeset(Authtorizatiton.t(), map()) :: Changeset.t()
+ @spec use_changeset(Authorization.t(), map()) :: Ecto.Changeset.t()
def use_changeset(%Authorization{} = auth, params) do
auth
|> cast(params, [:used])
@@ -66,7 +66,7 @@ def use_changeset(%Authorization{} = auth, params) do
end
@spec use_token(Authorization.t()) ::
- {:ok, Authorization.t()} | {:error, Changeset.t()} | {:error, String.t()}
+ {:ok, Authorization.t()} | {:error, Ecto.Changeset.t()} | {:error, String.t()}
def use_token(%Authorization{used: false, valid_until: valid_until} = auth) do
if NaiveDateTime.diff(NaiveDateTime.utc_now(), valid_until) < 0 do
Repo.update(use_changeset(auth, %{used: true}))
diff --git a/lib/pleroma/web/o_auth/o_auth_controller.ex b/lib/pleroma/web/o_auth/o_auth_controller.ex
index c1fb4f378..47b03215f 100644
--- a/lib/pleroma/web/o_auth/o_auth_controller.ex
+++ b/lib/pleroma/web/o_auth/o_auth_controller.ex
@@ -310,7 +310,7 @@ def token_exchange(%Plug.Conn{} = conn, %{"grant_type" => "client_credentials"}
after_token_exchange(conn, %{token: token})
else
_error ->
- handle_token_exchange_error(conn, :invalid_credentails)
+ handle_token_exchange_error(conn, :invalid_credentials)
end
end
@@ -610,13 +610,8 @@ defp build_and_response_mfa_token(user, auth) do
end
end
- @spec validate_scopes(App.t(), map() | list()) ::
+ @spec validate_scopes(App.t(), list()) ::
{:ok, list()} | {:error, :missing_scopes | :unsupported_scopes}
- defp validate_scopes(%App{} = app, params) when is_map(params) do
- requested_scopes = Scopes.fetch_scopes(params, app.scopes)
- validate_scopes(app, requested_scopes)
- end
-
defp validate_scopes(%App{} = app, requested_scopes) when is_list(requested_scopes) do
Scopes.validate(requested_scopes, app.scopes)
end
diff --git a/lib/pleroma/web/o_auth/token.ex b/lib/pleroma/web/o_auth/token.ex
index 26de7bb10..a5ad2e909 100644
--- a/lib/pleroma/web/o_auth/token.ex
+++ b/lib/pleroma/web/o_auth/token.ex
@@ -56,7 +56,8 @@ def get_by_refresh_token(%App{id: app_id} = _app, token) do
|> Repo.find_resource()
end
- @spec exchange_token(App.t(), Authorization.t()) :: {:ok, Token.t()} | {:error, Changeset.t()}
+ @spec exchange_token(App.t(), Authorization.t()) ::
+ {:ok, Token.t()} | {:error, Ecto.Changeset.t()}
def exchange_token(app, auth) do
with {:ok, auth} <- Authorization.use_token(auth),
true <- auth.app_id == app.id do
@@ -95,7 +96,7 @@ defp put_valid_until(changeset, attrs) do
|> validate_required([:valid_until])
end
- @spec create(App.t(), User.t(), map()) :: {:ok, Token} | {:error, Changeset.t()}
+ @spec create(App.t(), User.t(), map()) :: {:ok, Token} | {:error, Ecto.Changeset.t()}
def create(%App{} = app, %User{} = user, attrs \\ %{}) do
with {:ok, token} <- do_create(app, user, attrs) do
if Pleroma.Config.get([:oauth2, :clean_expired_tokens]) do
@@ -137,9 +138,9 @@ def get_user_tokens(%User{id: user_id}) do
|> Repo.all()
end
- def is_expired?(%__MODULE__{valid_until: valid_until}) do
+ def expired?(%__MODULE__{valid_until: valid_until}) do
NaiveDateTime.diff(NaiveDateTime.utc_now(), valid_until) > 0
end
- def is_expired?(_), do: false
+ def expired?(_), do: false
end
diff --git a/lib/pleroma/web/o_auth/token/query.ex b/lib/pleroma/web/o_auth/token/query.ex
index 4a4d2d3ef..6853ec8dd 100644
--- a/lib/pleroma/web/o_auth/token/query.ex
+++ b/lib/pleroma/web/o_auth/token/query.ex
@@ -9,10 +9,10 @@ defmodule Pleroma.Web.OAuth.Token.Query do
import Ecto.Query, only: [from: 2]
- @type query :: Ecto.Queryable.t() | Token.t()
-
alias Pleroma.Web.OAuth.Token
+ @type query :: Ecto.Queryable.t() | Token.t()
+
@spec get_by_refresh_token(query, String.t()) :: query
def get_by_refresh_token(query \\ Token, refresh_token) do
from(q in query, where: q.refresh_token == ^refresh_token)
diff --git a/lib/pleroma/web/o_auth/token/strategy/revoke.ex b/lib/pleroma/web/o_auth/token/strategy/revoke.ex
index 752efca89..3b265b339 100644
--- a/lib/pleroma/web/o_auth/token/strategy/revoke.ex
+++ b/lib/pleroma/web/o_auth/token/strategy/revoke.ex
@@ -21,6 +21,18 @@ def revoke(%App{} = app, %{"token" => token} = _attrs) do
@doc "Revokes access token"
@spec revoke(Token.t()) :: {:ok, Token.t()} | {:error, Ecto.Changeset.t()}
def revoke(%Token{} = token) do
- Repo.delete(token)
+ with {:ok, token} <- Repo.delete(token) do
+ Task.Supervisor.start_child(
+ Pleroma.TaskSupervisor,
+ Pleroma.Web.Streamer,
+ :close_streams_by_oauth_token,
+ [token],
+ restart: :transient
+ )
+
+ {:ok, token}
+ else
+ result -> result
+ end
end
end
diff --git a/lib/pleroma/web/o_status/o_status_controller.ex b/lib/pleroma/web/o_status/o_status_controller.ex
index ea4994bd0..ee7ef4a5d 100644
--- a/lib/pleroma/web/o_status/o_status_controller.ex
+++ b/lib/pleroma/web/o_status/o_status_controller.ex
@@ -37,7 +37,7 @@ def object(conn, _params) do
with id <- Endpoint.url() <> conn.request_path,
{_, %Activity{} = activity} <-
{:activity, Activity.get_create_by_object_ap_id_with_object(id)},
- {_, true} <- {:public?, Visibility.is_public?(activity)} do
+ {_, true} <- {:public?, Visibility.public?(activity)} do
redirect(conn, to: "/notice/#{activity.id}")
else
reason when reason in [{:public?, false}, {:activity, nil}] ->
@@ -56,7 +56,7 @@ def activity(%{assigns: %{format: format}} = conn, _params)
def activity(conn, _params) do
with id <- Endpoint.url() <> conn.request_path,
{_, %Activity{} = activity} <- {:activity, Activity.normalize(id)},
- {_, true} <- {:public?, Visibility.is_public?(activity)} do
+ {_, true} <- {:public?, Visibility.public?(activity)} do
redirect(conn, to: "/notice/#{activity.id}")
else
reason when reason in [{:public?, false}, {:activity, nil}] ->
@@ -69,7 +69,7 @@ def activity(conn, _params) do
def notice(%{assigns: %{format: format}} = conn, %{"id" => id}) do
with {_, %Activity{} = activity} <- {:activity, Activity.get_by_id_with_object(id)},
- {_, true} <- {:public?, Visibility.is_public?(activity)},
+ {_, true} <- {:public?, Visibility.public?(activity)},
%User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
cond do
format in ["json", "activity+json"] ->
@@ -106,13 +106,12 @@ def notice(%{assigns: %{format: format}} = conn, %{"id" => id}) do
# Returns an HTML embedded <audio> or <video> player suitable for embed iframes.
def notice_player(conn, %{"id" => id}) do
with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id_with_object(id),
- true <- Visibility.is_public?(activity),
+ true <- Visibility.public?(activity),
{_, true} <- {:visible?, Visibility.visible_for_user?(activity, _reading_user = nil)},
%Object{} = object <- Object.normalize(activity, fetch: false),
%{data: %{"attachment" => [%{"url" => [url | _]} | _]}} <- object,
true <- String.starts_with?(url["mediaType"], ["audio", "video"]) do
conn
- |> put_layout(:metadata_player)
|> put_resp_header("x-frame-options", "ALLOW")
|> put_resp_header(
"content-security-policy",
diff --git a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex
index 1a0548295..b9daed22b 100644
--- a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do
alias Pleroma.Web.Plugs.OAuthScopesPlug
action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
- plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action in [:index, :create])
+ plug(OAuthScopesPlug, %{scopes: ["read:backups"]} when action in [:index, :create])
plug(Pleroma.Web.ApiSpec.CastAndValidate)
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBackupOperation
diff --git a/lib/pleroma/web/pleroma_api/controllers/bookmark_folder_controller.ex b/lib/pleroma/web/pleroma_api/controllers/bookmark_folder_controller.ex
new file mode 100644
index 000000000..6d6e2e940
--- /dev/null
+++ b/lib/pleroma/web/pleroma_api/controllers/bookmark_folder_controller.ex
@@ -0,0 +1,68 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.PleromaAPI.BookmarkFolderController do
+ use Pleroma.Web, :controller
+
+ alias Pleroma.BookmarkFolder
+ alias Pleroma.Web.Plugs.OAuthScopesPlug
+
+ plug(Pleroma.Web.ApiSpec.CastAndValidate)
+
+ # Note: scope not present in Mastodon: read:bookmarks
+ plug(OAuthScopesPlug, %{scopes: ["read:bookmarks"]} when action == :index)
+
+ # Note: scope not present in Mastodon: write:bookmarks
+ plug(
+ OAuthScopesPlug,
+ %{scopes: ["write:bookmarks"]} when action in [:create, :update, :delete]
+ )
+
+ defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBookmarkFolderOperation
+
+ action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
+
+ def index(%{assigns: %{user: user}} = conn, _params) do
+ with folders <- BookmarkFolder.for_user(user.id) do
+ conn
+ |> render("index.json", %{folders: folders, as: :folder})
+ end
+ end
+
+ def create(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: params}}} = conn,
+ _
+ ) do
+ with {:ok, folder} <- BookmarkFolder.create(user.id, params[:name], params[:emoji]) do
+ render(conn, "show.json", folder: folder)
+ end
+ end
+
+ def update(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: params, params: %{id: id}}}
+ } = conn,
+ _
+ ) do
+ with true <- BookmarkFolder.belongs_to_user?(id, user.id),
+ {:ok, folder} <- BookmarkFolder.update(id, params[:name], params[:emoji]) do
+ render(conn, "show.json", folder: folder)
+ else
+ false -> {:error, :forbidden}
+ end
+ end
+
+ def delete(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn,
+ _
+ ) do
+ with true <- BookmarkFolder.belongs_to_user?(id, user.id),
+ {:ok, folder} <- BookmarkFolder.delete(id) do
+ render(conn, "show.json", folder: folder)
+ else
+ false -> {:error, :forbidden}
+ end
+ end
+end
diff --git a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex
index 3d7b6a4a7..58780ace2 100644
--- a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex
@@ -38,14 +38,24 @@ defmodule Pleroma.Web.PleromaAPI.ChatController do
%{scopes: ["read:chats"]} when action in [:messages, :index, :index2, :show]
)
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.ChatOperation
- def delete_message(%{assigns: %{user: %{id: user_id} = user}} = conn, %{
- message_id: message_id,
- id: chat_id
- }) do
+ def delete_message(
+ %{
+ assigns: %{user: %{id: user_id} = user},
+ private: %{
+ open_api_spex: %{
+ params: %{
+ message_id: message_id,
+ id: chat_id
+ }
+ }
+ }
+ } = conn,
+ _
+ ) do
with %MessageReference{} = cm_ref <-
MessageReference.get_by_id(message_id),
^chat_id <- to_string(cm_ref.chat_id),
@@ -72,11 +82,14 @@ defp remove_or_delete(
defp remove_or_delete(cm_ref, _), do: MessageReference.delete(cm_ref)
def post_chat_message(
- %{body_params: params, assigns: %{user: user}} = conn,
- %{id: id}
+ %{
+ private: %{open_api_spex: %{body_params: params, params: %{id: id}}},
+ assigns: %{user: user}
+ } = conn,
+ _
) do
with {:ok, chat} <- Chat.get_by_user_and_id(user, id),
- %User{} = recipient <- User.get_cached_by_ap_id(chat.recipient),
+ {_, %User{} = recipient} <- {:user, User.get_cached_by_ap_id(chat.recipient)},
{:ok, activity} <-
CommonAPI.post_chat_message(user, recipient, params[:content],
media_id: params[:media_id],
@@ -97,12 +110,20 @@ def post_chat_message(
conn
|> put_status(:bad_request)
|> json(%{error: message})
+
+ {:user, nil} ->
+ conn
+ |> put_status(:bad_request)
+ |> json(%{error: "Recipient does not exist"})
end
end
def mark_message_as_read(
- %{assigns: %{user: %{id: user_id}}} = conn,
- %{id: chat_id, message_id: message_id}
+ %{
+ assigns: %{user: %{id: user_id}},
+ private: %{open_api_spex: %{params: %{id: chat_id, message_id: message_id}}}
+ } = conn,
+ _
) do
with %MessageReference{} = cm_ref <- MessageReference.get_by_id(message_id),
^chat_id <- to_string(cm_ref.chat_id),
@@ -115,8 +136,16 @@ def mark_message_as_read(
end
def mark_as_read(
- %{body_params: %{last_read_id: last_read_id}, assigns: %{user: user}} = conn,
- %{id: id}
+ %{
+ assigns: %{user: user},
+ private: %{
+ open_api_spex: %{
+ body_params: %{last_read_id: last_read_id},
+ params: %{id: id}
+ }
+ }
+ } = conn,
+ _
) do
with {:ok, chat} <- Chat.get_by_user_and_id(user, id),
{_n, _} <- MessageReference.set_all_seen_for_chat(chat, last_read_id) do
@@ -124,7 +153,13 @@ def mark_as_read(
end
end
- def messages(%{assigns: %{user: user}} = conn, %{id: id} = params) do
+ def messages(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{params: %{id: id} = params}}
+ } = conn,
+ _
+ ) do
with {:ok, chat} <- Chat.get_by_user_and_id(user, id) do
chat_message_refs =
chat
@@ -138,7 +173,7 @@ def messages(%{assigns: %{user: user}} = conn, %{id: id} = params) do
end
end
- def index(%{assigns: %{user: user}} = conn, params) do
+ def index(%{assigns: %{user: user}, private: %{open_api_spex: %{params: params}}} = conn, _) do
chats =
index_query(user, params)
|> Repo.all()
@@ -146,7 +181,7 @@ def index(%{assigns: %{user: user}} = conn, params) do
render(conn, "index.json", chats: chats)
end
- def index2(%{assigns: %{user: user}} = conn, params) do
+ def index2(%{assigns: %{user: user}, private: %{open_api_spex: %{params: params}}} = conn, _) do
chats =
index_query(user, params)
|> Pagination.fetch_paginated(params)
@@ -166,14 +201,14 @@ defp index_query(%{id: user_id} = user, params) do
|> where([c], c.recipient not in ^exclude_users)
end
- def create(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def create(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with %User{ap_id: recipient} <- User.get_cached_by_id(id),
{:ok, %Chat{} = chat} <- Chat.get_or_create(user.id, recipient) do
render(conn, "show.json", chat: chat)
end
end
- def show(%{assigns: %{user: user}} = conn, %{id: id}) do
+ def show(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do
with {:ok, chat} <- Chat.get_by_user_and_id(user, id) do
render(conn, "show.json", chat: chat)
end
diff --git a/lib/pleroma/web/pleroma_api/controllers/emoji_file_controller.ex b/lib/pleroma/web/pleroma_api/controllers/emoji_file_controller.ex
index f854cf9c1..00fb30451 100644
--- a/lib/pleroma/web/pleroma_api/controllers/emoji_file_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/emoji_file_controller.ex
@@ -8,7 +8,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiFileController do
alias Pleroma.Emoji.Pack
alias Pleroma.Web.ApiSpec
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
Pleroma.Web.Plugs.OAuthScopesPlug,
@@ -22,7 +22,10 @@ defmodule Pleroma.Web.PleromaAPI.EmojiFileController do
defdelegate open_api_operation(action), to: ApiSpec.PleromaEmojiFileOperation
- def create(%{body_params: params} = conn, %{name: pack_name}) do
+ def create(
+ %{private: %{open_api_spex: %{body_params: params, params: %{name: pack_name}}}} = conn,
+ _
+ ) do
filename = params[:filename] || get_filename(params[:file])
shortcode = params[:shortcode] || Path.basename(filename, Path.extname(filename))
@@ -49,7 +52,17 @@ def create(%{body_params: params} = conn, %{name: pack_name}) do
end
end
- def update(%{body_params: %{shortcode: shortcode} = params} = conn, %{name: pack_name}) do
+ def update(
+ %{
+ private: %{
+ open_api_spex: %{
+ body_params: %{shortcode: shortcode} = params,
+ params: %{name: pack_name}
+ }
+ }
+ } = conn,
+ _
+ ) do
new_shortcode = params[:new_shortcode]
new_filename = params[:new_filename]
force = params[:force]
@@ -80,7 +93,10 @@ def update(%{body_params: %{shortcode: shortcode} = params} = conn, %{name: pack
end
end
- def delete(conn, %{name: pack_name, shortcode: shortcode}) do
+ def delete(
+ %{private: %{open_api_spex: %{params: %{name: pack_name, shortcode: shortcode}}}} = conn,
+ _
+ ) do
with {:ok, pack} <- Pack.load_pack(pack_name),
{:ok, pack} <- Pack.delete_file(pack, shortcode) do
json(conn, pack.files)
diff --git a/lib/pleroma/web/pleroma_api/controllers/emoji_pack_controller.ex b/lib/pleroma/web/pleroma_api/controllers/emoji_pack_controller.ex
index 420fea12c..32360d2a2 100644
--- a/lib/pleroma/web/pleroma_api/controllers/emoji_pack_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/emoji_pack_controller.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiPackController do
alias Pleroma.Emoji.Pack
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
Pleroma.Web.Plugs.OAuthScopesPlug,
@@ -26,7 +26,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiPackController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaEmojiPackOperation
- def remote(conn, params) do
+ def remote(%{private: %{open_api_spex: %{params: params}}} = conn, _) do
with {:ok, packs} <-
Pack.list_remote(url: params.url, page_size: params.page_size, page: params.page) do
json(conn, packs)
@@ -38,7 +38,7 @@ def remote(conn, params) do
end
end
- def index(conn, params) do
+ def index(%{private: %{open_api_spex: %{params: params}}} = conn, _) do
emoji_path =
[:instance, :static_dir]
|> Pleroma.Config.get!()
@@ -61,7 +61,11 @@ def index(conn, params) do
end
end
- def show(conn, %{name: name, page: page, page_size: page_size}) do
+ def show(
+ %{private: %{open_api_spex: %{params: %{name: name, page: page, page_size: page_size}}}} =
+ conn,
+ _
+ ) do
name = String.trim(name)
with {:ok, pack} <- Pack.show(name: name, page: page, page_size: page_size) do
@@ -90,7 +94,7 @@ def show(conn, %{name: name, page: page, page_size: page_size}) do
end
end
- def archive(conn, %{name: name}) do
+ def archive(%{private: %{open_api_spex: %{params: %{name: name}}}} = conn, _) do
with {:ok, archive} <- Pack.get_archive(name) do
send_download(conn, {:binary, archive}, filename: "#{name}.zip")
else
@@ -109,7 +113,10 @@ def archive(conn, %{name: name}) do
end
end
- def download(%{body_params: %{url: url, name: name} = params} = conn, _) do
+ def download(
+ %{private: %{open_api_spex: %{body_params: %{url: url, name: name} = params}}} = conn,
+ _
+ ) do
with {:ok, _pack} <- Pack.download(name, url, params[:as]) do
json(conn, "ok")
else
@@ -130,7 +137,7 @@ def download(%{body_params: %{url: url, name: name} = params} = conn, _) do
end
end
- def create(conn, %{name: name}) do
+ def create(%{private: %{open_api_spex: %{params: %{name: name}}}} = conn, _) do
name = String.trim(name)
with {:ok, _pack} <- Pack.create(name) do
@@ -159,7 +166,7 @@ def create(conn, %{name: name}) do
end
end
- def delete(conn, %{name: name}) do
+ def delete(%{private: %{open_api_spex: %{params: %{name: name}}}} = conn, _) do
name = String.trim(name)
with {:ok, deleted} when deleted != [] <- Pack.delete(name) do
@@ -184,7 +191,11 @@ def delete(conn, %{name: name}) do
end
end
- def update(%{body_params: %{metadata: metadata}} = conn, %{name: name}) do
+ def update(
+ %{private: %{open_api_spex: %{body_params: %{metadata: metadata}, params: %{name: name}}}} =
+ conn,
+ _
+ ) do
with {:ok, pack} <- Pack.update_metadata(name, metadata) do
json(conn, pack.pack)
else
diff --git a/lib/pleroma/web/pleroma_api/controllers/emoji_reaction_controller.ex b/lib/pleroma/web/pleroma_api/controllers/emoji_reaction_controller.ex
index 78fd0b219..662cc15d6 100644
--- a/lib/pleroma/web/pleroma_api/controllers/emoji_reaction_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/emoji_reaction_controller.ex
@@ -28,8 +28,8 @@ defmodule Pleroma.Web.PleromaAPI.EmojiReactionController do
def index(%{assigns: %{user: user}} = conn, %{id: activity_id} = params) do
with true <- Pleroma.Config.get([:instance, :show_reactions]),
%Activity{} = activity <- Activity.get_by_id_with_object(activity_id),
- %Object{data: %{"reactions" => reactions}} when is_list(reactions) <-
- Object.normalize(activity, fetch: false) do
+ %Object{} = object <- Object.normalize(activity, fetch: false),
+ reactions <- Object.get_emoji_reactions(object) do
reactions =
reactions
|> filter(params)
@@ -50,29 +50,32 @@ def filter_allowed_users(reactions, user, with_muted) do
if not with_muted, do: User.cached_muted_users_ap_ids(user), else: []
end
- filter_emoji = fn emoji, users ->
+ filter_emoji = fn emoji, users, url ->
case Enum.reject(users, &(&1 in exclude_ap_ids)) do
[] -> nil
- users -> {emoji, users}
+ users -> {emoji, users, url}
end
end
reactions
|> Stream.map(fn
- [emoji, users] when is_list(users) -> filter_emoji.(emoji, users)
- {emoji, users} when is_list(users) -> filter_emoji.(emoji, users)
- _ -> nil
+ [emoji, users, url] when is_list(users) -> filter_emoji.(emoji, users, url)
end)
|> Stream.reject(&is_nil/1)
end
defp filter(reactions, %{emoji: emoji}) when is_binary(emoji) do
- Enum.filter(reactions, fn [e, _] -> e == emoji end)
+ Enum.filter(reactions, fn [e, _, _] -> e == emoji end)
end
defp filter(reactions, _), do: reactions
def create(%{assigns: %{user: user}} = conn, %{id: activity_id, emoji: emoji}) do
+ emoji =
+ emoji
+ |> Pleroma.Emoji.fully_qualify_emoji()
+ |> Pleroma.Emoji.maybe_quote()
+
with {:ok, _activity} <- CommonAPI.react_with_emoji(activity_id, user, emoji) do
activity = Activity.get_by_id(activity_id)
@@ -83,6 +86,11 @@ def create(%{assigns: %{user: user}} = conn, %{id: activity_id, emoji: emoji}) d
end
def delete(%{assigns: %{user: user}} = conn, %{id: activity_id, emoji: emoji}) do
+ emoji =
+ emoji
+ |> Pleroma.Emoji.fully_qualify_emoji()
+ |> Pleroma.Emoji.maybe_quote()
+
with {:ok, _activity} <- CommonAPI.unreact_with_emoji(activity_id, user, emoji) do
activity = Activity.get_by_id(activity_id)
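
Reactions are now normalized before reaching CommonAPI, and the stored reaction format carries the emoji URL as a third element. A sketch of the normalization step, assuming the helpers behave as used above (`fully_qualify_emoji/1` upgrading partially-qualified Unicode emoji, `maybe_quote/1` wrapping custom shortcodes in colons):

    emoji =
      "dinosaur"
      |> Pleroma.Emoji.fully_qualify_emoji()
      |> Pleroma.Emoji.maybe_quote()

    # A custom shortcode would come out as ":dinosaur:"; a Unicode emoji passes
    # through unquoted, with any missing variation selectors added.
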
diff --git a/lib/pleroma/web/pleroma_api/controllers/mascot_controller.ex b/lib/pleroma/web/pleroma_api/controllers/mascot_controller.ex
index 66e9d8481..3208cde98 100644
--- a/lib/pleroma/web/pleroma_api/controllers/mascot_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/mascot_controller.ex
@@ -10,7 +10,7 @@ defmodule Pleroma.Web.PleromaAPI.MascotController do
alias Pleroma.Web.Plugs.OAuthScopesPlug
plug(Majic.Plug, [pool: Pleroma.MajicPool] when action in [:update])
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action == :show)
plug(OAuthScopesPlug, %{scopes: ["write:accounts"]} when action != :show)
@@ -22,9 +22,13 @@ def show(%{assigns: %{user: user}} = conn, _params) do
end
@doc "PUT /api/v1/pleroma/mascot"
- def update(%{assigns: %{user: user}, body_params: %{file: file}} = conn, _) do
+ def update(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: %{file: file}}}} =
+ conn,
+ _
+ ) do
with {:content_type, "image" <> _} <- {:content_type, file.content_type},
- {:ok, object} <- ActivityPub.upload(file, actor: User.ap_id(user)) do
+ {_, {:ok, object}} <- {:upload, ActivityPub.upload(file, actor: User.ap_id(user))} do
attachment = render_attachment(object)
{:ok, _user} = User.mascot_update(user, attachment)
@@ -32,6 +36,9 @@ def update(%{assigns: %{user: user}, body_params: %{file: file}} = conn, _) do
else
{:content_type, _} ->
render_error(conn, :unsupported_media_type, "mascots can only be images")
+
+ {:upload, {:error, _}} ->
+ render_error(conn, :error, "error uploading file")
end
end
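
Tagging the upload step as `{:upload, ...}` is what lets the `else` block tell an upload failure apart from the content-type check. The same pattern in isolation (the `do_upload/1` helper is hypothetical):

    with {:content_type, "image" <> _} <- {:content_type, upload.content_type},
         {:upload, {:ok, object}} <- {:upload, do_upload(upload)} do
      {:ok, object}
    else
      # Each tag identifies the failing step.
      {:content_type, _} -> {:error, :unsupported_media_type}
      {:upload, {:error, reason}} -> {:error, {:upload_failed, reason}}
    end
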
diff --git a/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex b/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex
index 87ea81cef..435ccfabe 100644
--- a/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/notification_controller.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.Web.PleromaAPI.NotificationController do
alias Pleroma.Notification
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
plug(
Pleroma.Web.Plugs.OAuthScopesPlug,
@@ -16,9 +16,16 @@ defmodule Pleroma.Web.PleromaAPI.NotificationController do
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaNotificationOperation
- def mark_as_read(%{assigns: %{user: user}, body_params: %{id: notification_id}} = conn, _) do
- with {:ok, notification} <- Notification.read_one(user, notification_id) do
- render(conn, "show.json", notification: notification, for: user)
+ def mark_as_read(
+ %{
+ assigns: %{user: user},
+ private: %{open_api_spex: %{body_params: %{id: notification_id}}}
+ } = conn,
+ _
+ ) do
+ with {:ok, _} <- Notification.read_one(user, notification_id) do
+ conn
+ |> json("ok")
else
{:error, message} ->
conn
@@ -27,12 +34,19 @@ def mark_as_read(%{assigns: %{user: user}, body_params: %{id: notification_id}}
end
end
- def mark_as_read(%{assigns: %{user: user}, body_params: %{max_id: max_id}} = conn, _) do
- notifications =
- user
- |> Notification.set_read_up_to(max_id)
- |> Enum.take(80)
-
- render(conn, "index.json", notifications: notifications, for: user)
+ def mark_as_read(
+ %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: %{max_id: max_id}}}} =
+ conn,
+ _
+ ) do
+ with {:ok, _} <- Notification.set_read_up_to(user, max_id) do
+ conn
+ |> json("ok")
+ else
+ {:error, message} ->
+ conn
+ |> put_status(:bad_request)
+ |> json(%{"error" => message})
+ end
end
end
diff --git a/lib/pleroma/web/pleroma_api/controllers/settings_controller.ex b/lib/pleroma/web/pleroma_api/controllers/settings_controller.ex
new file mode 100644
index 000000000..1136575b6
--- /dev/null
+++ b/lib/pleroma/web/pleroma_api/controllers/settings_controller.ex
@@ -0,0 +1,79 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.PleromaAPI.SettingsController do
+ use Pleroma.Web, :controller
+
+ alias Pleroma.Web.Plugs.OAuthScopesPlug
+
+ plug(Pleroma.Web.ApiSpec.CastAndValidate)
+
+ plug(
+ OAuthScopesPlug,
+ %{scopes: ["write:accounts"]} when action in [:update]
+ )
+
+ plug(
+ OAuthScopesPlug,
+ %{scopes: ["read:accounts"]} when action in [:show]
+ )
+
+ defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaSettingsOperation
+
+ @doc "GET /api/v1/pleroma/settings/:app"
+ def show(%{assigns: %{user: user}} = conn, %{app: app} = _params) do
+ conn
+ |> json(get_settings(user, app))
+ end
+
+ @doc "PATCH /api/v1/pleroma/settings/:app"
+ def update(%{assigns: %{user: user}, body_params: body_params} = conn, %{app: app} = _params) do
+ settings =
+ get_settings(user, app)
+ |> merge_recursively(body_params)
+
+ with changeset <-
+ Pleroma.User.update_changeset(
+ user,
+ %{pleroma_settings_store: %{app => settings}}
+ ),
+ {:ok, _} <- Pleroma.Repo.update(changeset) do
+ conn
+ |> json(settings)
+ end
+ end
+
+ defp merge_recursively(old, %{} = new) do
+ old = ensure_object(old)
+
+ Enum.reduce(
+ new,
+ old,
+ fn
+ {k, nil}, acc ->
+ Map.drop(acc, [k])
+
+ {k, %{} = new_child}, acc ->
+ Map.put(acc, k, merge_recursively(acc[k], new_child))
+
+ {k, v}, acc ->
+ Map.put(acc, k, v)
+ end
+ )
+ end
+
+ defp get_settings(user, app) do
+ user.pleroma_settings_store
+ |> Map.get(app, %{})
+ |> ensure_object()
+ end
+
+ defp ensure_object(%{} = object) do
+ object
+ end
+
+ defp ensure_object(_) do
+ %{}
+ end
+end
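
`merge_recursively/2` gives the PATCH endpoint partial-update semantics: nested maps merge key by key, an explicit `nil` removes a key, and scalars overwrite. A worked example of the merge as written above:

    old = %{"a" => %{"x" => 1, "y" => 2}, "b" => 3}
    new = %{"a" => %{"y" => nil, "z" => 4}, "b" => 5}

    merge_recursively(old, new)
    # => %{"a" => %{"x" => 1, "z" => 4}, "b" => 5}
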
diff --git a/lib/pleroma/web/pleroma_api/controllers/status_controller.ex b/lib/pleroma/web/pleroma_api/controllers/status_controller.ex
new file mode 100644
index 000000000..482662fdd
--- /dev/null
+++ b/lib/pleroma/web/pleroma_api/controllers/status_controller.ex
@@ -0,0 +1,66 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.PleromaAPI.StatusController do
+ use Pleroma.Web, :controller
+
+ import Pleroma.Web.ControllerHelper, only: [add_link_headers: 2]
+
+ require Ecto.Query
+ require Pleroma.Constants
+
+ alias Pleroma.Activity
+ alias Pleroma.User
+ alias Pleroma.Web.ActivityPub.ActivityPub
+ alias Pleroma.Web.ActivityPub.Visibility
+ alias Pleroma.Web.MastodonAPI.StatusView
+ alias Pleroma.Web.Plugs.OAuthScopesPlug
+
+ plug(Pleroma.Web.ApiSpec.CastAndValidate)
+
+ action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
+
+ plug(
+ OAuthScopesPlug,
+ %{scopes: ["read:statuses"], fallback: :proceed_unauthenticated} when action == :quotes
+ )
+
+ defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaStatusOperation
+
+ @doc "GET /api/v1/pleroma/statuses/:id/quotes"
+ def quotes(%{assigns: %{user: user}} = conn, %{id: id} = params) do
+ with %Activity{object: object} = activity <- Activity.get_by_id_with_object(id),
+ true <- Visibility.visible_for_user?(activity, user) do
+ params =
+ params
+ |> Map.put(:type, "Create")
+ |> Map.put(:blocking_user, user)
+ |> Map.put(:quote_url, object.data["id"])
+
+ recipients =
+ if user do
+ [Pleroma.Constants.as_public()] ++ [user.ap_id | User.following(user)]
+ else
+ [Pleroma.Constants.as_public()]
+ end
+
+ activities =
+ recipients
+ |> ActivityPub.fetch_activities(params)
+ |> Enum.reverse()
+
+ conn
+ |> add_link_headers(activities)
+ |> put_view(StatusView)
+ |> render("index.json",
+ activities: activities,
+ for: user,
+ as: :activity
+ )
+ else
+ nil -> {:error, :not_found}
+ false -> {:error, :not_found}
+ end
+ end
+end
diff --git a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex
index 90428a532..96466f192 100644
--- a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex
+++ b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex
@@ -15,14 +15,21 @@ defmodule Pleroma.Web.PleromaAPI.UserImportController do
plug(OAuthScopesPlug, %{scopes: ["follow", "write:blocks"]} when action == :blocks)
plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action == :mutes)
- plug(Pleroma.Web.ApiSpec.CastAndValidate)
+ plug(Pleroma.Web.ApiSpec.CastAndValidate, replace_params: false)
defdelegate open_api_operation(action), to: ApiSpec.UserImportOperation
- def follow(%{body_params: %{list: %Plug.Upload{path: path}}} = conn, _) do
- follow(%Plug.Conn{conn | body_params: %{list: File.read!(path)}}, %{})
+ def follow(
+ %{private: %{open_api_spex: %{body_params: %{list: %Plug.Upload{path: path}}}}} = conn,
+ _
+ ) do
+ list = File.read!(path)
+ do_follow(conn, list)
end
- def follow(%{assigns: %{user: follower}, body_params: %{list: list}} = conn, _) do
+ def follow(%{private: %{open_api_spex: %{body_params: %{list: list}}}} = conn, _),
+ do: do_follow(conn, list)
+
+ def do_follow(%{assigns: %{user: follower}} = conn, list) do
identifiers =
list
|> String.split("\n")
@@ -35,20 +42,34 @@ def follow(%{assigns: %{user: follower}, body_params: %{list: list}} = conn, _)
json(conn, "job started")
end
- def blocks(%{body_params: %{list: %Plug.Upload{path: path}}} = conn, _) do
- blocks(%Plug.Conn{conn | body_params: %{list: File.read!(path)}}, %{})
+ def blocks(
+ %{private: %{open_api_spex: %{body_params: %{list: %Plug.Upload{path: path}}}}} = conn,
+ _
+ ) do
+ list = File.read!(path)
+ do_block(conn, list)
end
- def blocks(%{assigns: %{user: blocker}, body_params: %{list: list}} = conn, _) do
+ def blocks(%{private: %{open_api_spex: %{body_params: %{list: list}}}} = conn, _),
+ do: do_block(conn, list)
+
+ defp do_block(%{assigns: %{user: blocker}} = conn, list) do
User.Import.blocks_import(blocker, prepare_user_identifiers(list))
json(conn, "job started")
end
- def mutes(%{body_params: %{list: %Plug.Upload{path: path}}} = conn, _) do
- mutes(%Plug.Conn{conn | body_params: %{list: File.read!(path)}}, %{})
+ def mutes(
+ %{private: %{open_api_spex: %{body_params: %{list: %Plug.Upload{path: path}}}}} = conn,
+ _
+ ) do
+ list = File.read!(path)
+ do_mute(conn, list)
end
- def mutes(%{assigns: %{user: user}, body_params: %{list: list}} = conn, _) do
+ def mutes(%{private: %{open_api_spex: %{body_params: %{list: list}}}} = conn, _),
+ do: do_mute(conn, list)
+
+ defp do_mute(%{assigns: %{user: user}} = conn, list) do
User.Import.mutes_import(user, prepare_user_identifiers(list))
json(conn, "job started")
end
diff --git a/lib/pleroma/web/pleroma_api/views/backup_view.ex b/lib/pleroma/web/pleroma_api/views/backup_view.ex
index d778590f0..20403aeee 100644
--- a/lib/pleroma/web/pleroma_api/views/backup_view.ex
+++ b/lib/pleroma/web/pleroma_api/views/backup_view.ex
@@ -9,12 +9,22 @@ defmodule Pleroma.Web.PleromaAPI.BackupView do
alias Pleroma.Web.CommonAPI.Utils
def render("show.json", %{backup: %Backup{} = backup}) do
+ # To deal with records before the migration
+ state =
+ if backup.state == :invalid do
+ if backup.processed, do: :complete, else: :failed
+ else
+ backup.state
+ end
+
%{
id: backup.id,
content_type: backup.content_type,
url: download_url(backup),
file_size: backup.file_size,
processed: backup.processed,
+ state: to_string(state),
+ processed_number: backup.processed_number,
inserted_at: Utils.to_masto_date(backup.inserted_at)
}
end
diff --git a/lib/pleroma/web/pleroma_api/views/bookmark_folder_view.ex b/lib/pleroma/web/pleroma_api/views/bookmark_folder_view.ex
new file mode 100644
index 000000000..12decb816
--- /dev/null
+++ b/lib/pleroma/web/pleroma_api/views/bookmark_folder_view.ex
@@ -0,0 +1,42 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.PleromaAPI.BookmarkFolderView do
+ use Pleroma.Web, :view
+
+ alias Pleroma.BookmarkFolder
+ alias Pleroma.Emoji
+ alias Pleroma.Web.Endpoint
+
+ def render("show.json", %{folder: %BookmarkFolder{} = folder}) do
+ %{
+ id: folder.id |> to_string(),
+ name: folder.name,
+ emoji: folder.emoji,
+ emoji_url: get_emoji_url(folder.emoji)
+ }
+ end
+
+ def render("index.json", %{folders: folders} = opts) do
+ render_many(folders, __MODULE__, "show.json", Map.delete(opts, :folders))
+ end
+
+ defp get_emoji_url(nil) do
+ nil
+ end
+
+ defp get_emoji_url(emoji) do
+ if Emoji.unicode?(emoji) do
+ nil
+ else
+ emoji = Emoji.get(emoji)
+
+ if emoji != nil do
+ Endpoint.url() |> URI.merge(emoji.file) |> to_string()
+ else
+ nil
+ end
+ end
+ end
+end
diff --git a/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex b/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex
index 241bf0010..a1c88d075 100644
--- a/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex
+++ b/lib/pleroma/web/pleroma_api/views/chat/message_reference_view.ex
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.PleromaAPI.Chat.MessageReferenceView do
alias Pleroma.User
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.MastodonAPI.StatusView
+ alias Pleroma.Web.RichMedia.Card
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@@ -23,6 +24,12 @@ def render(
}
}
) do
+ card =
+ case Card.get_by_object(object) do
+ %Card{} = card_data -> StatusView.render("card.json", card_data)
+ _ -> nil
+ end
+
%{
id: id |> to_string(),
content: chat_message["content"],
@@ -34,11 +41,7 @@ def render(
chat_message["attachment"] &&
StatusView.render("attachment.json", attachment: chat_message["attachment"]),
unread: unread,
- card:
- StatusView.render(
- "card.json",
- Pleroma.Web.RichMedia.Helpers.fetch_data_for_object(object)
- )
+ card: card
}
|> put_idempotency_key()
end
diff --git a/lib/pleroma/web/pleroma_api/views/emoji_reaction_view.ex b/lib/pleroma/web/pleroma_api/views/emoji_reaction_view.ex
index 68ebd8292..6df4ab9d0 100644
--- a/lib/pleroma/web/pleroma_api/views/emoji_reaction_view.ex
+++ b/lib/pleroma/web/pleroma_api/views/emoji_reaction_view.ex
@@ -7,17 +7,30 @@ defmodule Pleroma.Web.PleromaAPI.EmojiReactionView do
alias Pleroma.Web.MastodonAPI.AccountView
+ def emoji_name(emoji, nil), do: emoji
+
+ def emoji_name(emoji, url) do
+ url = URI.parse(url)
+
+ if url.host == Pleroma.Web.Endpoint.host() do
+ emoji
+ else
+ "#{emoji}@#{url.host}"
+ end
+ end
+
def render("index.json", %{emoji_reactions: emoji_reactions} = opts) do
render_many(emoji_reactions, __MODULE__, "show.json", opts)
end
- def render("show.json", %{emoji_reaction: {emoji, user_ap_ids}, user: user}) do
+ def render("show.json", %{emoji_reaction: {emoji, user_ap_ids, url}, user: user}) do
users = fetch_users(user_ap_ids)
%{
- name: emoji,
+ name: emoji_name(emoji, url),
count: length(users),
accounts: render(AccountView, "index.json", users: users, for: user),
+ url: Pleroma.Web.MediaProxy.url(url),
me: !!(user && user.ap_id in user_ap_ids)
}
end
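
`emoji_name/2` leaves local reactions untouched and appends the origin host for remote custom emoji, so clients can distinguish same-named shortcodes coming from different instances. For example, assuming the local endpoint host is `lain.com` (hosts here are illustrative):

    emoji_name("blobcat", nil)
    # => "blobcat"                  (Unicode emoji or legacy reaction without a URL)

    emoji_name("blobcat", "https://lain.com/emoji/blobcat.png")
    # => "blobcat"                  (local custom emoji)

    emoji_name("blobcat", "https://remote.example/emoji/blobcat.png")
    # => "blobcat@remote.example"   (remote custom emoji)
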
diff --git a/lib/pleroma/web/pleroma_api/views/scrobble_view.ex b/lib/pleroma/web/pleroma_api/views/scrobble_view.ex
index a5985fb2a..edf0a2390 100644
--- a/lib/pleroma/web/pleroma_api/views/scrobble_view.ex
+++ b/lib/pleroma/web/pleroma_api/views/scrobble_view.ex
@@ -27,6 +27,7 @@ def render("show.json", %{activity: %Activity{data: %{"type" => "Listen"}} = act
title: object.data["title"] |> HTML.strip_tags(),
artist: object.data["artist"] |> HTML.strip_tags(),
album: object.data["album"] |> HTML.strip_tags(),
+ externalLink: object.data["externalLink"],
length: object.data["length"]
}
end
diff --git a/lib/pleroma/web/plugs/authentication_plug.ex b/lib/pleroma/web/plugs/authentication_plug.ex
index a7fd697b5..f912a1542 100644
--- a/lib/pleroma/web/plugs/authentication_plug.ex
+++ b/lib/pleroma/web/plugs/authentication_plug.ex
@@ -38,10 +38,6 @@ def call(
def call(conn, _), do: conn
- def checkpw(password, "$6" <> _ = password_hash) do
- :crypt.crypt(password, password_hash) == password_hash
- end
-
def checkpw(password, "$2" <> _ = password_hash) do
# Handle bcrypt passwords for Mastodon migration
Bcrypt.verify_pass(password, password_hash)
@@ -60,10 +56,6 @@ def maybe_update_password(%User{password_hash: "$2" <> _} = user, password) do
do_update_password(user, password)
end
- def maybe_update_password(%User{password_hash: "$6" <> _} = user, password) do
- do_update_password(user, password)
- end
-
def maybe_update_password(user, _), do: {:ok, user}
defp do_update_password(user, password) do
diff --git a/lib/pleroma/web/plugs/cache.ex b/lib/pleroma/web/plugs/cache.ex
index 667477857..5a7e86ef7 100644
--- a/lib/pleroma/web/plugs/cache.ex
+++ b/lib/pleroma/web/plugs/cache.ex
@@ -20,7 +20,7 @@ defmodule Pleroma.Web.Plugs.Cache do
- `ttl`: An expiration time (time-to-live). This value should be in milliseconds or `nil` to disable expiration. Defaults to `nil`.
- `query_params`: Take URL query string into account (`true`), ignore it (`false`) or limit to specific params only (list). Defaults to `true`.
- - `tracking_fun`: A function that is called on successfull responses, no matter if the request is cached or not. It should accept a conn as the first argument and the value assigned to `tracking_fun_data` as the second.
+ - `tracking_fun`: A function that is called on successful responses, no matter if the request is cached or not. It should accept a conn as the first argument and the value assigned to `tracking_fun_data` as the second.
Additionally, you can overwrite the TTL inside a controller action by assigning `cache_ttl` to the connection struct:
diff --git a/lib/pleroma/web/plugs/ensure_privileged_plug.ex b/lib/pleroma/web/plugs/ensure_privileged_plug.ex
new file mode 100644
index 000000000..f886c87ea
--- /dev/null
+++ b/lib/pleroma/web/plugs/ensure_privileged_plug.ex
@@ -0,0 +1,44 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Plugs.EnsurePrivilegedPlug do
+ @moduledoc """
+ Ensures staff are privileged enough to do certain tasks.
+ """
+ import Pleroma.Web.TranslationHelpers
+ import Plug.Conn
+
+ alias Pleroma.Config
+ alias Pleroma.User
+
+ def init(options) do
+ options
+ end
+
+ def call(%{assigns: %{user: %User{is_admin: false, is_moderator: false}}} = conn, _) do
+ conn
+ |> render_error(:forbidden, "User isn't privileged.")
+ |> halt()
+ end
+
+ def call(
+ %{assigns: %{user: %User{is_admin: is_admin, is_moderator: is_moderator}}} = conn,
+ privilege
+ ) do
+ if (is_admin and privilege in Config.get([:instance, :admin_privileges])) or
+ (is_moderator and privilege in Config.get([:instance, :moderator_privileges])) do
+ conn
+ else
+ conn
+ |> render_error(:forbidden, "User isn't privileged.")
+ |> halt()
+ end
+ end
+
+ def call(conn, _) do
+ conn
+ |> render_error(:forbidden, "User isn't privileged.")
+ |> halt()
+ end
+end
diff --git a/lib/pleroma/web/plugs/ensure_staff_privileged_plug.ex b/lib/pleroma/web/plugs/ensure_staff_privileged_plug.ex
deleted file mode 100644
index 3c2109496..000000000
--- a/lib/pleroma/web/plugs/ensure_staff_privileged_plug.ex
+++ /dev/null
@@ -1,36 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.Plugs.EnsureStaffPrivilegedPlug do
- @moduledoc """
- Ensures staff are privileged enough to do certain tasks.
- """
- import Pleroma.Web.TranslationHelpers
- import Plug.Conn
-
- alias Pleroma.Config
- alias Pleroma.User
-
- def init(options) do
- options
- end
-
- def call(%{assigns: %{user: %User{is_admin: true}}} = conn, _), do: conn
-
- def call(%{assigns: %{user: %User{is_moderator: true}}} = conn, _) do
- if Config.get!([:instance, :privileged_staff]) do
- conn
- else
- conn
- |> render_error(:forbidden, "User is not an admin.")
- |> halt()
- end
- end
-
- def call(conn, _) do
- conn
- |> render_error(:forbidden, "User is not a staff member.")
- |> halt()
- end
-end
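
Unlike the removed `EnsureStaffPrivilegedPlug`, the new plug is parameterized by a privilege atom and checks it against per-role lists in the instance config, so individual capabilities can be granted to admins and moderators separately. A sketch of such a config (the privilege atoms come from the router pipelines added below; the actual default lists live in `config/config.exs`, not in this patch):

    config :pleroma, :instance,
      admin_privileges: [
        :users_delete,
        :users_manage_credentials,
        :reports_manage_reports,
        :moderation_log_read
      ],
      moderator_privileges: [
        :messages_read,
        :reports_manage_reports
      ]
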
diff --git a/lib/pleroma/web/plugs/http_security_plug.ex b/lib/pleroma/web/plugs/http_security_plug.ex
index b89948cec..a27dcd0ab 100644
--- a/lib/pleroma/web/plugs/http_security_plug.ex
+++ b/lib/pleroma/web/plugs/http_security_plug.ex
@@ -68,7 +68,7 @@ def headers do
]
}
- [{"reply-to", Jason.encode!(report_group)} | headers]
+ [{"report-to", Jason.encode!(report_group)} | headers]
else
headers
end
@@ -93,18 +93,26 @@ defp csp_string do
img_src = "img-src 'self' data: blob:"
media_src = "media-src 'self'"
+ connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
# Strict multimedia CSP enforcement only when MediaProxy is enabled
- {img_src, media_src} =
+ {img_src, media_src, connect_src} =
if Config.get([:media_proxy, :enabled]) &&
!Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
sources = build_csp_multimedia_source_list()
- {[img_src, sources], [media_src, sources]}
- else
- {[img_src, " https:"], [media_src, " https:"]}
- end
- connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
+ {
+ [img_src, sources],
+ [media_src, sources],
+ [connect_src, sources]
+ }
+ else
+ {
+ [img_src, " https:"],
+ [media_src, " https:"],
+ [connect_src, " https:"]
+ }
+ end
connect_src =
if Config.get(:env) == :dev do
@@ -117,7 +125,7 @@ defp csp_string do
if Config.get(:env) == :dev do
"script-src 'self' 'unsafe-eval'"
else
- "script-src 'self'"
+ "script-src 'self' 'wasm-unsafe-eval'"
end
report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"]
@@ -193,7 +201,7 @@ defp build_csp_param(url) when is_binary(url) do
def warn_if_disabled do
unless Config.get([:http_security, :enabled]) do
- Logger.warn("
+ Logger.warning("
.i;;;;i.
iYcviii;vXY:
.YXi .i1c.
diff --git a/lib/pleroma/web/plugs/http_signature_plug.ex b/lib/pleroma/web/plugs/http_signature_plug.ex
index d023754a6..e814efc2c 100644
--- a/lib/pleroma/web/plugs/http_signature_plug.ex
+++ b/lib/pleroma/web/plugs/http_signature_plug.ex
@@ -16,7 +16,7 @@ def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
end
def call(conn, _opts) do
- if get_format(conn) == "activity+json" do
+ if get_format(conn) in ["json", "activity+json"] do
conn
|> maybe_assign_valid_signature()
|> maybe_require_signature()
@@ -25,21 +25,58 @@ def call(conn, _opts) do
end
end
+ defp validate_signature(conn, request_target) do
+ # Newer drafts for HTTP signatures now use @request-target instead of the
+ # old (request-target). We'll now support both for incoming signatures.
+ conn =
+ conn
+ |> put_req_header("(request-target)", request_target)
+ |> put_req_header("@request-target", request_target)
+
+ HTTPSignatures.validate_conn(conn)
+ end
+
+ defp validate_signature(conn) do
+ # This (request-target) is non-standard, but many implementations do it
+ # this way due to a misinterpretation of
+ # https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures-06
+ # "path" was interpreted as not having the query, though later examples
+ # show that it must be the absolute path + query. This behavior is kept to
+ # make sure most software (Pleroma itself, Mastodon, and probably others)
+ # do not break.
+ request_target = String.downcase("#{conn.method}") <> " #{conn.request_path}"
+
+ # This is the proper way to build the @request-target, as expected by
+ # many HTTP signature libraries, clarified in the following draft:
+ # https://www.ietf.org/archive/id/draft-ietf-httpbis-message-signatures-11.html#section-2.2.6
+ # It is the same as before, but containing the query part as well.
+ proper_target = request_target <> "?#{conn.query_string}"
+
+ cond do
+ # Normal, non-standard behavior but expected by Pleroma and more.
+ validate_signature(conn, request_target) ->
+ true
+
+ # Has query string and the previous one failed: let's try the standard.
+ conn.query_string != "" ->
+ validate_signature(conn, proper_target)
+
+ # If there's no query string and signature fails, it's rotten.
+ true ->
+ false
+ end
+ end
+
defp maybe_assign_valid_signature(conn) do
if has_signature_header?(conn) do
- # set (request-target) header to the appropriate value
- # we also replace the digest header with the one we computed
- request_target = String.downcase("#{conn.method}") <> " #{conn.request_path}"
-
+ # we replace the digest header with the one we computed in DigestPlug
conn =
- conn
- |> put_req_header("(request-target)", request_target)
- |> case do
+ case conn do
%{assigns: %{digest: digest}} = conn -> put_req_header(conn, "digest", digest)
conn -> conn
end
- assign(conn, :valid_signature, HTTPSignatures.validate_conn(conn))
+ assign(conn, :valid_signature, validate_signature(conn))
else
Logger.debug("No signature header!")
conn
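
Both target strings end up being tried against both header names: the legacy `(request-target)` form without the query string is checked first, and only if that fails and a query string is present does the plug retry with the full path-plus-query form. For a hypothetical signed `GET /users/lain/outbox?page=true`, the two candidate strings would be:

    # First attempt (legacy, non-standard form):
    "get /users/lain/outbox"

    # Second attempt (standard form, only when the first fails and a query string exists):
    "get /users/lain/outbox?page=true"
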
diff --git a/lib/pleroma/web/plugs/o_auth_plug.ex b/lib/pleroma/web/plugs/o_auth_plug.ex
index 0f74d626b..b59ac9d3e 100644
--- a/lib/pleroma/web/plugs/o_auth_plug.ex
+++ b/lib/pleroma/web/plugs/o_auth_plug.ex
@@ -23,14 +23,14 @@ def call(%{assigns: %{user: %User{}}} = conn, _), do: conn
def call(conn, _) do
with {:ok, token_str} <- fetch_token_str(conn) do
with {:ok, user, user_token} <- fetch_user_and_token(token_str),
- false <- Token.is_expired?(user_token) do
+ false <- Token.expired?(user_token) do
conn
|> assign(:token, user_token)
|> assign(:user, user)
else
_ ->
with {:ok, app, app_token} <- fetch_app_and_token(token_str),
- false <- Token.is_expired?(app_token) do
+ false <- Token.expired?(app_token) do
conn
|> assign(:token, app_token)
|> assign(:app, app)
@@ -47,15 +47,17 @@ def call(conn, _) do
#
@spec fetch_user_and_token(String.t()) :: {:ok, User.t(), Token.t()} | nil
defp fetch_user_and_token(token) do
- query =
+ token_query =
from(t in Token,
- where: t.token == ^token,
- join: user in assoc(t, :user),
- preload: [user: user]
+ where: t.token == ^token
)
- with %Token{user: user} = token_record <- Repo.one(query) do
+ with %Token{user_id: user_id} = token_record <- Repo.one(token_query),
+ false <- is_nil(user_id),
+ %User{} = user <- User.get_cached_by_id(user_id) do
{:ok, user, token_record}
+ else
+ _ -> nil
end
end
diff --git a/lib/pleroma/web/plugs/rate_limiter.ex b/lib/pleroma/web/plugs/rate_limiter.ex
index 2080b06bd..aa79dbf6b 100644
--- a/lib/pleroma/web/plugs/rate_limiter.ex
+++ b/lib/pleroma/web/plugs/rate_limiter.ex
@@ -89,7 +89,7 @@ def call(conn, plug_opts) do
end
defp handle_disabled(conn) do
- Logger.warn(
+ Logger.warning(
"Rate limiter disabled due to forwarded IP not being found. Please ensure your reverse proxy is providing the X-Forwarded-For header or disable the RemoteIP plug/rate limiter."
)
diff --git a/lib/pleroma/web/plugs/rate_limiter/supervisor.ex b/lib/pleroma/web/plugs/rate_limiter/supervisor.ex
index f00f3d95e..5f79a3e3e 100644
--- a/lib/pleroma/web/plugs/rate_limiter/supervisor.ex
+++ b/lib/pleroma/web/plugs/rate_limiter/supervisor.ex
@@ -14,7 +14,7 @@ def init(_args) do
Pleroma.Web.Plugs.RateLimiter.LimiterSupervisor
]
- opts = [strategy: :one_for_one, name: Pleroma.Web.Streamer.Supervisor]
+ opts = [strategy: :one_for_one]
Supervisor.init(children, opts)
end
end
diff --git a/lib/pleroma/web/plugs/remote_ip.ex b/lib/pleroma/web/plugs/remote_ip.ex
index f207d9fef..9f733a96f 100644
--- a/lib/pleroma/web/plugs/remote_ip.ex
+++ b/lib/pleroma/web/plugs/remote_ip.ex
@@ -43,6 +43,6 @@ defp maybe_add_cidr(proxy) when is_binary(proxy) do
InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
end
- InetCidr.parse(proxy, true)
+ InetCidr.parse_cidr!(proxy, true)
end
end
diff --git a/lib/pleroma/web/plugs/uploaded_media.ex b/lib/pleroma/web/plugs/uploaded_media.ex
index ad8143234..f1076da1b 100644
--- a/lib/pleroma/web/plugs/uploaded_media.ex
+++ b/lib/pleroma/web/plugs/uploaded_media.ex
@@ -35,9 +35,9 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
conn =
case fetch_query_params(conn) do
%{query_params: %{"name" => name}} = conn ->
- name = String.replace(name, "\"", "\\\"")
+ name = String.replace(name, ~s["], ~s[\\"])
- put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
+ put_resp_header(conn, "content-disposition", ~s[inline; filename="#{name}"])
conn ->
conn
@@ -105,7 +105,7 @@ defp get_media(conn, {:url, url}, _, _) do
end
defp get_media(conn, unknown, _, _) do
- Logger.error("#{__MODULE__}: Unknown get startegy: #{inspect(unknown)}")
+ Logger.error("#{__MODULE__}: Unknown get strategy: #{inspect(unknown)}")
conn
|> send_resp(:internal_server_error, dgettext("errors", "Internal Error"))
diff --git a/lib/pleroma/web/preload.ex b/lib/pleroma/web/preload.ex
index 4485383f9..6a4a8885e 100644
--- a/lib/pleroma/web/preload.ex
+++ b/lib/pleroma/web/preload.ex
@@ -11,7 +11,7 @@ def build_tags(_conn, params) do
terms =
params
|> parser.generate_terms()
- |> Enum.map(fn {k, v} -> {k, Base.encode64(Jason.encode!(v))} end)
+ |> Enum.map(fn {k, v} -> {k, Base.encode64(Jason.encode!(v, escape: :html_safe))} end)
|> Enum.into(%{})
Map.merge(acc, terms)
@@ -19,7 +19,7 @@ def build_tags(_conn, params) do
rendered_html =
preload_data
- |> Jason.encode!()
+ |> Jason.encode!(escape: :html_safe)
|> build_script_tag()
|> HTML.safe_to_string()
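
Encoding the preload payload with `escape: :html_safe` keeps a literal `</script>` inside user content from terminating the inline script tag, assuming Jason's documented behaviour of additionally escaping `/` (and the U+2028/U+2029 separators):

    Jason.encode!(%{note: "</script>"})
    # => {"note":"</script>"}        would close the surrounding <script> tag early

    Jason.encode!(%{note: "</script>"}, escape: :html_safe)
    # => {"note":"<\/script>"}       safe to embed in the generated HTML
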
diff --git a/lib/pleroma/web/push.ex b/lib/pleroma/web/push.ex
index 9665b0b4a..0d43f402e 100644
--- a/lib/pleroma/web/push.ex
+++ b/lib/pleroma/web/push.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.Push do
def init do
unless enabled() do
- Logger.warn("""
+ Logger.warning("""
      VAPID key pair is not found. If you wish to enable web push, please run
mix web_push.gen.keypair
diff --git a/lib/pleroma/web/push/impl.ex b/lib/pleroma/web/push/impl.ex
index daf3eeb9e..9e68d827b 100644
--- a/lib/pleroma/web/push/impl.ex
+++ b/lib/pleroma/web/push/impl.ex
@@ -16,7 +16,7 @@ defmodule Pleroma.Web.Push.Impl do
require Logger
import Ecto.Query
- @types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact"]
+ @types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact", "Update"]
@doc "Performs sending notifications for user subscriptions"
@spec perform(Notification.t()) :: list(any) | :error | {:error, :unknown_type}
@@ -57,7 +57,7 @@ def perform(
end
def perform(_) do
- Logger.warn("Unknown notification type")
+ Logger.warning("Unknown notification type")
{:error, :unknown_type}
end
@@ -174,6 +174,15 @@ def format_body(
end
end
+ def format_body(
+ %{activity: %{data: %{"type" => "Update"}}},
+ actor,
+ _object,
+ _mastodon_type
+ ) do
+ "@#{actor.nickname} edited a status"
+ end
+
def format_title(activity, mastodon_type \\ nil)
def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_type) do
@@ -183,10 +192,12 @@ def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_typ
def format_title(%{type: type}, mastodon_type) do
case mastodon_type || type do
"mention" -> "New Mention"
+ "status" -> "New Status"
"follow" -> "New Follower"
"follow_request" -> "New Follow Request"
"reblog" -> "New Repeat"
"favourite" -> "New Favorite"
+ "update" -> "New Update"
"pleroma:chat_mention" -> "New Chat Message"
"pleroma:emoji_reaction" -> "New Reaction"
type -> "New #{String.capitalize(type || "event")}"
diff --git a/lib/pleroma/web/rel_me.ex b/lib/pleroma/web/rel_me.ex
index 98fbc1c59..ceb6a05f0 100644
--- a/lib/pleroma/web/rel_me.ex
+++ b/lib/pleroma/web/rel_me.ex
@@ -9,17 +9,13 @@ defmodule Pleroma.Web.RelMe do
recv_timeout: 2_000
]
- if Pleroma.Config.get(:env) == :test do
- def parse(url) when is_binary(url), do: parse_url(url)
- else
- @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
- def parse(url) when is_binary(url) do
- @cachex.fetch!(:rel_me_cache, url, fn _ ->
- {:commit, parse_url(url)}
- end)
- rescue
- e -> {:error, "Cachex error: #{inspect(e)}"}
- end
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ def parse(url) when is_binary(url) do
+ @cachex.fetch!(:rel_me_cache, url, fn _ ->
+ {:commit, parse_url(url)}
+ end)
+ rescue
+ e -> {:error, "Cachex error: #{inspect(e)}"}
end
def parse(_), do: {:error, "No URL provided"}
diff --git a/lib/pleroma/web/rich_media/backfill.ex b/lib/pleroma/web/rich_media/backfill.ex
new file mode 100644
index 000000000..4ec50e132
--- /dev/null
+++ b/lib/pleroma/web/rich_media/backfill.ex
@@ -0,0 +1,101 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.Backfill do
+ alias Pleroma.Web.RichMedia.Card
+ alias Pleroma.Web.RichMedia.Parser
+ alias Pleroma.Web.RichMedia.Parser.TTL
+ alias Pleroma.Workers.RichMediaExpirationWorker
+
+ require Logger
+
+ @backfiller Pleroma.Config.get([__MODULE__, :provider], Pleroma.Web.RichMedia.Backfill.Task)
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ @max_attempts 3
+ @retry 5_000
+
+ def start(%{url: url} = args) when is_binary(url) do
+ url_hash = Card.url_to_hash(url)
+
+ args =
+ args
+ |> Map.put(:attempt, 1)
+ |> Map.put(:url_hash, url_hash)
+
+ @backfiller.run(args)
+ end
+
+ def run(%{url: url, url_hash: url_hash, attempt: attempt} = args)
+ when attempt <= @max_attempts do
+ case Parser.parse(url) do
+ {:ok, fields} ->
+ {:ok, card} = Card.create(url, fields)
+
+ maybe_schedule_expiration(url, fields)
+
+ if Map.has_key?(args, :activity_id) do
+ stream_update(args)
+ end
+
+ warm_cache(url_hash, card)
+
+ {:error, {:invalid_metadata, fields}} ->
+ Logger.debug("Rich media incomplete or invalid metadata for #{url}: #{inspect(fields)}")
+ negative_cache(url_hash)
+
+ {:error, :body_too_large} ->
+ Logger.error("Rich media error for #{url}: :body_too_large")
+ negative_cache(url_hash)
+
+ {:error, {:content_type, type}} ->
+ Logger.debug("Rich media error for #{url}: :content_type is #{type}")
+ negative_cache(url_hash)
+
+ e ->
+ Logger.debug("Rich media error for #{url}: #{inspect(e)}")
+
+ :timer.sleep(@retry * attempt)
+
+ run(%{args | attempt: attempt + 1})
+ end
+ end
+
+ def run(%{url: url, url_hash: url_hash}) do
+ Logger.debug("Rich media failure for #{url}")
+
+ negative_cache(url_hash, :timer.minutes(15))
+ end
+
+ defp maybe_schedule_expiration(url, fields) do
+ case TTL.process(fields, url) do
+ {:ok, ttl} when is_number(ttl) ->
+ timestamp = DateTime.from_unix!(ttl)
+
+ RichMediaExpirationWorker.new(%{"url" => url}, scheduled_at: timestamp)
+ |> Oban.insert()
+
+ _ ->
+ :ok
+ end
+ end
+
+ defp stream_update(%{activity_id: activity_id}) do
+ Pleroma.Activity.get_by_id(activity_id)
+ |> Pleroma.Activity.normalize()
+ |> Pleroma.Web.ActivityPub.ActivityPub.stream_out()
+ end
+
+ defp warm_cache(key, val), do: @cachex.put(:rich_media_cache, key, val)
+ defp negative_cache(key, ttl \\ nil), do: @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
+end
+
+defmodule Pleroma.Web.RichMedia.Backfill.Task do
+ alias Pleroma.Web.RichMedia.Backfill
+
+ def run(args) do
+ Task.Supervisor.start_child(Pleroma.TaskSupervisor, Backfill, :run, [args],
+ name: {:global, {:rich_media, args.url_hash}}
+ )
+ end
+end
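
`Backfill.run/1` retries the parse up to three times with a growing delay, warms the per-URL-hash cache on success and negative-caches failures; scheduling goes through a configurable provider so it can run inline (e.g. in tests) instead of under `Pleroma.TaskSupervisor`. An illustrative override (the attribute is read at compile time):

    # config/test.exs (illustrative)
    config :pleroma, Pleroma.Web.RichMedia.Backfill,
      provider: Pleroma.Web.RichMedia.Backfill
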
diff --git a/lib/pleroma/web/rich_media/card.ex b/lib/pleroma/web/rich_media/card.ex
new file mode 100644
index 000000000..36a1ae44a
--- /dev/null
+++ b/lib/pleroma/web/rich_media/card.ex
@@ -0,0 +1,157 @@
+defmodule Pleroma.Web.RichMedia.Card do
+ use Ecto.Schema
+ import Ecto.Changeset
+ import Ecto.Query
+
+ alias Pleroma.Activity
+ alias Pleroma.HTML
+ alias Pleroma.Object
+ alias Pleroma.Repo
+ alias Pleroma.Web.RichMedia.Backfill
+ alias Pleroma.Web.RichMedia.Parser
+
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+
+ @type t :: %__MODULE__{}
+
+ schema "rich_media_card" do
+ field(:url_hash, :binary)
+ field(:fields, :map)
+
+ timestamps()
+ end
+
+ @doc false
+ def changeset(card, attrs) do
+ card
+ |> cast(attrs, [:url_hash, :fields])
+ |> validate_required([:url_hash, :fields])
+ |> unique_constraint(:url_hash)
+ end
+
+ @spec create(String.t(), map()) :: {:ok, t()}
+ def create(url, fields) do
+ url_hash = url_to_hash(url)
+
+ fields = Map.put_new(fields, "url", url)
+
+ %__MODULE__{}
+ |> changeset(%{url_hash: url_hash, fields: fields})
+ |> Repo.insert(on_conflict: {:replace, [:fields]}, conflict_target: :url_hash)
+ end
+
+ @spec delete(String.t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()} | :ok
+ def delete(url) do
+ url_hash = url_to_hash(url)
+ @cachex.del(:rich_media_cache, url_hash)
+
+ case get_by_url(url) do
+ %__MODULE__{} = card -> Repo.delete(card)
+ nil -> :ok
+ end
+ end
+
+ @spec get_by_url(String.t() | nil) :: t() | nil | :error
+ def get_by_url(url) when is_binary(url) do
+ if @config_impl.get([:rich_media, :enabled]) do
+ url_hash = url_to_hash(url)
+
+ @cachex.fetch!(:rich_media_cache, url_hash, fn _ ->
+ result =
+ __MODULE__
+ |> where(url_hash: ^url_hash)
+ |> Repo.one()
+
+ case result do
+ %__MODULE__{} = card -> {:commit, card}
+ _ -> {:ignore, nil}
+ end
+ end)
+ else
+ :error
+ end
+ end
+
+ def get_by_url(nil), do: nil
+
+ @spec get_or_backfill_by_url(String.t(), map()) :: t() | nil
+ def get_or_backfill_by_url(url, backfill_opts \\ %{}) do
+ case get_by_url(url) do
+ %__MODULE__{} = card ->
+ card
+
+ nil ->
+ backfill_opts = Map.put(backfill_opts, :url, url)
+
+ Backfill.start(backfill_opts)
+
+ nil
+
+ :error ->
+ nil
+ end
+ end
+
+ @spec get_by_object(Object.t()) :: t() | nil | :error
+ def get_by_object(object) do
+ case HTML.extract_first_external_url_from_object(object) do
+ nil -> nil
+ url -> get_or_backfill_by_url(url)
+ end
+ end
+
+ @spec get_by_activity(Activity.t()) :: t() | nil | :error
+ # Fake/Draft activity
+ def get_by_activity(%Activity{id: "pleroma:fakeid"} = activity) do
+ with %Object{} = object <- Object.normalize(activity, fetch: false),
+ url when not is_nil(url) <- HTML.extract_first_external_url_from_object(object) do
+ case get_by_url(url) do
+ # Cache hit
+ %__MODULE__{} = card ->
+ card
+
+ # Cache miss, but fetch for rendering the Draft
+ _ ->
+ with {:ok, fields} <- Parser.parse(url),
+ {:ok, card} <- create(url, fields) do
+ card
+ else
+ _ -> nil
+ end
+ end
+ else
+ _ ->
+ nil
+ end
+ end
+
+ def get_by_activity(activity) do
+ with %Object{} = object <- Object.normalize(activity, fetch: false),
+ {_, nil} <- {:cached, get_cached_url(object, activity.id)} do
+ nil
+ else
+ {:cached, url} ->
+ get_or_backfill_by_url(url, %{activity_id: activity.id})
+
+ _ ->
+ :error
+ end
+ end
+
+ @spec url_to_hash(String.t()) :: String.t()
+ def url_to_hash(url) do
+ :crypto.hash(:sha256, url) |> Base.encode16(case: :lower)
+ end
+
+ defp get_cached_url(object, activity_id) do
+ key = "URL|#{activity_id}"
+
+ @cachex.fetch!(:scrubber_cache, key, fn _ ->
+ url = HTML.extract_first_external_url_from_object(object)
+ Activity.HTML.add_cache_key_for(activity_id, key)
+
+ {:commit, url}
+ end)
+ end
+end
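
Callers are expected to go through `get_or_backfill_by_url/2`: a cache or database hit returns the `%Card{}` immediately, a miss schedules a background backfill and returns `nil`, and `nil` is also returned when rich media is disabled. A usage sketch (the `activity` variable is hypothetical):

    alias Pleroma.Web.RichMedia.Card

    case Card.get_or_backfill_by_url(url, %{activity_id: activity.id}) do
      %Card{fields: fields} -> fields   # already fetched and stored
      nil -> nil                        # disabled, or a backfill was just kicked off
    end
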
diff --git a/lib/pleroma/web/rich_media/helpers.ex b/lib/pleroma/web/rich_media/helpers.ex
index 0488df30e..119994458 100644
--- a/lib/pleroma/web/rich_media/helpers.ex
+++ b/lib/pleroma/web/rich_media/helpers.ex
@@ -3,86 +3,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Helpers do
- alias Pleroma.Activity
alias Pleroma.Config
- alias Pleroma.HTML
- alias Pleroma.Object
- alias Pleroma.Web.RichMedia.Parser
-
- @options [
- pool: :media,
- max_body: 2_000_000,
- recv_timeout: 2_000
- ]
-
- @spec validate_page_url(URI.t() | binary()) :: :ok | :error
- defp validate_page_url(page_url) when is_binary(page_url) do
- validate_tld = Config.get([Pleroma.Formatter, :validate_tld])
-
- page_url
- |> Linkify.Parser.url?(validate_tld: validate_tld)
- |> parse_uri(page_url)
- end
-
- defp validate_page_url(%URI{host: host, scheme: "https", authority: authority})
- when is_binary(authority) do
- cond do
- host in Config.get([:rich_media, :ignore_hosts], []) ->
- :error
-
- get_tld(host) in Config.get([:rich_media, :ignore_tld], []) ->
- :error
-
- true ->
- :ok
- end
- end
-
- defp validate_page_url(_), do: :error
-
- defp parse_uri(true, url) do
- url
- |> URI.parse()
- |> validate_page_url
- end
-
- defp parse_uri(_, _), do: :error
-
- defp get_tld(host) do
- host
- |> String.split(".")
- |> Enum.reverse()
- |> hd
- end
-
- def fetch_data_for_object(object) do
- with true <- Config.get([:rich_media, :enabled]),
- {:ok, page_url} <-
- HTML.extract_first_external_url_from_object(object),
- :ok <- validate_page_url(page_url),
- {:ok, rich_media} <- Parser.parse(page_url) do
- %{page_url: page_url, rich_media: rich_media}
- else
- _ -> %{}
- end
- end
-
- def fetch_data_for_activity(%Activity{data: %{"type" => "Create"}} = activity) do
- with true <- Config.get([:rich_media, :enabled]),
- %Object{} = object <- Object.normalize(activity, fetch: false) do
- fetch_data_for_object(object)
- else
- _ -> %{}
- end
- end
-
- def fetch_data_for_activity(_), do: %{}
def rich_media_get(url) do
headers = [{"user-agent", Pleroma.Application.user_agent() <> "; Bot"}]
head_check =
- case Pleroma.HTTP.head(url, headers, @options) do
+ case Pleroma.HTTP.head(url, headers, http_options()) do
# If the HEAD request didn't reach the server for whatever reason,
# we assume the GET that comes right after won't either
{:error, _} = e ->
@@ -97,7 +24,7 @@ def rich_media_get(url) do
:ok
end
- with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, @options)
+ with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, http_options())
end
defp check_content_type(headers) do
@@ -113,12 +40,13 @@ defp check_content_type(headers) do
end
end
- @max_body @options[:max_body]
defp check_content_length(headers) do
+ max_body = Keyword.get(http_options(), :max_body)
+
case List.keyfind(headers, "content-length", 0) do
{_, maybe_content_length} ->
case Integer.parse(maybe_content_length) do
- {content_length, ""} when content_length <= @max_body -> :ok
+ {content_length, ""} when content_length <= max_body -> :ok
{_, ""} -> {:error, :body_too_large}
_ -> :ok
end
@@ -127,4 +55,11 @@ defp check_content_length(headers) do
:ok
end
end
+
+ defp http_options do
+ [
+ pool: :media,
+ max_body: Config.get([:rich_media, :max_body], 5_000_000)
+ ]
+ end
end
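
With the hard-coded `@options` gone, the body-size cap is read from config on every request, defaulting to 5 MB. Lowering it would look like:

    config :pleroma, :rich_media,
      max_body: 2_000_000   # cap rich-media fetches at ~2 MB (default: 5_000_000)
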
diff --git a/lib/pleroma/web/rich_media/parser.ex b/lib/pleroma/web/rich_media/parser.ex
index dbe81eabb..37cf29029 100644
--- a/lib/pleroma/web/rich_media/parser.ex
+++ b/lib/pleroma/web/rich_media/parser.ex
@@ -5,137 +5,28 @@
defmodule Pleroma.Web.RichMedia.Parser do
require Logger
- @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
defp parsers do
Pleroma.Config.get([:rich_media, :parsers])
end
- def parse(nil), do: {:error, "No URL provided"}
+ def parse(nil), do: nil
- if Pleroma.Config.get(:env) == :test do
- @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
- def parse(url), do: parse_url(url)
- else
- @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
- def parse(url) do
- with {:ok, data} <- get_cached_or_parse(url),
- {:ok, _} <- set_ttl_based_on_image(data, url) do
- {:ok, data}
- end
- end
-
- defp get_cached_or_parse(url) do
- case @cachex.fetch(:rich_media_cache, url, fn ->
- case parse_url(url) do
- {:ok, _} = res ->
- {:commit, res}
-
- {:error, reason} = e ->
- # Unfortunately we have to log errors here, instead of doing that
- # along with ttl setting at the bottom. Otherwise we can get log spam
- # if more than one process was waiting for the rich media card
- # while it was generated. Ideally we would set ttl here as well,
- # so we don't override it number_of_waiters_on_generation
- # times, but one, obviously, can't set ttl for not-yet-created entry
- # and Cachex doesn't support returning ttl from the fetch callback.
- log_error(url, reason)
- {:commit, e}
- end
- end) do
- {action, res} when action in [:commit, :ok] ->
- case res do
- {:ok, _data} = res ->
- res
-
- {:error, reason} = e ->
- if action == :commit, do: set_error_ttl(url, reason)
- e
- end
-
- {:error, e} ->
- {:error, {:cachex_error, e}}
- end
- end
-
- defp set_error_ttl(_url, :body_too_large), do: :ok
- defp set_error_ttl(_url, {:content_type, _}), do: :ok
-
- # The TTL is not set for the errors above, since they are unlikely to change
- # with time
-
- defp set_error_ttl(url, _reason) do
- ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
- @cachex.expire(:rich_media_cache, url, ttl)
- :ok
- end
-
- defp log_error(url, {:invalid_metadata, data}) do
- Logger.debug(fn -> "Incomplete or invalid metadata for #{url}: #{inspect(data)}" end)
- end
-
- defp log_error(url, reason) do
- Logger.warn(fn -> "Rich media error for #{url}: #{inspect(reason)}" end)
+ @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
+ def parse(url) do
+ with :ok <- validate_page_url(url),
+ {:ok, data} <- parse_url(url) do
+ data = Map.put(data, "url", url)
+ {:ok, data}
end
end
- @doc """
- Set the rich media cache based on the expiration time of image.
-
- Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`
-
- ## Example
-
- defmodule MyModule do
- @behaviour Pleroma.Web.RichMedia.Parser.TTL
- def ttl(data, url) do
- image_url = Map.get(data, :image)
- # do some parsing in the url and get the ttl of the image
- # and return ttl is unix time
- parse_ttl_from_url(image_url)
- end
- end
-
- Define the module in the config
-
- config :pleroma, :rich_media,
- ttl_setters: [MyModule]
- """
- @spec set_ttl_based_on_image(map(), String.t()) ::
- {:ok, Integer.t() | :noop} | {:error, :no_key}
- def set_ttl_based_on_image(data, url) do
- case get_ttl_from_image(data, url) do
- {:ok, ttl} when is_number(ttl) ->
- ttl = ttl * 1000
-
- case @cachex.expire_at(:rich_media_cache, url, ttl) do
- {:ok, true} -> {:ok, ttl}
- {:ok, false} -> {:error, :no_key}
- end
-
- _ ->
- {:ok, :noop}
- end
- end
-
- defp get_ttl_from_image(data, url) do
- [:rich_media, :ttl_setters]
- |> Pleroma.Config.get()
- |> Enum.reduce({:ok, nil}, fn
- module, {:ok, _ttl} ->
- module.ttl(data, url)
-
- _, error ->
- error
- end)
- end
-
- def parse_url(url) do
+ defp parse_url(url) do
with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
{:ok, html} <- Floki.parse_document(html) do
html
|> maybe_parse()
- |> Map.put("url", url)
|> clean_parsed_data()
|> check_parsed_data()
end
@@ -166,4 +57,46 @@ defp clean_parsed_data(data) do
end)
|> Map.new()
end
+
+ @spec validate_page_url(URI.t() | binary()) :: :ok | :error
+ defp validate_page_url(page_url) when is_binary(page_url) do
+ validate_tld = @config_impl.get([Pleroma.Formatter, :validate_tld])
+
+ page_url
+ |> Linkify.Parser.url?(validate_tld: validate_tld)
+ |> parse_uri(page_url)
+ end
+
+ defp validate_page_url(%URI{host: host, scheme: "https"}) do
+ cond do
+ Linkify.Parser.ip?(host) ->
+ :error
+
+ host in @config_impl.get([:rich_media, :ignore_hosts], []) ->
+ :error
+
+ get_tld(host) in @config_impl.get([:rich_media, :ignore_tld], []) ->
+ :error
+
+ true ->
+ :ok
+ end
+ end
+
+ defp validate_page_url(_), do: :error
+
+ defp parse_uri(true, url) do
+ url
+ |> URI.parse()
+ |> validate_page_url
+ end
+
+ defp parse_uri(_, _), do: :error
+
+ defp get_tld(host) do
+ host
+ |> String.split(".")
+ |> Enum.reverse()
+ |> hd
+ end
end
diff --git a/lib/pleroma/web/rich_media/parser/ttl.ex b/lib/pleroma/web/rich_media/parser/ttl.ex
index 59d7f87ab..7e56375ff 100644
--- a/lib/pleroma/web/rich_media/parser/ttl.ex
+++ b/lib/pleroma/web/rich_media/parser/ttl.ex
@@ -3,5 +3,18 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser.TTL do
- @callback ttl(Map.t(), String.t()) :: Integer.t() | nil
+ @callback ttl(map(), String.t()) :: integer() | nil
+
+ @spec process(map(), String.t()) :: {:ok, integer() | nil}
+ def process(data, url) do
+ [:rich_media, :ttl_setters]
+ |> Pleroma.Config.get()
+ |> Enum.reduce_while({:ok, nil}, fn
+ module, acc ->
+ case module.ttl(data, url) do
+ ttl when is_number(ttl) -> {:halt, {:ok, ttl}}
+ _ -> {:cont, acc}
+ end
+ end)
+ end
end
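
`TTL.process/2` replaces the old reduce in the parser: it walks the configured `ttl_setters` and halts on the first module that returns a number. A custom setter just implements the callback over the string-keyed parsed fields (module name and field are assumptions):

    defmodule MyApp.RichMedia.TTL.ExpiresAt do
      @behaviour Pleroma.Web.RichMedia.Parser.TTL

      @impl true
      def ttl(%{"expires_at" => unix}, _url) when is_integer(unix), do: unix
      def ttl(_fields, _url), do: nil
    end

    # config :pleroma, :rich_media,
    #   ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl, MyApp.RichMedia.TTL.ExpiresAt]
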
diff --git a/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex b/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex
index fa41c160d..1172a120a 100644
--- a/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex
+++ b/lib/pleroma/web/rich_media/parser/ttl/aws_signed_url.ex
@@ -7,25 +7,26 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
@impl true
def ttl(data, _url) do
- image = Map.get(data, :image)
+ image = Map.get(data, "image")
- if is_aws_signed_url(image) do
+ if aws_signed_url?(image) do
image
|> parse_query_params()
|> format_query_params()
|> get_expiration_timestamp()
else
- {:error, "Not aws signed url #{inspect(image)}"}
+ nil
end
end
- defp is_aws_signed_url(image) when is_binary(image) and image != "" do
+ defp aws_signed_url?(image) when is_binary(image) and image != "" do
%URI{host: host, query: query} = URI.parse(image)
- String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
+ is_binary(host) and String.contains?(host, "amazonaws.com") and
+ is_binary(query) and String.contains?(query, "X-Amz-Expires")
end
- defp is_aws_signed_url(_), do: nil
+ defp aws_signed_url?(_), do: nil
defp parse_query_params(image) do
%URI{query: query} = URI.parse(image)
@@ -45,6 +46,6 @@ defp get_expiration_timestamp(params) when is_map(params) do
|> Map.get("X-Amz-Date")
|> Timex.parse("{ISO:Basic:Z}")
- {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
+ Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
end
end
diff --git a/lib/pleroma/web/rich_media/parser/ttl/opengraph.ex b/lib/pleroma/web/rich_media/parser/ttl/opengraph.ex
new file mode 100644
index 000000000..b06889669
--- /dev/null
+++ b/lib/pleroma/web/rich_media/parser/ttl/opengraph.ex
@@ -0,0 +1,20 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.RichMedia.Parser.TTL.Opengraph do
+ @behaviour Pleroma.Web.RichMedia.Parser.TTL
+
+ @impl true
+ def ttl(%{"ttl" => ttl_string}, _url) when is_binary(ttl_string) do
+ try do
+ ttl = String.to_integer(ttl_string)
+ now = DateTime.utc_now() |> DateTime.to_unix()
+ now + ttl
+ rescue
+ _ -> nil
+ end
+ end
+
+ def ttl(_, _), do: nil
+end
diff --git a/lib/pleroma/web/rich_media/parsers/o_embed.ex b/lib/pleroma/web/rich_media/parsers/o_embed.ex
index 75318d9c7..0f303176c 100644
--- a/lib/pleroma/web/rich_media/parsers/o_embed.ex
+++ b/lib/pleroma/web/rich_media/parsers/o_embed.ex
@@ -6,8 +6,8 @@ defmodule Pleroma.Web.RichMedia.Parsers.OEmbed do
def parse(html, _data) do
with elements = [_ | _] <- get_discovery_data(html),
oembed_url when is_binary(oembed_url) <- get_oembed_url(elements),
- {:ok, oembed_data} <- get_oembed_data(oembed_url) do
- oembed_data
+ {:ok, oembed_data = %{"html" => html}} <- get_oembed_data(oembed_url) do
+ %{oembed_data | "html" => Pleroma.HTML.filter_tags(html)}
else
_e -> %{}
end
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index 7bbc20275..368a04df0 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -101,14 +101,80 @@ defmodule Pleroma.Web.Router do
plug(Pleroma.Web.Plugs.IdempotencyPlug)
end
- pipeline :require_privileged_staff do
- plug(Pleroma.Web.Plugs.EnsureStaffPrivilegedPlug)
- end
-
pipeline :require_admin do
plug(Pleroma.Web.Plugs.UserIsAdminPlug)
end
+ pipeline :require_privileged_role_users_delete do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :users_delete)
+ end
+
+ pipeline :require_privileged_role_users_manage_credentials do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :users_manage_credentials)
+ end
+
+ pipeline :require_privileged_role_messages_read do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :messages_read)
+ end
+
+ pipeline :require_privileged_role_users_manage_tags do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :users_manage_tags)
+ end
+
+ pipeline :require_privileged_role_users_manage_activation_state do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :users_manage_activation_state)
+ end
+
+ pipeline :require_privileged_role_users_manage_invites do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :users_manage_invites)
+ end
+
+ pipeline :require_privileged_role_reports_manage_reports do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :reports_manage_reports)
+ end
+
+ pipeline :require_privileged_role_users_read do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :users_read)
+ end
+
+ pipeline :require_privileged_role_messages_delete do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :messages_delete)
+ end
+
+ pipeline :require_privileged_role_emoji_manage_emoji do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :emoji_manage_emoji)
+ end
+
+ pipeline :require_privileged_role_instances_delete do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :instances_delete)
+ end
+
+ pipeline :require_privileged_role_moderation_log_read do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :moderation_log_read)
+ end
+
+ pipeline :require_privileged_role_statistics_read do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :statistics_read)
+ end
+
+ pipeline :require_privileged_role_announcements_manage_announcements do
+ plug(:admin_api)
+ plug(Pleroma.Web.Plugs.EnsurePrivilegedPlug, :announcements_manage_announcements)
+ end
+
pipeline :pleroma_html do
plug(:browser)
plug(:authenticate)
@@ -116,7 +182,7 @@ defmodule Pleroma.Web.Router do
end
pipeline :well_known do
- plug(:accepts, ["json", "jrd+json", "xml", "xrd+xml"])
+ plug(:accepts, ["json", "jrd", "jrd+json", "xml", "xrd+xml"])
end
pipeline :config do
@@ -158,6 +224,12 @@ defmodule Pleroma.Web.Router do
post("/remote_interaction", UtilController, :remote_interaction)
end
+ scope "/api/v1/pleroma", Pleroma.Web.PleromaAPI do
+ pipe_through(:pleroma_api)
+
+ get("/federation_status", InstancesController, :show)
+ end
+
scope "/api/v1/pleroma", Pleroma.Web do
pipe_through(:pleroma_api)
post("/uploader_callback/:upload_path", UploaderController, :callback)
@@ -167,8 +239,6 @@ defmodule Pleroma.Web.Router do
scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
pipe_through([:admin_api, :require_admin])
- put("/users/disable_mfa", AdminAPIController, :disable_mfa)
-
get("/users/:nickname/permission_group", AdminAPIController, :right_get)
get("/users/:nickname/permission_group/:permission_group", AdminAPIController, :right_get)
@@ -199,17 +269,10 @@ defmodule Pleroma.Web.Router do
post("/relay", RelayController, :follow)
delete("/relay", RelayController, :unfollow)
- patch("/users/force_password_reset", AdminAPIController, :force_password_reset)
- get("/users/:nickname/credentials", AdminAPIController, :show_user_credentials)
- patch("/users/:nickname/credentials", AdminAPIController, :update_user_credentials)
-
get("/instance_document/:name", InstanceDocumentController, :show)
patch("/instance_document/:name", InstanceDocumentController, :update)
delete("/instance_document/:name", InstanceDocumentController, :delete)
- patch("/users/confirm_email", AdminAPIController, :confirm_email)
- patch("/users/resend_confirmation_email", AdminAPIController, :resend_confirmation_email)
-
get("/config", ConfigController, :show)
post("/config", ConfigController, :update)
get("/config/descriptions", ConfigController, :descriptions)
@@ -230,6 +293,16 @@ defmodule Pleroma.Web.Router do
post("/backups", AdminAPIController, :create_backup)
+ get("/rules", RuleController, :index)
+ post("/rules", RuleController, :create)
+ patch("/rules/:id", RuleController, :update)
+ delete("/rules/:id", RuleController, :delete)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_announcements_manage_announcements)
+
get("/announcements", AnnouncementController, :index)
post("/announcements", AnnouncementController, :create)
get("/announcements/:id", AnnouncementController, :show)
@@ -237,14 +310,29 @@ defmodule Pleroma.Web.Router do
delete("/announcements/:id", AnnouncementController, :delete)
end
- # AdminAPI: admins and mods (staff) can perform these actions (if enabled by config)
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
- pipe_through([:admin_api, :require_privileged_staff])
+ pipe_through(:require_privileged_role_users_delete)
delete("/users", UserController, :delete)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_users_manage_credentials)
get("/users/:nickname/password_reset", AdminAPIController, :get_password_reset)
+ get("/users/:nickname/credentials", AdminAPIController, :show_user_credentials)
patch("/users/:nickname/credentials", AdminAPIController, :update_user_credentials)
+ put("/users/disable_mfa", AdminAPIController, :disable_mfa)
+ patch("/users/force_password_reset", AdminAPIController, :force_password_reset)
+ patch("/users/confirm_email", AdminAPIController, :confirm_email)
+ patch("/users/resend_confirmation_email", AdminAPIController, :resend_confirmation_email)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_messages_read)
get("/users/:nickname/statuses", AdminAPIController, :list_user_statuses)
get("/users/:nickname/chats", AdminAPIController, :list_user_chats)
@@ -253,52 +341,100 @@ defmodule Pleroma.Web.Router do
get("/chats/:id", ChatController, :show)
get("/chats/:id/messages", ChatController, :messages)
+
+ get("/instances/:instance/statuses", InstanceController, :list_statuses)
+
+ get("/statuses/:id", StatusController, :show)
end
- # AdminAPI: admins and mods (staff) can perform these actions
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
- pipe_through(:admin_api)
+ pipe_through(:require_privileged_role_users_manage_tags)
put("/users/tag", AdminAPIController, :tag_users)
delete("/users/tag", AdminAPIController, :untag_users)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_users_manage_activation_state)
patch("/users/:nickname/toggle_activation", UserController, :toggle_activation)
patch("/users/activate", UserController, :activate)
patch("/users/deactivate", UserController, :deactivate)
- patch("/users/approve", UserController, :approve)
+ end
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_users_manage_invites)
+
+ patch("/users/approve", UserController, :approve)
post("/users/invite_token", InviteController, :create)
get("/users/invites", InviteController, :index)
post("/users/revoke_invite", InviteController, :revoke)
post("/users/email_invite", InviteController, :email)
+ end
- get("/users", UserController, :index)
- get("/users/:nickname", UserController, :show)
-
- get("/instances/:instance/statuses", InstanceController, :list_statuses)
- delete("/instances/:instance", InstanceController, :delete)
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_reports_manage_reports)
get("/reports", ReportController, :index)
get("/reports/:id", ReportController, :show)
patch("/reports", ReportController, :update)
post("/reports/:id/notes", ReportController, :notes_create)
delete("/reports/:report_id/notes/:id", ReportController, :notes_delete)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_users_read)
+
+ get("/users", UserController, :index)
+ get("/users/:nickname", UserController, :show)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_messages_delete)
- get("/statuses/:id", StatusController, :show)
put("/statuses/:id", StatusController, :update)
delete("/statuses/:id", StatusController, :delete)
- get("/moderation_log", AdminAPIController, :list_log)
-
- post("/reload_emoji", AdminAPIController, :reload_emoji)
- get("/stats", AdminAPIController, :stats)
-
delete("/chats/:id/messages/:message_id", ChatController, :delete_message)
end
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_emoji_manage_emoji)
+
+ post("/reload_emoji", AdminAPIController, :reload_emoji)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_instances_delete)
+
+ delete("/instances/:instance", InstanceController, :delete)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_moderation_log_read)
+
+ get("/moderation_log", AdminAPIController, :list_log)
+ end
+
+ # AdminAPI: admins and mods (staff) can perform these actions (if privileged by role)
+ scope "/api/v1/pleroma/admin", Pleroma.Web.AdminAPI do
+ pipe_through(:require_privileged_role_statistics_read)
+
+ get("/stats", AdminAPIController, :stats)
+ end
+
scope "/api/v1/pleroma/emoji", Pleroma.Web.PleromaAPI do
scope "/pack" do
- pipe_through(:admin_api)
+ pipe_through(:require_privileged_role_emoji_manage_emoji)
post("/", EmojiPackController, :create)
patch("/", EmojiPackController, :update)
@@ -313,7 +449,7 @@ defmodule Pleroma.Web.Router do
# Modifying packs
scope "/packs" do
- pipe_through(:admin_api)
+ pipe_through(:require_privileged_role_emoji_manage_emoji)
get("/import", EmojiPackController, :import_from_filesystem)
get("/remote", EmojiPackController, :remote)
@@ -337,8 +473,11 @@ defmodule Pleroma.Web.Router do
pipe_through(:pleroma_html)
post("/main/ostatus", UtilController, :remote_subscribe)
+ get("/main/ostatus", UtilController, :show_subscribe_form)
get("/ostatus_subscribe", RemoteFollowController, :follow)
post("/ostatus_subscribe", RemoteFollowController, :do_follow)
+
+ get("/authorize_interaction", RemoteFollowController, :authorize_interaction)
end
scope "/api/pleroma", Pleroma.Web.TwitterAPI do
@@ -347,8 +486,13 @@ defmodule Pleroma.Web.Router do
post("/change_email", UtilController, :change_email)
post("/change_password", UtilController, :change_password)
post("/delete_account", UtilController, :delete_account)
- put("/notification_settings", UtilController, :update_notificaton_settings)
+ put("/notification_settings", UtilController, :update_notification_settings)
post("/disable_account", UtilController, :disable_account)
+ post("/move_account", UtilController, :move_account)
+
+ put("/aliases", UtilController, :add_alias)
+ get("/aliases", UtilController, :list_aliases)
+ delete("/aliases", UtilController, :delete_alias)
end
scope "/api/pleroma", Pleroma.Web.PleromaAPI do
@@ -441,12 +585,19 @@ defmodule Pleroma.Web.Router do
get("/backups", BackupController, :index)
post("/backups", BackupController, :create)
+
+ get("/bookmark_folders", BookmarkFolderController, :index)
+ post("/bookmark_folders", BookmarkFolderController, :create)
+ patch("/bookmark_folders/:id", BookmarkFolderController, :update)
+ delete("/bookmark_folders/:id", BookmarkFolderController, :delete)
end
scope [] do
pipe_through(:api)
get("/accounts/:id/favourites", AccountController, :favourites)
get("/accounts/:id/endorsements", AccountController, :endorsements)
+
+ get("/statuses/:id/quotes", StatusController, :quotes)
end
scope [] do
@@ -458,13 +609,19 @@ defmodule Pleroma.Web.Router do
get("/birthdays", AccountController, :birthdays)
end
+ scope [] do
+ pipe_through(:authenticated_api)
+
+ get("/settings/:app", SettingsController, :show)
+ patch("/settings/:app", SettingsController, :update)
+ end
+
post("/accounts/confirmation_resend", AccountController, :confirmation_resend)
end
scope "/api/v1/pleroma", Pleroma.Web.PleromaAPI do
pipe_through(:api)
get("/accounts/:id/scrobbles", ScrobbleController, :index)
- get("/federation_status", InstancesController, :show)
end
scope "/api/v2/pleroma", Pleroma.Web.PleromaAPI do
@@ -481,6 +638,7 @@ defmodule Pleroma.Web.Router do
patch("/accounts/update_credentials", AccountController, :update_credentials)
get("/accounts/relationships", AccountController, :relationships)
+ get("/accounts/familiar_followers", AccountController, :familiar_followers)
get("/accounts/:id/lists", AccountController, :lists)
get("/accounts/:id/identity_proofs", AccountController, :identity_proofs)
get("/endorsements", AccountController, :endorsements)
@@ -497,6 +655,7 @@ defmodule Pleroma.Web.Router do
post("/accounts/:id/note", AccountController, :note)
post("/accounts/:id/pin", AccountController, :endorse)
post("/accounts/:id/unpin", AccountController, :unendorse)
+ post("/accounts/:id/remove_from_followers", AccountController, :remove_from_followers)
get("/conversations", ConversationController, :index)
post("/conversations/:id/read", ConversationController, :mark_as_read)
@@ -558,6 +717,7 @@ defmodule Pleroma.Web.Router do
get("/bookmarks", StatusController, :bookmarks)
post("/statuses", StatusController, :create)
+ put("/statuses/:id", StatusController, :update)
delete("/statuses/:id", StatusController, :delete)
post("/statuses/:id/reblog", StatusController, :reblog)
post("/statuses/:id/unreblog", StatusController, :unreblog)
@@ -610,13 +770,15 @@ defmodule Pleroma.Web.Router do
get("/instance", InstanceController, :show)
get("/instance/peers", InstanceController, :peers)
+ get("/instance/rules", InstanceController, :rules)
get("/statuses", StatusController, :index)
get("/statuses/:id", StatusController, :show)
get("/statuses/:id/context", StatusController, :context)
- get("/statuses/:id/card", StatusController, :card)
get("/statuses/:id/favourited_by", StatusController, :favourited_by)
get("/statuses/:id/reblogged_by", StatusController, :reblogged_by)
+ get("/statuses/:id/history", StatusController, :show_history)
+ get("/statuses/:id/source", StatusController, :show_source)
get("/custom_emojis", CustomEmojiController, :index)
@@ -632,11 +794,14 @@ defmodule Pleroma.Web.Router do
scope "/api/v2", Pleroma.Web.MastodonAPI do
pipe_through(:api)
+
get("/search", SearchController, :search2)
post("/media", MediaController, :create2)
get("/suggestions", SuggestionController, :index2)
+
+ get("/instance", InstanceController, :show2)
end
scope "/api", Pleroma.Web do
@@ -678,11 +843,6 @@ defmodule Pleroma.Web.Router do
get("/activities/:uuid", OStatus.OStatusController, :activity)
get("/notice/:id", OStatus.OStatusController, :notice)
- # Notice compatibility routes for other frontends
- get("/@:nickname/:id", OStatus.OStatusController, :notice)
- get("/@:nickname/posts/:id", OStatus.OStatusController, :notice)
- get("/:nickname/status/:id", OStatus.OStatusController, :notice)
-
# Mastodon compatibility routes
get("/users/:nickname/statuses/:id", OStatus.OStatusController, :object)
get("/users/:nickname/statuses/:id/activity", OStatus.OStatusController, :activity)
@@ -698,8 +858,7 @@ defmodule Pleroma.Web.Router do
end
scope "/", Pleroma.Web do
- # Note: html format is supported only if static FE is enabled
- pipe_through([:accepts_html_xml, :static_fe])
+ pipe_through([:accepts_html_xml])
get("/users/:nickname/feed", Feed.UserController, :feed, as: :user_feed)
end
@@ -860,16 +1019,15 @@ defmodule Pleroma.Web.Router do
scope "/", Pleroma.Web.Fallback do
get("/registration/:token", RedirectController, :registration_page)
get("/:maybe_nickname_or_id", RedirectController, :redirector_with_meta)
- match(:*, "/api/pleroma*path", LegacyPleromaApiRerouterPlug, [])
- get("/api*path", RedirectController, :api_not_implemented)
+ match(:*, "/api/pleroma/*path", LegacyPleromaApiRerouterPlug, [])
+ get("/api/*path", RedirectController, :api_not_implemented)
get("/*path", RedirectController, :redirector_with_preload)
options("/*path", RedirectController, :empty)
end
- # TODO: Change to Phoenix.Router.routes/1 for Phoenix 1.6.0+
def get_api_routes do
- __MODULE__.__routes__()
+ Phoenix.Router.routes(__MODULE__)
|> Enum.reject(fn r -> r.plug == Pleroma.Web.Fallback.RedirectController end)
|> Enum.map(fn r ->
r.path
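
Note: the `:require_privileged_role_*` pipelines above replace the single `:require_privileged_staff` gate with one `EnsurePrivilegedPlug` check per privilege. A hedged sketch of how such privileges might be granted per role; the `:instance` key names are assumptions inferred from the plug arguments, not confirmed by this diff:

```elixir
# Assumed shape only: each list names the privileges granted to that role.
config :pleroma, :instance,
  admin_privileges: [
    :users_delete,
    :users_manage_credentials,
    :reports_manage_reports,
    :moderation_log_read,
    :statistics_read
  ],
  moderator_privileges: [
    :messages_read,
    :reports_manage_reports
  ]
```
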
diff --git a/lib/pleroma/web/static_fe/static_fe_controller.ex b/lib/pleroma/web/static_fe/static_fe_controller.ex
index b20a3689c..012f8e464 100644
--- a/lib/pleroma/web/static_fe/static_fe_controller.ex
+++ b/lib/pleroma/web/static_fe/static_fe_controller.ex
@@ -13,7 +13,6 @@ defmodule Pleroma.Web.StaticFE.StaticFEController do
alias Pleroma.Web.Metadata
alias Pleroma.Web.Router.Helpers
- plug(:put_layout, :static_fe)
plug(:assign_id)
@page_keys ["max_id", "min_id", "limit", "since_id", "order"]
@@ -22,10 +21,18 @@ defmodule Pleroma.Web.StaticFE.StaticFEController do
def show(%{assigns: %{notice_id: notice_id}} = conn, _params) do
with %Activity{local: true} = activity <-
Activity.get_by_id_with_object(notice_id),
- true <- Visibility.is_public?(activity.object),
+ true <- Visibility.public?(activity.object),
{_, true} <- {:visible?, Visibility.visible_for_user?(activity, _reading_user = nil)},
%User{} = user <- User.get_by_ap_id(activity.object.data["actor"]) do
- meta = Metadata.build_tags(%{activity_id: notice_id, object: activity.object, user: user})
+ url = Helpers.url(conn) <> conn.request_path
+
+ meta =
+ Metadata.build_tags(%{
+ activity_id: notice_id,
+ object: activity.object,
+ user: user,
+ url: url
+ })
timeline =
activity.object.data["context"]
@@ -167,15 +174,6 @@ defp represent(%Activity{object: %Object{data: data}} = activity, selected) do
defp assign_id(%{path_info: ["notice", notice_id]} = conn, _opts),
do: assign(conn, :notice_id, notice_id)
- defp assign_id(%{path_info: ["@" <> _nickname, notice_id]} = conn, _opts),
- do: assign(conn, :notice_id, notice_id)
-
- defp assign_id(%{path_info: ["@" <> _nickname, "posts", notice_id]} = conn, _opts),
- do: assign(conn, :notice_id, notice_id)
-
- defp assign_id(%{path_info: [_nickname, "status", notice_id]} = conn, _opts),
- do: assign(conn, :notice_id, notice_id)
-
defp assign_id(%{path_info: ["users", user_id]} = conn, _opts),
do: assign(conn, :username_or_id, user_id)
diff --git a/lib/pleroma/web/streamer.ex b/lib/pleroma/web/streamer.ex
index ff7f62a1e..7065fdab2 100644
--- a/lib/pleroma/web/streamer.ex
+++ b/lib/pleroma/web/streamer.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.Streamer do
require Logger
+ require Pleroma.Constants
alias Pleroma.Activity
alias Pleroma.Chat.MessageReference
@@ -19,12 +20,12 @@ defmodule Pleroma.Web.Streamer do
alias Pleroma.Web.Plugs.OAuthScopesPlug
alias Pleroma.Web.StreamerView
- @mix_env Mix.env()
@registry Pleroma.Web.StreamerRegistry
def registry, do: @registry
- @public_streams ["public", "public:local", "public:media", "public:local:media"]
+ @public_streams Pleroma.Constants.public_streams()
+ @local_streams ["public:local", "public:local:media"]
@user_streams ["user", "user:notification", "direct", "user:pleroma_chat"]
@doc "Expands and authorizes a stream, and registers the process for streaming."
@@ -32,23 +33,50 @@ def registry, do: @registry
stream :: String.t(),
User.t() | nil,
Token.t() | nil,
- Map.t() | nil
+ map() | nil
) ::
{:ok, topic :: String.t()} | {:error, :bad_topic} | {:error, :unauthorized}
def get_topic_and_add_socket(stream, user, oauth_token, params \\ %{}) do
with {:ok, topic} <- get_topic(stream, user, oauth_token, params) do
- add_socket(topic, user)
+ add_socket(topic, oauth_token)
+ end
+ end
+
+ defp can_access_stream(user, oauth_token, kind) do
+ with {_, true} <- {:restrict?, Config.restrict_unauthenticated_access?(:timelines, kind)},
+ {_, %User{id: user_id}, %Token{user_id: user_id}} <- {:user, user, oauth_token},
+ {_, true} <-
+ {:scopes,
+ OAuthScopesPlug.filter_descendants(["read:statuses"], oauth_token.scopes) != []} do
+ true
+ else
+ {:restrict?, _} ->
+ true
+
+ _ ->
+ false
end
end
@doc "Expand and authorizes a stream"
- @spec get_topic(stream :: String.t(), User.t() | nil, Token.t() | nil, Map.t()) ::
- {:ok, topic :: String.t()} | {:error, :bad_topic}
+ @spec get_topic(stream :: String.t() | nil, User.t() | nil, Token.t() | nil, map()) ::
+ {:ok, topic :: String.t() | nil} | {:error, :bad_topic}
def get_topic(stream, user, oauth_token, params \\ %{})
- # Allow all public steams.
- def get_topic(stream, _user, _oauth_token, _params) when stream in @public_streams do
- {:ok, stream}
+ def get_topic(nil = _stream, _user, _oauth_token, _params) do
+ {:ok, nil}
+ end
+
+ # Allow all public streams if the instance allows unauthenticated access.
+ # Otherwise, only allow users with valid OAuth tokens.
+ def get_topic(stream, user, oauth_token, _params) when stream in @public_streams do
+ kind = if stream in @local_streams, do: :local, else: :federated
+
+ if can_access_stream(user, oauth_token, kind) do
+ {:ok, stream}
+ else
+ {:error, :unauthorized}
+ end
end
# Allow all hashtags streams.
@@ -57,12 +85,20 @@ def get_topic("hashtag", _user, _oauth_token, %{"tag" => tag} = _params) do
end
# Allow remote instance streams.
- def get_topic("public:remote", _user, _oauth_token, %{"instance" => instance} = _params) do
- {:ok, "public:remote:" <> instance}
+ def get_topic("public:remote", user, oauth_token, %{"instance" => instance} = _params) do
+ if can_access_stream(user, oauth_token, :federated) do
+ {:ok, "public:remote:" <> instance}
+ else
+ {:error, :unauthorized}
+ end
end
- def get_topic("public:remote:media", _user, _oauth_token, %{"instance" => instance} = _params) do
- {:ok, "public:remote:media:" <> instance}
+ def get_topic("public:remote:media", user, oauth_token, %{"instance" => instance} = _params) do
+ if can_access_stream(user, oauth_token, :federated) do
+ {:ok, "public:remote:media:" <> instance}
+ else
+ {:error, :unauthorized}
+ end
end
# Expand user streams.
@@ -120,10 +156,10 @@ def get_topic(_stream, _user, _oauth_token, _params) do
end
@doc "Registers the process for streaming. Use `get_topic/3` to get the full authorized topic."
- def add_socket(topic, user) do
+ def add_socket(topic, oauth_token) do
if should_env_send?() do
- auth? = if user, do: true
- Registry.register(@registry, topic, auth?)
+ oauth_token_id = if oauth_token, do: oauth_token.id, else: false
+ Registry.register(@registry, topic, oauth_token_id)
end
{:ok, topic}
@@ -187,8 +223,8 @@ defp do_stream("direct", item) do
end
defp do_stream("follow_relationship", item) do
- text = StreamerView.render("follow_relationships_update.json", item)
user_topic = "user:#{item.follower.id}"
+ text = StreamerView.render("follow_relationships_update.json", item, user_topic)
Logger.debug("Trying to push follow relationship update to #{user_topic}\n\n")
@@ -209,7 +245,7 @@ defp do_stream("participation", participation) do
defp do_stream("list", item) do
# filter the recipient list if the activity is not public, see #270.
recipient_lists =
- case Visibility.is_public?(item) do
+ case Visibility.public?(item) do
true ->
Pleroma.List.get_lists_from_activity(item)
@@ -234,9 +270,11 @@ defp do_stream("list", item) do
defp do_stream(topic, %Notification{} = item)
when topic in ["user", "user:notification"] do
- Registry.dispatch(@registry, "#{topic}:#{item.user_id}", fn list ->
+ user_topic = "#{topic}:#{item.user_id}"
+
+ Registry.dispatch(@registry, user_topic, fn list ->
Enum.each(list, fn {pid, _auth} ->
- send(pid, {:render_with_user, StreamerView, "notification.json", item})
+ send(pid, {:render_with_user, StreamerView, "notification.json", item, user_topic})
end)
end)
end
@@ -245,7 +283,7 @@ defp do_stream(topic, {user, %MessageReference{} = cm_ref})
when topic in ["user", "user:pleroma_chat"] do
topic = "#{topic}:#{user.id}"
- text = StreamerView.render("chat_update.json", %{chat_message_reference: cm_ref})
+ text = StreamerView.render("chat_update.json", %{chat_message_reference: cm_ref}, topic)
Registry.dispatch(@registry, topic, fn list ->
Enum.each(list, fn {pid, _auth} ->
@@ -273,7 +311,7 @@ defp do_stream(topic, item) do
end
defp push_to_socket(topic, %Participation{} = participation) do
- rendered = StreamerView.render("conversation.json", participation)
+ rendered = StreamerView.render("conversation.json", participation, topic)
Registry.dispatch(@registry, topic, fn list ->
Enum.each(list, fn {pid, _} ->
@@ -296,13 +334,34 @@ defp push_to_socket(topic, %Activity{
defp push_to_socket(_topic, %Activity{data: %{"type" => "Delete"}}), do: :noop
- defp push_to_socket(topic, item) do
- anon_render = StreamerView.render("update.json", item)
+ defp push_to_socket(topic, %Activity{data: %{"type" => "Update"}} = item) do
+ create_activity =
+ Pleroma.Activity.get_create_by_object_ap_id(item.object.data["id"])
+ |> Map.put(:object, item.object)
+
+ anon_render = StreamerView.render("status_update.json", create_activity, topic)
Registry.dispatch(@registry, topic, fn list ->
Enum.each(list, fn {pid, auth?} ->
if auth? do
- send(pid, {:render_with_user, StreamerView, "update.json", item})
+ send(
+ pid,
+ {:render_with_user, StreamerView, "status_update.json", create_activity, topic}
+ )
+ else
+ send(pid, {:text, anon_render})
+ end
+ end)
+ end)
+ end
+
+ defp push_to_socket(topic, item) do
+ anon_render = StreamerView.render("update.json", item, topic)
+
+ Registry.dispatch(@registry, topic, fn list ->
+ Enum.each(list, fn {pid, auth?} ->
+ if auth? do
+ send(pid, {:render_with_user, StreamerView, "update.json", item, topic})
else
send(pid, {:text, anon_render})
end
@@ -320,25 +379,36 @@ defp thread_containment(activity, user) do
end
end
- # In test environement, only return true if the registry is started.
- # In benchmark environment, returns false.
- # In any other environment, always returns true.
- cond do
- @mix_env == :test ->
- def should_env_send? do
- case Process.whereis(@registry) do
- nil ->
- false
+ def close_streams_by_oauth_token(oauth_token) do
+ if should_env_send?() do
+ Registry.select(
+ @registry,
+ [
+ {
+ {:"$1", :"$2", :"$3"},
+ [{:==, :"$3", oauth_token.id}],
+ [:"$2"]
+ }
+ ]
+ )
+ |> Enum.each(fn pid -> send(pid, :close) end)
+ end
+ end
- pid ->
- Process.alive?(pid)
- end
+ # In dev/prod the streamer registry is expected to be started, so return true.
+ # In test the registry may or may not be started, so check whether it is alive.
+ # In benchmark the registry process is never found alive, so this returns false.
+ def should_env_send? do
+ if Application.get_env(:pleroma, Pleroma.Application)[:streamer_registry] do
+ true
+ else
+ case Process.whereis(@registry) do
+ nil ->
+ false
+
+ pid ->
+ Process.alive?(pid)
end
-
- @mix_env == :benchmark ->
- def should_env_send?, do: false
-
- true ->
- def should_env_send?, do: true
+ end
end
end
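
Note: registering streaming sockets under the OAuth token id (instead of a bare auth flag) is what makes `close_streams_by_oauth_token/1` above possible: revoking a token can now shut down any live streams it opened. A hedged usage sketch — the wrapping module and `revoke/1` function are illustrative, not code touched by this diff:

```elixir
defmodule MyInstance.TokenRevoker do
  # Illustrative call site only; assumes an OAuth token struct with an :id field.
  def revoke(token) do
    with {:ok, deleted} <- Pleroma.Repo.delete(token) do
      Pleroma.Web.Streamer.close_streams_by_oauth_token(deleted)
      {:ok, deleted}
    end
  end
end
```
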
diff --git a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex
index 57bd92468..b774f7984 100644
--- a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex
+++ b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex
@@ -3,15 +3,15 @@
http://activitystrea.ms/schema/1.0/post
<%= @data["id"] %>
<%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %>
- <%= activity_content(@data) %>
- <%= @activity.data["published"] %>
- <%= @activity.data["published"] %>
+ <%= activity_description(@data) %>
+ <%= to_rfc3339(@data["published"]) %>
+ <%= to_rfc3339(@data["published"]) %>
<%= activity_context(@activity) %>
- <%= if @data["summary"] do %>
+ <%= if @data["summary"] != "" do %>
<%= escape(@data["summary"]) %>
<% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex
index 279f2171d..7de98f736 100644
--- a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex
+++ b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex
@@ -3,17 +3,12 @@
http://activitystrea.ms/schema/1.0/post
<%= @data["id"] %>
<%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %>
- <%= activity_content(@data) %>
- <%= @activity.data["published"] %>
- <%= @activity.data["published"] %>
+ <%= activity_description(@data) %>
+ <%= to_rfc2822(@data["published"]) %>
<%= activity_context(@activity) %>
- <%= if @data["summary"] do %>
- <%= escape(@data["summary"]) %>
- <% end %>
-
<%= if @activity.local do %>
<%= @data["id"] %>
<% else %>
@@ -27,7 +22,7 @@
<% end %>
<%= for attachment <- @data["attachment"] || [] do %>
- <%= attachment_href(attachment) %>
+
<% end %>
<%= if @data["inReplyTo"] do %>
diff --git a/lib/pleroma/web/templates/feed/feed/_author.atom.eex b/lib/pleroma/web/templates/feed/feed/_author.atom.eex
index 25cbffada..90be8a559 100644
--- a/lib/pleroma/web/templates/feed/feed/_author.atom.eex
+++ b/lib/pleroma/web/templates/feed/feed/_author.atom.eex
@@ -1,17 +1,14 @@
- <%= @user.ap_id %>
- http://activitystrea.ms/schema/1.0/person
<%= @user.ap_id %>
+ <%= @user.nickname %>
+ http://activitystrea.ms/schema/1.0/person
+ <%= @user.name %>
+ <%= User.avatar_url(@user) %>
+ <%= @user.ap_id %>
+ <%= to_rfc3339(@user.inserted_at) %>
+ <%= to_rfc3339(@user.updated_at) %>
+ <%= @user.ap_id %>
<%= @user.nickname %>
<%= @user.name %>
<%= escape(@user.bio) %>
- <%= escape(@user.bio) %>
- <%= @user.nickname %>
-
- <%= if User.banner_url(@user) do %>
-
- <% end %>
- <%= if @user.local do %>
- true
- <% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/_author.rss.eex b/lib/pleroma/web/templates/feed/feed/_author.rss.eex
index 526aeddcf..22477e6b1 100644
--- a/lib/pleroma/web/templates/feed/feed/_author.rss.eex
+++ b/lib/pleroma/web/templates/feed/feed/_author.rss.eex
@@ -1,17 +1,10 @@
-
- <%= @user.ap_id %>
- http://activitystrea.ms/schema/1.0/person
- <%= @user.ap_id %>
- <%= @user.nickname %>
- <%= @user.name %>
- <%= escape(@user.bio) %>
- <%= escape(@user.bio) %>
- <%= @user.nickname %>
- <%= User.avatar_url(@user) %>
- <%= if User.banner_url(@user) do %>
- <%= User.banner_url(@user) %>
- <% end %>
- <%= if @user.local do %>
- true
- <% end %>
-
+<%= "#{email(@user)} (#{escape(@user.name)})" %>
+http://activitystrea.ms/schema/1.0/person
+<%= @user.name %>
+<%= User.avatar_url(@user) %>
+<%= @user.ap_id %>
+<%= to_rfc3339(@user.inserted_at) %>
+<%= to_rfc3339(@user.updated_at) %>
+<%= @user.nickname %>
+<%= @user.name %>
+<%= escape(@user.bio) %>
diff --git a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex
index aa3035bca..03c222975 100644
--- a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex
+++ b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex
@@ -1,12 +1,22 @@
- http://activitystrea.ms/schema/1.0/note
- http://activitystrea.ms/schema/1.0/post
+ http://activitystrea.ms/schema/1.0/note
+ http://activitystrea.ms/schema/1.0/post
- <%= render @view_module, "_tag_author.atom", assigns %>
+ <%= render Phoenix.Controller.view_module(@conn), "_tag_author.atom", assigns %>
- <%= @data["id"] %>
- <%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %>
- <%= activity_content(@data) %>
+ <%= @data["id"] %>
+ <%= activity_title(@data, Keyword.get(@feed_config, :post_title, %{})) %>
+ <%= activity_description(@data) %>
+ <%= to_rfc3339(@data["published"]) %>
+ <%= to_rfc3339(@data["published"]) %>
+
+ <%= activity_context(@activity) %>
+
+
+
+ <%= if @data["summary"] != "" do %>
+ <%= @data["summary"] %>
+ <% end %>
<%= if @activity.local do %>
@@ -15,37 +25,25 @@
<% end %>
- <%= @activity.data["published"] %>
- <%= @activity.data["published"] %>
-
-
- <%= activity_context(@activity) %>
-
-
-
- <%= if @data["summary"] do %>
- <%= @data["summary"] %>
- <% end %>
-
- <%= for id <- @activity.recipients do %>
- <%= if id == Pleroma.Constants.as_public() do %>
+ <%= for id <- @activity.recipients do %>
+ <%= if id == Pleroma.Constants.as_public() do %>
+
+ <% else %>
+ <%= unless Regex.match?(~r/^#{Pleroma.Web.Endpoint.url()}.+followers$/, id) do %>
- <% else %>
- <%= unless Regex.match?(~r/^#{Pleroma.Web.Endpoint.url()}.+followers$/, id) do %>
-
- <% end %>
+ ostatus:object-type="http://activitystrea.ms/schema/1.0/person"
+ href="<%= id %>" />
<% end %>
<% end %>
+ <% end %>
- <%= for tag <- Pleroma.Object.hashtags(@object) do %>
-
- <% end %>
+ <%= for tag <- Pleroma.Object.hashtags(@object) do %>
+
+ <% end %>
- <%= for {emoji, file} <- @data["emoji"] || %{} do %>
-
- <% end %>
+ <%= for {emoji, file} <- @data["emoji"] || %{} do %>
+
+ <% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/_tag_activity.xml.eex b/lib/pleroma/web/templates/feed/feed/_tag_activity.xml.eex
index 2334e24a2..1b8c34b87 100644
--- a/lib/pleroma/web/templates/feed/feed/_tag_activity.xml.eex
+++ b/lib/pleroma/web/templates/feed/feed/_tag_activity.xml.eex
@@ -4,9 +4,9 @@
<%= activity_context(@activity) %>
<%= activity_context(@activity) %>
- <%= pub_date(@activity.data["published"]) %>
+ <%= to_rfc2822(@data["published"]) %>
- <%= activity_content(@data) %>
+ <%= activity_description(@data) %>
<%= for attachment <- @data["attachment"] || [] do %>
<% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/_tag_author.atom.eex b/lib/pleroma/web/templates/feed/feed/_tag_author.atom.eex
index 997c4936e..71c696832 100644
--- a/lib/pleroma/web/templates/feed/feed/_tag_author.atom.eex
+++ b/lib/pleroma/web/templates/feed/feed/_tag_author.atom.eex
@@ -1,18 +1,14 @@
- http://activitystrea.ms/schema/1.0/person
- <%= @actor.ap_id %>
- <%= @actor.ap_id %>
- <%= @actor.nickname %>
- <%= escape(@actor.bio) %>
-
- <%= if User.banner_url(@actor) do %>
-
- <% end %>
- <%= if @actor.local do %>
- true
- <% end %>
-
- <%= @actor.nickname %>
- <%= @actor.name %>
- <%= escape(@actor.bio) %>
+ <%= @actor.ap_id %>
+ <%= @actor.nickname %>
+ http://activitystrea.ms/schema/1.0/person
+ <%= @actor.name %>
+ <%= User.avatar_url(@actor) %>
+ <%= @actor.ap_id %>
+ <%= to_rfc3339(@actor.inserted_at) %>
+ <%= to_rfc3339(@actor.updated_at) %>
+ <%= @actor.ap_id %>
+ <%= @actor.nickname %>
+ <%= @actor.name %>
+ <%= escape(@actor.bio) %>
diff --git a/lib/pleroma/web/templates/feed/feed/tag.atom.eex b/lib/pleroma/web/templates/feed/feed/tag.atom.eex
index 6d497e84c..14b0ee594 100644
--- a/lib/pleroma/web/templates/feed/feed/tag.atom.eex
+++ b/lib/pleroma/web/templates/feed/feed/tag.atom.eex
@@ -1,22 +1,20 @@
+
-
+ <%= Routes.tag_feed_url(@conn, :feed, @tag) <> ".atom" %>
+ #<%= @tag %>
+ <%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %>
+ <%= feed_logo() %>
+ <%= most_recent_update(@activities) %>
+
- <%= '#{Routes.tag_feed_url(@conn, :feed, @tag)}.rss' %>
- #<%= @tag %>
-
- <%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %>
- <%= feed_logo() %>
- <%= most_recent_update(@activities) %>
-
- <%= for activity <- @activities do %>
- <%= render @view_module, "_tag_activity.atom", Map.merge(assigns, prepare_activity(activity, actor: true)) %>
- <% end %>
+ <%= for activity <- @activities do %>
+ <%= render Phoenix.Controller.view_module(@conn), "_tag_activity.atom", Map.merge(assigns, prepare_activity(activity, actor: true)) %>
+ <% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/tag.rss.eex b/lib/pleroma/web/templates/feed/feed/tag.rss.eex
index edcc3e436..27dde5627 100644
--- a/lib/pleroma/web/templates/feed/feed/tag.rss.eex
+++ b/lib/pleroma/web/templates/feed/feed/tag.rss.eex
@@ -1,15 +1,16 @@
-
+
-
#<%= @tag %>
<%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %>
<%= '#{Routes.tag_feed_url(@conn, :feed, @tag)}.rss' %>
<%= feed_logo() %>
2b90d9
<%= for activity <- @activities do %>
- <%= render @view_module, "_tag_activity.xml", Map.merge(assigns, prepare_activity(activity)) %>
+ <%= render Phoenix.Controller.view_module(@conn), "_tag_activity.xml", Map.merge(assigns, prepare_activity(activity)) %>
<% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/user.atom.eex b/lib/pleroma/web/templates/feed/feed/user.atom.eex
index 5c1f0ecbc..e36bfc66c 100644
--- a/lib/pleroma/web/templates/feed/feed/user.atom.eex
+++ b/lib/pleroma/web/templates/feed/feed/user.atom.eex
@@ -8,17 +8,18 @@
<%= Routes.user_feed_url(@conn, :feed, @user.nickname) <> ".atom" %>
<%= @user.nickname <> "'s timeline" %>
- <%= most_recent_update(@activities, @user) %>
+ <%= escape(@user.bio) %>
+ <%= most_recent_update(@activities, @user, :atom) %>
<%= logo(@user) %>
- <%= render @view_module, "_author.atom", assigns %>
+ <%= render Phoenix.Controller.view_module(@conn), "_author.atom", assigns %>
<%= if last_activity(@activities) do %>
<% end %>
<%= for activity <- @activities do %>
- <%= render @view_module, "_activity.atom", Map.merge(assigns, prepare_activity(activity)) %>
+ <%= render Phoenix.Controller.view_module(@conn), "_activity.atom", Map.merge(assigns, prepare_activity(activity)) %>
<% end %>
diff --git a/lib/pleroma/web/templates/feed/feed/user.rss.eex b/lib/pleroma/web/templates/feed/feed/user.rss.eex
index 6b842a085..fae3fcf3d 100644
--- a/lib/pleroma/web/templates/feed/feed/user.rss.eex
+++ b/lib/pleroma/web/templates/feed/feed/user.rss.eex
@@ -1,20 +1,30 @@
-
+
- <%= Routes.user_feed_url(@conn, :feed, @user.nickname) <> ".rss" %>
<%= @user.nickname <> "'s timeline" %>
- <%= most_recent_update(@activities, @user) %>
- <%= logo(@user) %>
<%= '#{Routes.user_feed_url(@conn, :feed, @user.nickname)}.rss' %>
+ "
+ rel="self" type="application/rss+xml" />
+ <%= escape(@user.bio) %>
+
+ <%= logo(@user) %>
+ <%= @user.nickname <> "'s timeline" %>
+ <%= '#{Routes.user_feed_url(@conn, :feed, @user.nickname)}.rss' %>
+
- <%= render @view_module, "_author.rss", assigns %>
+ <%= render Phoenix.Controller.view_module(@conn), "_author.rss", assigns %>
<%= if last_activity(@activities) do %>
<%= '#{Routes.user_feed_url(@conn, :feed, @user.nickname)}.rss?max_id=#{last_activity(@activities).id}' %>
<% end %>
<%= for activity <- @activities do %>
- <%= render @view_module, "_activity.rss", Map.merge(assigns, prepare_activity(activity)) %>
+ <%= render Phoenix.Controller.view_module(@conn), "_activity.rss", Map.merge(assigns, prepare_activity(activity)) %>
<% end %>
diff --git a/lib/pleroma/web/templates/layout/email.html.eex b/lib/pleroma/web/templates/layout/email.html.eex
index 087aa4fc0..5858e48b4 100644
--- a/lib/pleroma/web/templates/layout/email.html.eex
+++ b/lib/pleroma/web/templates/layout/email.html.eex
@@ -5,6 +5,6 @@
<%= @email.subject %>
- <%= render @view_module, @view_template, assigns %>
+ <%= render Phoenix.Controller.view_module(@conn), Phoenix.Controller.view_template(@conn), assigns %>