Compare commits

...

31 Commits

Author SHA1 Message Date
ida schmidt 26a22b8fcc Merge tag 'v4.0.10' of https://github.com/mastodon/mastodon into HEAD 2023-09-19 09:07:22 -07:00
Claire 3d8ae6ab73 Bump version to v4.0.10 2023-09-19 17:01:32 +02:00
Claire 5c64f01b19 Fix moderator rights inconsistencies (#26729) 2023-09-19 17:01:32 +02:00
Claire 57acad0e9f Fix crash when encountering invalid URL (#26814) 2023-09-19 17:01:32 +02:00
Claire 3ab722a79c Fix cached posts including stale stats (#26409) 2023-09-19 17:01:32 +02:00
Nicolai Søborg 871e63edff Fix `frame_rate` for videos where `ffprobe` reports 0/0 (#26500) 2023-09-19 17:01:32 +02:00
yufushiro bc4408db08 Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough (#26608) (Co-authored-by: Claire <claire.github-309c@sitedethib.com>) 2023-09-19 17:01:32 +02:00
Claire 9ae857c035 Merge pull request from GHSA-v3xf-c9qf-j667 2023-09-19 16:53:58 +02:00
Claire 9fa89dbdcb Merge pull request from GHSA-2693-xr3m-jhqr 2023-09-19 16:53:21 +02:00
Claire 75400abe0b Change Dockerfile to upgrade packages when building (#26930) (Co-authored-by: Renaud Chaput <renchap@gmail.com>) 2023-09-18 08:32:00 +02:00
Claire 7df732e9a9 Update actions for stable-4.0 (#26813) (Co-authored-by: Renaud Chaput <renchap@gmail.com>) 2023-09-06 12:18:28 +02:00
Claire a9915c596b Bump version to v4.0.9 2023-09-05 18:51:01 +02:00
Claire 48fbb9d53d Fix Dockerfile installing incompatible npm version (#26803) 2023-09-05 18:51:01 +02:00
Emelia Smith d3e97e8c23 Allow reports with long comments from remote instances, but truncate (#25028) 2023-09-05 18:51:01 +02:00
Daniel M Brasil db8db60244 Fix `/api/v1/timelines/tag/:hashtag` allowing for unauthenticated access when public preview is disabled (#26237) 2023-09-05 18:51:01 +02:00
Claire d30fbc0900 Fix blocking subdomains of an already-blocked domain (#26392) 2023-09-05 18:51:01 +02:00
Claire a62d9a9a78 Change text extraction in `PlainTextFormatter` to be faster (#26727) 2023-09-05 18:51:01 +02:00
Claire 2b0cabe0d7 Backport container build changes to the stable-4.0 branch (#26741) (Co-authored-by: Renaud Chaput <renchap@gmail.com>) 2023-08-31 19:54:13 +02:00
Claire e3c57f1add Bump version to v4.0.8 2023-07-31 14:33:14 +02:00
Renaud Chaput 879b8b69d3 Fix missing return values in streaming (#26233) 2023-07-31 14:33:14 +02:00
Emelia Smith 8018d478ab Fix: Streaming server memory leak in HTTP EventSource cleanup (#26228) 2023-07-31 14:33:14 +02:00
Claire fea5640374 Fix incorrect connect timeout in outgoing requests (#26116) 2023-07-31 14:33:14 +02:00
Emelia Smith 663f801337 Refactor streaming's filtering logic & improve documentation (#26213) 2023-07-31 14:33:14 +02:00
Claire f52763d926 Fix wrong filters sometimes applying in streaming (#26159) 2023-07-31 14:33:14 +02:00
Claire 10fcccedf2 Bump version to v4.0.7 2023-07-21 16:07:35 +02:00
Claire c46aa2348e Add check preventing Sidekiq workers from running with Makara configured (#25850) (Co-authored-by: Eugen Rochko <eugen@zeonfederated.com>) 2023-07-21 16:07:35 +02:00
Claire fc4a93b937 Fix CSP headers being unintendedly wide (#26105) 2023-07-21 16:07:35 +02:00
Claire aca0db4bd6 Change request timeout handling to use a longer deadline (#26055) 2023-07-21 16:07:35 +02:00
Claire 73b16b674d Fix moderation interface for remote instances with a .zip TLD (#25885) 2023-07-21 16:07:35 +02:00
Claire bd2429b716 Fix remote accounts being possibly persisted to database with incomplete protocol values (#25886) 2023-07-21 16:07:35 +02:00
Michael Stanclift 8695409035 Fix trending publishers table not rendering correctly on narrow screens (#25945) 2023-07-21 16:07:35 +02:00
39 changed files with 602 additions and 430 deletions

View File

@@ -1,225 +0,0 @@
version: 2.1

orbs:
  ruby: circleci/ruby@1.4.1
  node: circleci/node@5.0.1

executors:
  default:
    parameters:
      ruby-version:
        type: string
    docker:
      - image: cimg/ruby:<< parameters.ruby-version >>
        environment:
          BUNDLE_JOBS: 3
          BUNDLE_RETRY: 3
          CONTINUOUS_INTEGRATION: true
          DB_HOST: localhost
          DB_USER: root
          DISABLE_SIMPLECOV: true
          RAILS_ENV: test
      - image: cimg/postgres:14.0
        environment:
          POSTGRES_USER: root
          POSTGRES_HOST_AUTH_METHOD: trust
      - image: cimg/redis:6.2

commands:
  install-system-dependencies:
    steps:
      - run:
          name: Install system dependencies
          command: |
            sudo apt-get update
            sudo apt-get install -y libicu-dev libidn11-dev
  install-ruby-dependencies:
    parameters:
      ruby-version:
        type: string
    steps:
      - run:
          command: |
            bundle config clean 'true'
            bundle config frozen 'true'
            bundle config without 'development production'
          name: Set bundler settings
      - ruby/install-deps:
          bundler-version: '2.3.8'
          key: ruby<< parameters.ruby-version >>-gems-v1
  wait-db:
    steps:
      - run:
          command: dockerize -wait tcp://localhost:5432 -wait tcp://localhost:6379 -timeout 1m
          name: Wait for PostgreSQL and Redis

jobs:
  build:
    docker:
      - image: cimg/ruby:3.0-node
        environment:
          RAILS_ENV: test
    steps:
      - checkout
      - install-system-dependencies
      - install-ruby-dependencies:
          ruby-version: '3.0'
      - node/install-packages:
          cache-version: v1
          pkg-manager: yarn
      - run:
          command: |
            export NODE_OPTIONS=--openssl-legacy-provider
            ./bin/rails assets:precompile
          name: Precompile assets
      - persist_to_workspace:
          paths:
            - public/assets
            - public/packs-test
          root: .
  test:
    parameters:
      ruby-version:
        type: string
    executor:
      name: default
      ruby-version: << parameters.ruby-version >>
    environment:
      ALLOW_NOPAM: true
      PAM_ENABLED: true
      PAM_DEFAULT_SERVICE: pam_test
      PAM_CONTROLLED_SERVICE: pam_test_controlled
    parallelism: 4
    steps:
      - checkout
      - install-system-dependencies
      - run:
          command: sudo apt-get install -y ffmpeg imagemagick libpam-dev
          name: Install additional system dependencies
      - run:
          command: bundle config with 'pam_authentication'
          name: Enable PAM authentication
      - install-ruby-dependencies:
          ruby-version: << parameters.ruby-version >>
      - attach_workspace:
          at: .
      - wait-db
      - run:
          command: ./bin/rails db:create db:schema:load db:seed
          name: Load database schema
      - ruby/rspec-test
  test-migrations:
    executor:
      name: default
      ruby-version: '3.0'
    steps:
      - checkout
      - install-system-dependencies
      - install-ruby-dependencies:
          ruby-version: '3.0'
      - wait-db
      - run:
          command: ./bin/rails db:create
          name: Create database
      - run:
          command: ./bin/rails db:migrate VERSION=20171010025614
          name: Run migrations up to v2.0.0
      - run:
          command: ./bin/rails tests:migrations:populate_v2
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180514140000
          name: Run migrations up to v2.4.0
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180707154237
          name: Run migrations up to v2.4.3
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4_3
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate
          name: Run all remaining migrations
      - run:
          command: ./bin/rails tests:migrations:check_database
          name: Check migration result
  test-two-step-migrations:
    executor:
      name: default
      ruby-version: '3.0'
    steps:
      - checkout
      - install-system-dependencies
      - install-ruby-dependencies:
          ruby-version: '3.0'
      - wait-db
      - run:
          command: ./bin/rails db:create
          name: Create database
      - run:
          command: ./bin/rails db:migrate VERSION=20171010025614
          name: Run migrations up to v2.0.0
      - run:
          command: ./bin/rails tests:migrations:populate_v2
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180514140000
          name: Run pre-deployment migrations up to v2.4.0
          environment:
            SKIP_POST_DEPLOYMENT_MIGRATIONS: true
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180707154237
          name: Run migrations up to v2.4.3
          environment:
            SKIP_POST_DEPLOYMENT_MIGRATIONS: true
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4_3
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate
          name: Run all remaining pre-deployment migrations
          environment:
            SKIP_POST_DEPLOYMENT_MIGRATIONS: true
      - run:
          command: ./bin/rails db:migrate
          name: Run all post-deployment migrations
      - run:
          command: ./bin/rails tests:migrations:check_database
          name: Check migration result

workflows:
  version: 2
  build-and-test:
    jobs:
      - build
      - test:
          matrix:
            parameters:
              ruby-version:
                - '2.7'
                - '3.0'
          name: test-ruby<< matrix.ruby-version >>
          requires:
            - build
      - test-migrations:
          requires:
            - build
      - test-two-step-migrations:
          requires:
            - build
      - node/run:
          cache-version: v1
          name: test-webui
          pkg-manager: yarn
          requires:
            - build
          version: lts
          yarn-run: test:jest

View File

@@ -0,0 +1,92 @@
on:
  workflow_call:
    inputs:
      platforms:
        required: true
        type: string
      cache:
        type: boolean
        default: true
      use_native_arm64_builder:
        type: boolean
      push_to_images:
        type: string
      flavor:
        type: string
      tags:
        type: string
      labels:
        type: string

jobs:
  build-image:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - uses: docker/setup-qemu-action@v2
        if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder

      - uses: docker/setup-buildx-action@v2
        id: buildx
        if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}

      - name: Start a local Docker Builder
        if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
        run: |
          docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234

      - uses: docker/setup-buildx-action@v2
        id: buildx-native
        if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
        with:
          driver: remote
          endpoint: tcp://localhost:1234
          platforms: linux/amd64
          append: |
            - endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
              platforms: linux/arm64
              name: mastodon-docker-builder-arm64-01
              driver-opts:
                - servername=mastodon-docker-builder-arm64-01
        env:
          BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
          BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
          BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}

      - name: Log in to Docker Hub
        if: contains(inputs.push_to_images, 'tootsuite')
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to the Github Container registry
        if: contains(inputs.push_to_images, 'ghcr.io')
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/metadata-action@v4
        id: meta
        if: ${{ inputs.push_to_images != '' }}
        with:
          images: ${{ inputs.push_to_images }}
          flavor: ${{ inputs.flavor }}
          tags: ${{ inputs.tags }}
          labels: ${{ inputs.labels }}

      - uses: docker/build-push-action@v4
        with:
          context: .
          platforms: ${{ inputs.platforms }}
          provenance: false
          builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
          push: ${{ inputs.push_to_images != '' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: ${{ inputs.cache && 'type=gha' || '' }}
          cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}

View File

@@ -1,64 +0,0 @@
name: Build container image
on:
  workflow_dispatch:
  push:
    branches:
      - 'main'
    tags:
      - '*'
  pull_request:
    paths:
      - .github/workflows/build-image.yml
      - Dockerfile
permissions:
  contents: read
  packages: write

jobs:
  build-image:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: docker/setup-qemu-action@v2
      - uses: docker/setup-buildx-action@v2
      - name: Log in to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
        if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'
      - name: Log in to the Github Container registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
        if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'
      - uses: docker/metadata-action@v4
        id: meta
        with:
          images: |
            tootsuite/mastodon
            ghcr.io/mastodon/mastodon
          flavor: |
            latest=auto
          tags: |
            type=edge,branch=main
            type=pep440,pattern={{raw}}
            type=pep440,pattern=v{{major}}.{{minor}}
            type=ref,event=pr
      - uses: docker/build-push-action@v4
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          provenance: false
          builder: ${{ steps.buildx.outputs.name }}
          push: ${{ github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

.github/workflows/build-releases.yml (vendored, new file)
View File

@@ -0,0 +1,27 @@
name: Build container release images
on:
  push:
    tags:
      - '*'

permissions:
  contents: read
  packages: write

jobs:
  build-image:
    uses: ./.github/workflows/build-container-image.yml
    with:
      platforms: linux/amd64,linux/arm64
      use_native_arm64_builder: true
      push_to_images: |
        tootsuite/mastodon
        ghcr.io/mastodon/mastodon
      # Do not use cache when building releases, so apt update is always ran and the release always contain the latest packages
      cache: false
      flavor: |
        latest=false
      tags: |
        type=pep440,pattern={{raw}}
        type=pep440,pattern=v{{major}}.{{minor}}
    secrets: inherit

.github/workflows/test-image-build.yml (vendored, new file)
View File

@@ -0,0 +1,15 @@
name: Test container image build
on:
  pull_request:

permissions:
  contents: read

jobs:
  build-image:
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    uses: ./.github/workflows/build-container-image.yml
    with:
      platforms: linux/amd64 # Testing only on native platform so it is performant

View File

@@ -3,6 +3,66 @@ Changelog

 All notable changes to this project will be documented in this file.

+## End of life notice
+
+**The 4.0.x branch will not receive any update after 2023-10-31.**
+This means that no security fix will be made available for this branch after this date, and you will need to update to a more recent version (such as the 4.1.x branch) to receive security fixes.
+
+## [4.0.10] - 2023-09-19
+
+### Fixed
+
+- Fix moderator rights inconsistencies ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26729))
+- Fix crash when encountering invalid URL ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26814))
+- Fix cached posts including stale stats ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26409))
+- Fix uploading of video files for which `ffprobe` reports `0/0` average framerate ([NicolaiSoeborg](https://github.com/mastodon/mastodon/pull/26500))
+- Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough ([yufushiro](https://github.com/mastodon/mastodon/pull/26608))
+
+### Security
+
+- Fix missing HTML sanitization in translation API (CVE-2023-42452)
+- Fix incorrect domain name normalization (CVE-2023-42451)
+
+## [4.0.9] - 2023-09-05
+
+### Changed
+
+- Change remote report processing to accept reports with long comments, but truncate them ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25028))
+
+### Fixed
+
+- **Fix blocking subdomains of an already-blocked domain** ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26392))
+- Fix `/api/v1/timelines/tag/:hashtag` allowing for unauthenticated access when public preview is disabled ([danielmbrasil](https://github.com/mastodon/mastodon/pull/26237))
+- Fix inefficiencies in `PlainTextFormatter` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26727))
+
+## [4.0.8] - 2023-07-31
+
+### Fixed
+
+- Fix memory leak in streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26228))
+- Fix wrong filters sometimes applying in streaming ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26159), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26213), [renchap](https://github.com/mastodon/mastodon/pull/26233))
+- Fix incorrect connect timeout in outgoing requests ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26116))
+
+## [4.0.7] - 2023-07-21
+
+### Added
+
+- Add check preventing Sidekiq workers from running with Makara configured ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25850))
+
+### Changed
+
+- Change request timeout handling to use a longer deadline ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26055))
+
+### Fixed
+
+- Fix moderation interface for remote instances with a .zip TLD ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25885))
+- Fix remote accounts being possibly persisted to database with incomplete protocol values ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
+- Fix trending publishers table not rendering correctly on narrow screens ([vmstan](https://github.com/mastodon/mastodon/pull/25945))
+
+### Security
+
+- Fix CSP headers being unintentionally wide ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26105))
+
 ## [4.0.6] - 2023-07-07

 ### Fixed

View File

@@ -19,6 +19,7 @@ RUN ARCH= && \
   esac && \
   echo "Etc/UTC" > /etc/localtime && \
   apt-get update && \
+  apt-get -yq dist-upgrade && \
   apt-get install -y --no-install-recommends ca-certificates wget python3 apt-utils && \
   cd ~ && \
   wget -q https://nodejs.org/download/release/v$NODE_VER/node-v$NODE_VER-linux-$ARCH.tar.gz && \
@@ -46,7 +47,7 @@ RUN apt-get update && \

 ENV PATH="${PATH}:/opt/ruby/bin:/opt/node/bin"

-RUN npm install -g npm@latest && \
+RUN npm install -g npm@9 && \
     npm install -g yarn && \
     gem install bundler && \
     apt-get update && \

View File

@@ -10,8 +10,9 @@ A "vulnerability in Mastodon" is a vulnerability in the code distributed through

 ## Supported Versions

 | Version | Supported |
-| ------- | ----------|
-| 4.0.x   | Yes       |
-| 3.5.x   | Yes       |
-| < 3.5   | No        |
+| ------- | ------------------ |
+| 4.1.x   | Yes                |
+| 4.0.x   | Until 2023-10-31   |
+| 3.5.x   | Until 2023-12-31   |
+| < 3.5   | No                 |

View File

@@ -25,7 +25,7 @@ module Admin
         @domain_block.errors.delete(:domain)
         render :new
       else
-        if existing_domain_block.present?
+        if existing_domain_block.present? && existing_domain_block.domain == TagManager.instance.normalize_domain(@domain_block.domain.strip)
           @domain_block = existing_domain_block
           @domain_block.update(resource_params)
         end

View File

@@ -1,6 +1,7 @@
 # frozen_string_literal: true

 class Api::V1::Timelines::TagController < Api::BaseController
+  before_action -> { doorkeeper_authorize! :read, :'read:statuses' }, only: :show, if: :require_auth?
   before_action :load_tag
   after_action :insert_pagination_headers, unless: -> { @statuses.empty? }

@@ -11,6 +12,10 @@ class Api::V1::Timelines::TagController < Api::BaseController

   private

+  def require_auth?
+    !Setting.timeline_preview
+  end
+
   def load_tag
     @tag = Tag.find_normalized(params[:id])
   end
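The effect of the added `before_action`: when an admin turns off public timeline preview, `require_auth?` becomes true and unauthenticated requests are rejected before the action runs. A minimal sketch of the same gating pattern, with a hypothetical controller (not Mastodon code; `Setting.timeline_preview` is assumed to be a boolean site setting as above):

# Hypothetical illustration of conditionally requiring an OAuth scope.
class PreviewableController < ApplicationController
  # Only demand a token with the :read scope when previews are disabled.
  before_action -> { doorkeeper_authorize! :read }, if: :require_auth?

  def show
    render json: { visible: true }
  end

  private

  def require_auth?
    !Setting.timeline_preview
  end
end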

View File

@@ -16,7 +16,7 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
       @account,
       target_account,
       status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id),
-      comment: @json['content'] || '',
+      comment: report_comment,
       uri: report_uri
     )
   end
@@ -35,4 +35,8 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
   def report_uri
     @json['id'] unless @json['id'].nil? || invalid_origin?(@json['id'])
   end
+
+  def report_comment
+    (@json['content'] || '')[0...5000]
+  end
 end
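The `[0...5000]` slice above uses an exclusive range, so remote report comments are capped at exactly 5,000 characters while shorter comments pass through untouched:

# Ruby string slicing with an exclusive range, as in report_comment:
('a' * 6000)[0...5000].length # => 5000
'short comment'[0...5000]     # => "short comment" (unchanged)
(nil || '')[0...5000]         # => "" (missing content falls back to '')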

View File

@@ -27,6 +27,8 @@ class ActivityPub::TagManager
     when :note, :comment, :activity
       return activity_account_status_url(target.account, target) if target.reblog?
       short_account_status_url(target.account, target)
+    when :flag
+      target.uri
     end
   end
@@ -41,6 +43,8 @@ class ActivityPub::TagManager
       account_status_url(target.account, target)
     when :emoji
       emoji_url(target)
+    when :flag
+      target.uri
     end
   end

View File

@@ -0,0 +1,9 @@
# frozen_string_literal: true

class Admin::AccountStatusesFilter < AccountStatusesFilter
  private

  def blocked?
    false
  end
end

View File

@@ -1,9 +1,7 @@
 # frozen_string_literal: true

 class PlainTextFormatter
-  include ActionView::Helpers::TextHelper
-
-  NEWLINE_TAGS_RE = /(<br \/>|<br>|<\/p>)+/.freeze
+  NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}

   attr_reader :text, :local

@@ -18,7 +16,10 @@ class PlainTextFormatter
     if local?
       text
     else
-      html_entities.decode(strip_tags(insert_newlines)).chomp
+      node = Nokogiri::HTML.fragment(insert_newlines)
+      # Elements that are entirely removed with our Sanitize config
+      node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
+      node.text.chomp
     end
   end

@@ -27,8 +28,4 @@ class PlainTextFormatter
   def insert_newlines
     text.gsub(NEWLINE_TAGS_RE) { |match| "#{match}\n" }
   end
-
-  def html_entities
-    HTMLEntities.new
-  end
 end
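Instead of regex-based `strip_tags` plus a separate entity-decoding pass, the new code parses the HTML with Nokogiri, removes the elements that Mastodon's Sanitize config drops outright, and reads `text`, which also decodes entities. A minimal sketch of that extraction (simplified from the diff above, with a shorter selector list):

require 'nokogiri'

html = '<p>Hello &amp; welcome<script>alert(1)</script></p>'
node = Nokogiri::HTML.fragment(html)
node.xpath('.//script|.//style').remove # subset of the selectors used above
node.text # => "Hello & welcome" (entities decoded, script contents gone)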

View File

@@ -4,14 +4,22 @@ require 'ipaddr'
 require 'socket'
 require 'resolv'

-# Monkey-patch the HTTP.rb timeout class to avoid using a timeout block
+# Use our own timeout class to avoid using HTTP.rb's timeout block
 # around the Socket#open method, since we use our own timeout blocks inside
 # that method
 #
 # Also changes how the read timeout behaves so that it is cumulative (closer
 # to HTTP::Timeout::Global, but still having distinct timeouts for other
 # operation types)
-class HTTP::Timeout::PerOperation
+class PerOperationWithDeadline < HTTP::Timeout::PerOperation
+  READ_DEADLINE = 30
+
+  def initialize(*args)
+    super
+
+    @read_deadline = options.fetch(:read_deadline, READ_DEADLINE)
+  end
+
   def connect(socket_class, host, port, nodelay = false)
     @socket = socket_class.open(host, port)
     @socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
@@ -24,7 +32,7 @@ class HTTP::Timeout::PerOperation

   # Read data from the socket
   def readpartial(size, buffer = nil)
-    @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_timeout
+    @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_deadline

     timeout = false
     loop do
@@ -33,7 +41,8 @@ class HTTP::Timeout::PerOperation
       return :eof if result.nil?

       remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
-      raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout || remaining_time <= 0
+      raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout
+      raise HTTP::TimeoutError, "Read timed out after a total of #{@read_deadline} seconds" if remaining_time <= 0
       return result if result != :wait_readable

       # marking the socket for timeout. Why is this not being raised immediately?
@@ -46,7 +55,7 @@ class HTTP::Timeout::PerOperation
       # timeout. Else, the first timeout was a proper timeout.
       # This hack has to be done because io/wait#wait_readable doesn't provide a value for when
       # the socket is closed by the server, and HTTP::Parser doesn't provide the limit for the chunks.
-      timeout = true unless @socket.to_io.wait_readable(remaining_time)
+      timeout = true unless @socket.to_io.wait_readable([remaining_time, @read_timeout].min)
     end
   end
 end
@@ -57,7 +66,7 @@ class Request
   # We enforce a 5s timeout on DNS resolving, 5s timeout on socket opening
   # and 5s timeout on the TLS handshake, meaning the worst case should take
   # about 15s in total
-  TIMEOUT = { connect: 5, read: 10, write: 10 }.freeze
+  TIMEOUT = { connect_timeout: 5, read_timeout: 10, write_timeout: 10, read_deadline: 30 }.freeze

   include RoutingHelper

@@ -68,6 +77,7 @@ class Request
     @url         = Addressable::URI.parse(url).normalize
     @http_client = options.delete(:http_client)
     @options     = options.merge(socket_class: use_proxy? ? ProxySocket : Socket)
+    @options     = @options.merge(timeout_class: PerOperationWithDeadline, timeout_options: TIMEOUT)
     @options     = @options.merge(proxy_url) if use_proxy?
     @headers     = {}

@@ -128,7 +138,7 @@ class Request
   end

   def http_client
-    HTTP.use(:auto_inflate).timeout(TIMEOUT.dup).follow(max_hops: 3)
+    HTTP.use(:auto_inflate).follow(max_hops: 3)
   end
 end
@@ -268,11 +278,11 @@ class Request
     end

     until socks.empty?
-      _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect])
+      _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect_timeout])

       if available_socks.nil?
         socks.each(&:close)
-        raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect]} seconds"
+        raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect_timeout]} seconds"
       end

       available_socks.each do |sock|
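The distinction the new class draws: `read_timeout` still bounds each individual read, while `read_deadline` bounds the cumulative time across all reads of one response, measured against a monotonic clock. A standalone sketch of that accounting, independent of HTTP.rb (the sleep stands in for a socket read):

READ_TIMEOUT  = 10 # cap on each individual wait, as with read_timeout above
READ_DEADLINE = 30 # cap on the response as a whole, as with read_deadline

deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + READ_DEADLINE

3.times do # pretend each iteration is one readpartial call
  remaining = deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
  raise 'Read timed out (cumulative deadline)' if remaining <= 0

  # Each wait is bounded by BOTH limits, mirroring
  # wait_readable([remaining_time, @read_timeout].min) in the patch:
  wait_budget = [remaining, READ_TIMEOUT].min
  sleep([wait_budget, 0.01].min) # simulate a quick successful read
end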

View File

@@ -7,18 +7,18 @@ class TagManager
   include RoutingHelper

   def web_domain?(domain)
-    domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.web_domain).zero?
+    domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.web_domain).zero?
   end

   def local_domain?(domain)
-    domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.local_domain).zero?
+    domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.local_domain).zero?
   end

   def normalize_domain(domain)
     return if domain.nil?

     uri = Addressable::URI.new
-    uri.host = domain.gsub(/[\/]/, '')
+    uri.host = domain.delete_suffix('/')
     uri.normalized_host
   end

@@ -28,7 +28,7 @@ class TagManager
     domain = uri.host + (uri.port ? ":#{uri.port}" : '')

     TagManager.instance.web_domain?(domain)
-  rescue Addressable::URI::InvalidURIError
+  rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
     false
   end
 end
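Two things are going on above: `delete_suffix('/')` only trims a trailing slash instead of stripping every slash in the string, and `Addressable::URI#normalized_host` then canonicalizes what remains (lowercasing, and IDNA handling for internationalized names), which is the normalization the incorrect-domain-name fix (CVE-2023-42451) and the stricter domain-block comparison rely on. A small demonstration:

require 'addressable/uri'

uri = Addressable::URI.new
uri.host = 'ExAmPle.com/'.delete_suffix('/')
uri.normalized_host # => "example.com"

# By contrast, the old gsub would also mangle hosts containing inner slashes:
'sub.example.com/path'.gsub(/[\/]/, '')    # => "sub.example.compath"
'sub.example.com/path'.delete_suffix('/')  # => "sub.example.com/path" (left for URI parsing to reject)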

View File

@@ -43,6 +43,9 @@ class VideoMetadataExtractor
       @height       = video_stream[:height]
       @frame_rate   = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
       @r_frame_rate = video_stream[:r_frame_rate] == '0/0' ? nil : Rational(video_stream[:r_frame_rate])
+      # For some video streams the frame_rate reported by `ffprobe` will be 0/0, but for these streams we
+      # should use `r_frame_rate` instead. Video screencast generated by Gnome Screencast have this issue.
+      @frame_rate ||= @r_frame_rate
     end

     if (audio_stream = audio_streams.first)
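The `'0/0'` guard matters because Ruby's `Rational` raises on a zero denominator; with the new fallback, a stream whose `avg_frame_rate` is unusable can still pick up a frame rate from `r_frame_rate`:

Rational('30000/1001') # => (30000/1001), i.e. NTSC ~29.97 fps
Rational('0/0')        # raises ZeroDivisionError, hence the nil guard above

frame_rate   = nil             # avg_frame_rate was '0/0'
r_frame_rate = Rational(60, 1)
frame_rate ||= r_frame_rate    # => (60/1), the fallback added above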

View File

@@ -137,6 +137,6 @@ class Admin::StatusBatchAction
   end

   def allowed_status_ids
-    AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
+    Admin::AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
   end
 end

View File

@@ -39,7 +39,10 @@ class Report < ApplicationRecord
   scope :resolved, -> { where.not(action_taken_at: nil) }
   scope :with_accounts, -> { includes([:account, :target_account, :action_taken_by_account, :assigned_account].index_with({ user: [:invite_request, :invite] })) }

-  validates :comment, length: { maximum: 1_000 }
+  # A report is considered local if the reporter is local
+  delegate :local?, to: :account
+
+  validates :comment, length: { maximum: 1_000 }, if: :local?
   validates :rule_ids, absence: true, unless: :violation?

   validate :validate_rule_ids
@@ -50,10 +53,6 @@ class Report < ApplicationRecord
     violation: 2_000,
   }

-  def local?
-    false # Force uri_for to use uri attribute
-  end
-
   before_validation :set_uri, only: :create
   after_create_commit :trigger_webhooks

View File

@@ -353,13 +353,25 @@ class Status < ApplicationRecord

     account_ids.uniq!

+    status_ids = cached_items.map { |item| item.reblog? ? item.reblog_of_id : item.id }.uniq
+
     return if account_ids.empty?

     accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)
+    status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)

     cached_items.each do |item|
       item.account = accounts[item.account_id]
       item.reblog.account = accounts[item.reblog.account_id] if item.reblog?
+
+      if item.reblog?
+        status_stat = status_stats[item.reblog.id]
+        item.reblog.status_stat = status_stat if status_stat.present?
+      else
+        status_stat = status_stats[item.id]
+        item.status_stat = status_stat if status_stat.present?
+      end
     end
   end
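The fix bulk-loads the current `StatusStat` rows in one query and reattaches them to the cached records, so counters serialized into the cache snapshot get overwritten with fresh values. `index_by` (from ActiveSupport) is what makes the per-item lookup O(1); a small illustration with stand-in structs:

require 'active_support/core_ext/enumerable'

Stat  = Struct.new(:status_id, :favourites_count)
stats = [Stat.new(1, 5), Stat.new(2, 0)]

by_id = stats.index_by(&:status_id)
# => {1=>#<struct Stat status_id=1, favourites_count=5>, 2=>...}
by_id[1].favourites_count # => 5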

View File

@@ -12,7 +12,7 @@ class Admin::StatusPolicy < ApplicationPolicy
   end

   def show?
-    role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported?)
+    role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported? || viewable_through_normal_policy?)
   end

   def destroy?
@@ -26,4 +26,10 @@ class Admin::StatusPolicy < ApplicationPolicy
   def review?
     role.can?(:manage_taxonomies)
   end
+
+  private
+
+  def viewable_through_normal_policy?
+    StatusPolicy.new(current_account, record, @preloaded_relations).show?
+  end
 end

View File

@@ -59,6 +59,9 @@ class ActivityPub::ProcessAccountService < BaseService
       @account.suspended_at = domain_block.created_at if auto_suspend?
       @account.suspension_origin = :local if auto_suspend?
       @account.silenced_at = domain_block.created_at if auto_silence?
+
+      set_immediate_protocol_attributes!
+
       @account.save
     end

View File

@@ -12,7 +12,9 @@ class TranslateStatusService < BaseService
     @content = status_content_format(@status)
     @target_language = target_language

-    Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) { translation_backend.translate(@content, @status.language, @target_language) }
+    Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
+      Sanitize.fragment(translation_backend.translate(@content, @status.language, @target_language), Sanitize::Config::MASTODON_STRICT)
+    end
   end

   private
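The security fix here (CVE-2023-42452) is the added `Sanitize.fragment` call: whatever HTML the remote translation backend returns is re-sanitized before being cached and served. `MASTODON_STRICT` is Mastodon's own allowlist config; as a rough stand-in, the same call with one of the sanitize gem's built-in configs behaves like this (exact output depends on the config used):

require 'sanitize'

html = '<b>bonjour</b><script>steal()</script>'
Sanitize.fragment(html, Sanitize::Config::RESTRICTED)
# => "<b>bonjour</b>" (the script element and its contents are removed)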

View File

@@ -29,7 +29,7 @@
   - Trends::PreviewCardProviderFilter::KEYS.each do |key|
     = hidden_field_tag key, params[key] if params[key].present?

-  .batch-table.optional
+  .batch-table
     .batch-table__toolbar
       %label.batch-table__toolbar__select.batch-checkbox-all
         = check_box_tag :batch_checkbox_all, nil, false

View File

@@ -3,7 +3,7 @@
 # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy

 def host_to_url(str)
-  "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}".split('/').first if str.present?
+  "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str.split('/').first}" if str.present?
 end

 base_host = Rails.configuration.x.web_domain
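This small move of the closing brace is the CSP fix: the old code split the fully assembled URL on `/`, and since `https://…` itself contains slashes, the first segment was always the bare scheme `https:`, which CSP treats as "any https origin". The new code strips any path from the configured host before building the URL:

str = 'assets.example.com/some/path'

# Old behavior: split the assembled URL, keep the first segment
"https://#{str}".split('/').first  # => "https:" (matches every https host!)

# New behavior: strip the path from the host first
"https://#{str.split('/').first}"  # => "https://assets.example.com"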

View File

@@ -3,6 +3,11 @@
 require_relative '../../lib/mastodon/sidekiq_middleware'

 Sidekiq.configure_server do |config|
+  if Rails.configuration.database_configuration.dig('production', 'adapter') == 'postgresql_makara'
+    STDERR.puts 'ERROR: Database replication is not currently supported in Sidekiq workers. Check your configuration.'
+    exit 1
+  end
+
   config.redis = REDIS_SIDEKIQ_PARAMS

   config.server_middleware do |chain|

View File

@@ -273,7 +273,7 @@ Rails.application.routes.draw do
       end
     end

-    resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ } do
+    resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ }, format: 'html' do
       member do
         post :clear_delivery_errors
         post :restart_delivery

View File

@@ -56,7 +56,7 @@ services:

   web:
     build: .
-    image: ghcr.io/mastodon/mastodon
+    image: ghcr.io/mastodon/mastodon:v4.0.10
     restart: always
     env_file: .env.production
     command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:

   streaming:
     build: .
-    image: ghcr.io/mastodon/mastodon
+    image: ghcr.io/mastodon/mastodon:v4.0.10
     restart: always
     env_file: .env.production
     command: node ./streaming
@@ -95,7 +95,7 @@ services:

   sidekiq:
     build: .
-    image: ghcr.io/mastodon/mastodon
+    image: ghcr.io/mastodon/mastodon:v4.0.10
    restart: always
    env_file: .env.production
    command: bundle exec sidekiq

View File

@@ -13,7 +13,7 @@ module Mastodon
     end

     def patch
-      6
+      10
     end

     def flags

View File

@@ -37,12 +37,14 @@ module Paperclip
       @output_options['f'] = 'image2'
       @output_options['vframes'] = 1
     when 'mp4'
-      @output_options['acodec'] = 'aac'
-      @output_options['strict'] = 'experimental'
+      unless eligible_to_passthrough?(metadata)
+        @output_options['acodec'] = 'aac'
+        @output_options['strict'] = 'experimental'

-      if high_vfr?(metadata) && !eligible_to_passthrough?(metadata)
-        @output_options['vsync'] = 'vfr'
-        @output_options['r'] = @vfr_threshold
+        if high_vfr?(metadata)
+          @output_options['vsync'] = 'vfr'
+          @output_options['r'] = @vfr_threshold
+        end
       end
     end
   end

View File

@@ -40,24 +40,36 @@ describe Admin::StatusesController do
   end

   describe 'POST #batch' do
-    before do
-      post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } }
-    end
+    subject { post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } } }

     let(:status_ids) { [media_attached_status.id] }

-    context 'when action is report' do
+    shared_examples 'when action is report' do
       let(:action) { 'report' }

       it 'creates a report' do
+        subject
+
         report = Report.last
         expect(report.target_account_id).to eq account.id
         expect(report.status_ids).to eq status_ids
       end

       it 'redirects to report page' do
+        subject
+
         expect(response).to redirect_to(admin_report_path(Report.last.id))
       end
     end
+
+    it_behaves_like 'when action is report'
+
+    context 'when the moderator is blocked by the author' do
+      before do
+        account.block!(user.account)
+      end
+
+      it_behaves_like 'when action is report'
+    end
   end
 end

View File

@@ -5,36 +5,66 @@ require 'rails_helper'
 describe Api::V1::Timelines::TagController do
   render_views

   let(:user) { Fabricate(:user) }
+  let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }

   before do
     allow(controller).to receive(:doorkeeper_token) { token }
   end

-  context 'with a user context' do
-    let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
+  describe 'GET #show' do
+    subject do
+      get :show, params: { id: 'test' }
+    end

-    describe 'GET #show' do
-      before do
-        PostStatusService.new.call(user.account, text: 'It is a #test')
-      end
+    before do
+      PostStatusService.new.call(user.account, text: 'It is a #test')
+    end

-      it 'returns http success' do
-        get :show, params: { id: 'test' }
-        expect(response).to have_http_status(200)
-        expect(response.headers['Link'].links.size).to eq(2)
-      end
-    end
-  end
+    context 'when the instance allows public preview' do
+      context 'when the user is not authenticated' do
+        let(:token) { nil }
+
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
+      end
+
+      context 'when the user is authenticated' do
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
+      end
+    end

-  context 'without a user context' do
-    let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+    context 'when the instance does not allow public preview' do
+      before do
+        Form::AdminSettings.new(timeline_preview: false).save
+      end

-    describe 'GET #show' do
-      it 'returns http success' do
-        get :show, params: { id: 'test' }
-        expect(response).to have_http_status(200)
-        expect(response.headers['Link']).to be_nil
-      end
+      context 'when the user is not authenticated' do
+        let(:token) { nil }
+
+        it 'returns http unauthorized' do
+          subject
+
+          expect(response).to have_http_status(401)
+        end
+      end
+
+      context 'when the user is authenticated' do
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
+      end
     end
   end
 end

View File

@@ -13,12 +13,17 @@ RSpec.describe CacheConcern, type: :controller do
     def empty_relation
       render plain: cache_collection(Status.none, Status).size
     end
+
+    def account_statuses_favourites
+      render plain: cache_collection(Status.where(account_id: params[:id]), Status).map(&:favourites_count)
+    end
   end

   before do
     routes.draw do
       get 'empty_array' => 'anonymous#empty_array'
-      post 'empty_relation' => 'anonymous#empty_relation'
+      get 'empty_relation' => 'anonymous#empty_relation'
+      get 'account_statuses_favourites' => 'anonymous#account_statuses_favourites'
     end
   end

@@ -36,5 +41,20 @@ RSpec.describe CacheConcern, type: :controller do
       expect(response.body).to eq '0'
     end
   end
+
+  context 'when given a collection of statuses' do
+    let!(:account) { Fabricate(:account) }
+    let!(:status) { Fabricate(:status, account: account) }
+
+    it 'correctly updates with new interactions' do
+      get :account_statuses_favourites, params: { id: account.id }
+      expect(response.body).to eq '[0]'
+
+      FavouriteService.new.call(account, status)
+
+      get :account_statuses_favourites, params: { id: account.id }
+      expect(response.body).to eq '[1]'
+    end
+  end
 end
end end

View File

@@ -37,6 +37,37 @@ RSpec.describe ActivityPub::Activity::Flag do
     end
   end

+  context 'when the report comment is excessively long' do
+    subject do
+      described_class.new({
+        '@context': 'https://www.w3.org/ns/activitystreams',
+        id: flag_id,
+        type: 'Flag',
+        content: long_comment,
+        actor: ActivityPub::TagManager.instance.uri_for(sender),
+        object: [
+          ActivityPub::TagManager.instance.uri_for(flagged),
+          ActivityPub::TagManager.instance.uri_for(status),
+        ],
+      }.with_indifferent_access, sender)
+    end
+
+    let(:long_comment) { Faker::Lorem.characters(number: 6000) }
+
+    before do
+      subject.perform
+    end
+
+    it 'creates a report but with a truncated comment' do
+      report = Report.find_by(account: sender, target_account: flagged)
+
+      expect(report).to_not be_nil
+      expect(report.comment.length).to eq 5000
+      expect(report.comment).to eq long_comment[0...5000]
+      expect(report.status_ids).to eq [status.id]
+    end
+  end
+
   context 'when the reported status is private and should not be visible to the remote server' do
     let(:status) { Fabricate(:status, account: flagged, uri: 'foobar', visibility: :private) }

View File

@@ -125,10 +125,17 @@ describe Report do
       expect(report).to be_valid
     end

-    it 'is invalid if comment is longer than 1000 characters' do
+    let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
+
+    it 'is invalid if comment is longer than 1000 characters only if reporter is local' do
       report = Fabricate.build(:report, comment: Faker::Lorem.characters(number: 1001))
-      report.valid?
+      expect(report.valid?).to be false
       expect(report).to model_have_error_on_field(:comment)
     end
+
+    it 'is valid if comment is longer than 1000 characters and reporter is not local' do
+      report = Fabricate.build(:report, account: remote_account, comment: Faker::Lorem.characters(number: 1001))
+      expect(report.valid?).to be true
+    end
   end
 end

View File

@@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'rails_helper'

describe 'Content-Security-Policy' do
  it 'sets the expected CSP headers' do
    allow(SecureRandom).to receive(:base64).with(16).and_return('ZbA+JmE7+bK8F5qvADZHuQ==')

    get '/'
    expect(response.headers['Content-Security-Policy'].split(';').map(&:strip)).to contain_exactly(
      "base-uri 'none'",
      "default-src 'none'",
      "frame-ancestors 'none'",
      "font-src 'self' https://cb6e6126.ngrok.io",
      "img-src 'self' https: data: blob: https://cb6e6126.ngrok.io",
      "style-src 'self' https://cb6e6126.ngrok.io 'nonce-ZbA+JmE7+bK8F5qvADZHuQ=='",
      "media-src 'self' https: data: https://cb6e6126.ngrok.io",
      "frame-src 'self' https:",
      "manifest-src 'self' https://cb6e6126.ngrok.io",
      "child-src 'self' blob: https://cb6e6126.ngrok.io",
      "worker-src 'self' blob: https://cb6e6126.ngrok.io",
      "connect-src 'self' data: blob: https://cb6e6126.ngrok.io https://cb6e6126.ngrok.io ws://localhost:4000",
      "script-src 'self' https://cb6e6126.ngrok.io 'wasm-unsafe-eval'"
    )
  end
end

View File

@@ -4,6 +4,14 @@ RSpec.describe ReportService, type: :service do
   subject { described_class.new }

   let(:source_account) { Fabricate(:account) }
+  let(:target_account) { Fabricate(:account) }
+
+  context 'with a local account' do
+    it 'has a uri' do
+      report = subject.call(source_account, target_account)
+      expect(report.uri).to_not be_nil
+    end
+  end

   context 'for a remote account' do
     let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }

View File

@@ -228,9 +228,15 @@ const startWorker = async (workerId) => {
     callbacks.forEach(callback => callback(json));
   };

+  /**
+   * @callback SubscriptionListener
+   * @param {ReturnType<parseJSON>} json of the message
+   * @returns void
+   */
+
   /**
    * @param {string} channel
-   * @param {function(string): void} callback
+   * @param {SubscriptionListener} callback
    */
   const subscribe = (channel, callback) => {
     log.silly(`Adding listener for ${channel}`);
@@ -247,7 +253,7 @@ const startWorker = async (workerId) => {

   /**
    * @param {string} channel
-   * @param {function(Object<string, any>): void} callback
+   * @param {SubscriptionListener} callback
    */
   const unsubscribe = (channel, callback) => {
     log.silly(`Removing listener for ${channel}`);
@@ -625,51 +631,66 @@ const startWorker = async (workerId) => {
    * @param {string[]} ids
    * @param {any} req
    * @param {function(string, string): void} output
-   * @param {function(string[], function(string): void): void} attachCloseHandler
+   * @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler
    * @param {boolean=} needsFiltering
-   * @returns {function(object): void}
+   * @returns {SubscriptionListener}
    */
   const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
     const accountId = req.accountId || req.remoteAddress;

     log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);

-    // Currently message is of type string, soon it'll be Record<string, any>
+    const transmit = (event, payload) => {
+      // TODO: Replace "string"-based delete payloads with object payloads:
+      const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
+
+      log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload}`);
+      output(event, encodedPayload);
+    };
+
+    // The listener used to process each message off the redis subscription,
+    // message here is an object with an `event` and `payload` property. Some
+    // events also include a queued_at value, but this is being removed shortly.
+    /** @type {SubscriptionListener} */
     const listener = message => {
-      const { event, payload, queued_at } = message;
-
-      const transmit = () => {
-        const now = new Date().getTime();
-        const delta = now - queued_at;
-        const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
-
-        log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload} Delay: ${delta}ms`);
-        output(event, encodedPayload);
-      };
-
-      // Only messages that may require filtering are statuses, since notifications
-      // are already personalized and deletes do not matter
-      if (!needsFiltering || event !== 'update') {
-        transmit();
+      const { event, payload } = message;
+
+      // Streaming only needs to apply filtering to some channels and only to
+      // some events. This is because majority of the filtering happens on the
+      // Ruby on Rails side when producing the event for streaming.
+      //
+      // The only events that require filtering from the streaming server are
+      // `update` and `status.update`, all other events are transmitted to the
+      // client as soon as they're received (pass-through).
+      //
+      // The channels that need filtering are determined in the function
+      // `channelNameToIds` defined below:
+      if (!needsFiltering || (event !== 'update' && event !== 'status.update')) {
+        transmit(event, payload);
         return;
       }

-      const unpackedPayload = payload;
-      const targetAccountIds = [unpackedPayload.account.id].concat(unpackedPayload.mentions.map(item => item.id));
-      const accountDomain = unpackedPayload.account.acct.split('@')[1];
+      // The rest of the logic from here on in this function is to handle
+      // filtering of statuses:

-      if (Array.isArray(req.chosenLanguages) && unpackedPayload.language !== null && req.chosenLanguages.indexOf(unpackedPayload.language) === -1) {
-        log.silly(req.requestId, `Message ${unpackedPayload.id} filtered by language (${unpackedPayload.language})`);
+      // Filter based on language:
+      if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) {
+        log.silly(req.requestId, `Message ${payload.id} filtered by language (${payload.language})`);
         return;
       }

       // When the account is not logged in, it is not necessary to confirm the block or mute
       if (!req.accountId) {
-        transmit();
+        transmit(event, payload);
         return;
       }

-      pgPool.connect((err, client, done) => {
+      // Filter based on domain blocks, blocks, mutes, or custom filters:
+      const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id));
+      const accountDomain = payload.account.acct.split('@')[1];
+
+      // TODO: Move this logic out of the message handling loop
+      pgPool.connect((err, client, releasePgConnection) => {
        if (err) {
          log.error(err);
          return;
@@ -684,40 +705,57 @@ const startWorker = async (workerId) => {
             SELECT 1
             FROM mutes
             WHERE account_id = $1
-              AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, unpackedPayload.account.id].concat(targetAccountIds)),
+              AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, payload.account.id].concat(targetAccountIds)),
         ];

         if (accountDomain) {
           queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
         }

-        if (!unpackedPayload.filtered && !req.cachedFilters) {
+        if (!payload.filtered && !req.cachedFilters) {
           queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
         }

         Promise.all(queries).then(values => {
-          done();
+          releasePgConnection();

+          // Handling blocks & mutes and domain blocks: If one of those applies,
+          // then we don't transmit the payload of the event to the client
           if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
             return;
           }

-          if (!unpackedPayload.filtered && !req.cachedFilters) {
+          // If the payload already contains the `filtered` property, it means
+          // that filtering has been applied on the ruby on rails side, as
+          // such, we don't need to construct or apply the filters in streaming:
+          if (Object.prototype.hasOwnProperty.call(payload, "filtered")) {
+            transmit(event, payload);
+            return;
+          }
+
+          // Handling for constructing the custom filters and caching them on the request
+          // TODO: Move this logic out of the message handling lifecycle
+          if (!req.cachedFilters) {
             const filterRows = values[accountDomain ? 2 : 1].rows;

-            req.cachedFilters = filterRows.reduce((cache, row) => {
-              if (cache[row.id]) {
-                cache[row.id].keywords.push([row.keyword, row.whole_word]);
+            req.cachedFilters = filterRows.reduce((cache, filter) => {
+              if (cache[filter.id]) {
+                cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
               } else {
-                cache[row.id] = {
-                  keywords: [[row.keyword, row.whole_word]],
-                  expires_at: row.expires_at,
-                  repr: {
-                    id: row.id,
-                    title: row.title,
-                    context: row.context,
-                    expires_at: row.expires_at,
-                    filter_action: ['warn', 'hide'][row.filter_action],
+                cache[filter.id] = {
+                  keywords: [[filter.keyword, filter.whole_word]],
+                  expires_at: filter.expires_at,
+                  filter: {
+                    id: filter.id,
+                    title: filter.title,
+                    context: filter.context,
+                    expires_at: filter.expires_at,
+                    // filter.filter_action is the value from the
+                    // custom_filters.action database column, it is an integer
+                    // representing a value in an enum defined by Ruby on Rails:
+                    //
+                    // enum { warn: 0, hide: 1 }
+                    filter_action: ['warn', 'hide'][filter.filter_action],
                   },
                 };
               }
@@ -725,6 +763,10 @@ const startWorker = async (workerId) => {
               return cache;
             }, {});

+            // Construct the regular expressions for the custom filters: This
+            // needs to be done in a separate loop as the database returns one
+            // filterRow per keyword, so we need all the keywords before
+            // constructing the regular expression
             Object.keys(req.cachedFilters).forEach((key) => {
               req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
                 let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');;
@@ -744,31 +786,58 @@ const startWorker = async (workerId) => {
             });
           }

-          // Check filters
-          if (req.cachedFilters && !unpackedPayload.filtered) {
-            const status = unpackedPayload;
-            const searchContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
-            const searchIndex = JSDOM.fragment(searchContent).textContent;
+          // Apply cachedFilters against the payload, constructing a
+          // `filter_results` array of FilterResult entities
+          if (req.cachedFilters) {
+            const status = payload;
+            // TODO: Calculate searchableContent in Ruby on Rails:
+            const searchableContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
+            const searchableTextContent = JSDOM.fragment(searchableContent).textContent;

             const now = new Date();
-            payload.filtered = [];
-            Object.values(req.cachedFilters).forEach((cachedFilter) => {
-              if ((cachedFilter.expires_at === null || cachedFilter.expires_at > now)) {
-                const keyword_matches = searchIndex.match(cachedFilter.regexp);
-                if (keyword_matches) {
-                  payload.filtered.push({
-                    filter: cachedFilter.repr,
-                    keyword_matches,
-                  });
-                }
-              }
-            });
-          }
+            const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => {
+              // Check the filter hasn't expired before applying:
+              if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) {
+                return results;
+              }

-          transmit();
+              // Just in-case JSDOM fails to find textContent in searchableContent
+              if (!searchableTextContent) {
+                return results;
+              }
+
+              const keyword_matches = searchableTextContent.match(cachedFilter.regexp);
+              if (keyword_matches) {
+                // results is an Array of FilterResult; status_matches is always
+                // null as we only are only applying the keyword-based custom
+                // filters, not the status-based custom filters.
+                // https://docs.joinmastodon.org/entities/FilterResult/
+                results.push({
+                  filter: cachedFilter.filter,
+                  keyword_matches,
+                  status_matches: null
+                });
+              }
+
+              return results;
+            }, []);
+
+            // Send the payload + the FilterResults as the `filtered` property
+            // to the streaming connection. To reach this code, the `event` must
+            // have been either `update` or `status.update`, meaning the
+            // `payload` is a Status entity, which has a `filtered` property:
+            //
+            // filtered: https://docs.joinmastodon.org/entities/Status/#filtered
+            transmit(event, {
+              ...payload,
+              filtered: filter_results
+            });
+          } else {
+            transmit(event, payload);
+          }
         }).catch(err => {
+          releasePgConnection();
           log.error(err);
-          done();
         });
       });
     };
@@ -777,7 +846,7 @@ const startWorker = async (workerId) => {
       subscribe(`${redisPrefix}${id}`, listener);
     });

-    if (attachCloseHandler) {
+    if (typeof attachCloseHandler === 'function') {
       attachCloseHandler(ids.map(id => `${redisPrefix}${id}`), listener);
     }
@@ -814,12 +883,13 @@ const startWorker = async (workerId) => {
  /**
   * @param {any} req
   * @param {function(): void} [closeHandler]
-  * @return {function(string[]): void}
+  * @returns {function(string[], SubscriptionListener): void}
   */
- const streamHttpEnd = (req, closeHandler = undefined) => (ids) => {
+ const streamHttpEnd = (req, closeHandler = undefined) => (ids, listener) => {
    req.on('close', () => {
      ids.forEach(id => {
-       unsubscribe(id);
+       unsubscribe(id, listener);
      });

      if (closeHandler) {
@@ -1058,7 +1128,7 @@ const startWorker = async (workerId) => {
  /**
   * @typedef WebSocketSession
   * @property {any} socket
   * @property {any} request
-  * @property {Object.<string, { listener: function(string): void, stopHeartbeat: function(): void }>} subscriptions
+  * @property {Object.<string, { listener: SubscriptionListener, stopHeartbeat: function(): void }>} subscriptions
   */

  /**