Compare commits

No commits in common. "9fba3359374f683fde59b0d10a164901f2ada86d" and "9c90c4efbd09826e95ed540f231ac6e96212b628" have entirely different histories.

36 changed files with 206 additions and 776 deletions

View file

@ -1,9 +1,14 @@
on:
workflow_call:
inputs:
platforms:
required: true
type: string
cache:
type: boolean
default: true
use_native_arm64_builder:
type: boolean
push_to_images:
type: string
version_prerelease:
@ -17,36 +22,42 @@ on:
labels:
type: string
# This builds multiple images with one runner each, allowing us to build for multiple architectures
# using GitHub's runners.
# The two-step process is adapted from:
# https://docs.docker.com/build/ci/github-actions/multi-platform/#distribute-build-across-multiple-runners
jobs:
# Build each (amd64 and arm64) image separately
build-image:
runs-on: ${{ startsWith(matrix.platform, 'linux/arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Prepare
env:
PUSH_TO_IMAGES: ${{ inputs.push_to_images }}
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
# Transform multi-line variable into comma-separated variable
image_names=${PUSH_TO_IMAGES//$'\n'/,}
echo "IMAGE_NAMES=${image_names%,}" >> $GITHUB_ENV
- uses: docker/setup-qemu-action@v2
if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder
- uses: docker/setup-buildx-action@v2
id: buildx
if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}
- name: Start a local Docker Builder
if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
run: |
docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234
- uses: docker/setup-buildx-action@v2
id: buildx-native
if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
with:
driver: remote
endpoint: tcp://localhost:1234
platforms: linux/amd64
append: |
- endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
platforms: linux/arm64
name: mastodon-docker-builder-arm64-01
driver-opts:
- servername=mastodon-docker-builder-arm64-01
env:
BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}
- name: Log in to Docker Hub
if: contains(inputs.push_to_images, 'tootsuite')
@ -63,88 +74,8 @@ jobs:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker meta
- uses: docker/metadata-action@v4
id: meta
uses: docker/metadata-action@v5
if: ${{ inputs.push_to_images != '' }}
with:
images: ${{ inputs.push_to_images }}
flavor: ${{ inputs.flavor }}
labels: ${{ inputs.labels }}
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: .
build-args: |
MASTODON_VERSION_PRERELEASE=${{ inputs.version_prerelease }}
MASTODON_VERSION_METADATA=${{ inputs.version_metadata }}
SOURCE_COMMIT=${{ github.sha }}
platforms: ${{ matrix.platform }}
provenance: false
push: ${{ inputs.push_to_images != '' }}
cache-from: ${{ inputs.cache && 'type=gha' || '' }}
cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}
outputs: type=image,"name=${{ env.IMAGE_NAMES }}",push-by-digest=true,name-canonical=true,push=${{ inputs.push_to_images != '' }}
- name: Export digest
if: ${{ inputs.push_to_images != '' }}
run: |
mkdir -p "${{ runner.temp }}/digests"
digest="${{ steps.build.outputs.digest }}"
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
if: ${{ inputs.push_to_images != '' }}
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
# Then merge the docker images into a single one
merge-images:
if: ${{ inputs.push_to_images != '' }}
runs-on: ubuntu-24.04
needs:
- build-image
env:
PUSH_TO_IMAGES: ${{ inputs.push_to_images }}
steps:
- uses: actions/checkout@v4
- name: Download digests
uses: actions/download-artifact@v4
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
merge-multiple: true
- name: Log in to Docker Hub
if: contains(inputs.push_to_images, 'tootsuite')
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to the GitHub Container registry
if: contains(inputs.push_to_images, 'ghcr.io')
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
if: ${{ inputs.push_to_images != '' }}
with:
images: ${{ inputs.push_to_images }}
@ -152,14 +83,17 @@ jobs:
tags: ${{ inputs.tags }}
labels: ${{ inputs.labels }}
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
run: |
echo "$PUSH_TO_IMAGES" | xargs -I{} \
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '{}@sha256:%s ' *)
- name: Inspect image
run: |
echo "$PUSH_TO_IMAGES" | xargs -i{} \
docker buildx imagetools inspect {}:${{ steps.meta.outputs.version }}
- uses: docker/build-push-action@v4
with:
context: .
build-args: |
MASTODON_VERSION_PRERELEASE=${{ inputs.version_prerelease }}
MASTODON_VERSION_METADATA=${{ inputs.version_metadata }}
platforms: ${{ inputs.platforms }}
provenance: false
builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
push: ${{ inputs.push_to_images != '' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: ${{ inputs.cache && 'type=gha' || '' }}
cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}

View file

@ -24,6 +24,8 @@ jobs:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: true
cache: false
push_to_images: |
tootsuite/mastodon

View file

@ -29,6 +29,8 @@ jobs:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: true
push_to_images: |
ghcr.io/mastodon/mastodon
version_metadata: ${{ needs.compute-suffix.outputs.metadata }}

View file

@ -12,6 +12,8 @@ jobs:
build-image:
uses: ./.github/workflows/build-container-image.yml
with:
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: true
push_to_images: |
tootsuite/mastodon
ghcr.io/mastodon/mastodon

View file

@ -17,3 +17,5 @@ jobs:
cancel-in-progress: true
uses: ./.github/workflows/build-container-image.yml
with:
platforms: linux/amd64 # Testing only on native platform so it is performant

View file

@ -58,7 +58,7 @@ jobs:
run: |-
./bin/rails assets:precompile
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: matrix.mode == 'test'
with:
path: |-
@ -118,6 +118,7 @@ jobs:
fail-fast: false
matrix:
ruby-version:
- '3.0'
- '3.1'
- '.ruby-version'
ci_job:
@ -128,7 +129,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
path: './public'
name: ${{ github.sha }}
@ -196,13 +197,14 @@ jobs:
fail-fast: false
matrix:
ruby-version:
- '3.0'
- '3.1'
- '.ruby-version'
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
path: './public'
name: ${{ github.sha }}
@ -236,14 +238,14 @@ jobs:
- run: bundle exec rake spec:system
- name: Archive logs
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: e2e-logs-${{ matrix.ruby-version }}
path: log/
- name: Archive test screenshots
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: e2e-screenshots
@ -308,13 +310,14 @@ jobs:
fail-fast: false
matrix:
ruby-version:
- '3.0'
- '3.1'
- '.ruby-version'
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
- uses: actions/download-artifact@v3
with:
path: './public'
name: ${{ github.sha }}
@ -348,14 +351,14 @@ jobs:
- run: bundle exec rake spec:search
- name: Archive logs
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: test-search-logs-${{ matrix.ruby-version }}
path: log/
- name: Archive test screenshots
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure()
with:
name: test-search-screenshots

View file

@ -2,33 +2,6 @@
All notable changes to this project will be documented in this file.
## [4.2.17] - 2025-02-27
### Security
- Update dependencies
### Removed
- Remove support for Ruby 3.0
## [4.2.16] - 2025-02-27
### Security
- Update dependencies
- Change HTML sanitization to remove unusable and unused `embed` tag (#34021 by @ClearlyClaire, [GHSA-mq2m-hr29-8gqf](https://github.com/mastodon/mastodon/security/advisories/GHSA-mq2m-hr29-8gqf))
- Fix rate-limit on sign-up email verification ([GHSA-v39f-c9jj-8w7h](https://github.com/mastodon/mastodon/security/advisories/GHSA-v39f-c9jj-8w7h))
- Fix improper disclosure of domain blocks to unverified users ([GHSA-94h4-fj37-c825](https://github.com/mastodon/mastodon/security/advisories/GHSA-94h4-fj37-c825))
### Fixed
- Fix emoji rewrite adding unnecessary cruft to the DOM for most emoji (#33818 by @ClearlyClaire)
- Fix incorrect signature after HTTP redirect (#33757 and #33769 by @ClearlyClaire)
- Fix polls not being validated when edited (#33755 by @ClearlyClaire)
- Fix featured tags for remote accounts not being kept up to date (#33372, #33406, and #33425 by @ClearlyClaire and @mjankowski)
- Fix exclusive lists interfering with notifications (#28162 by @ShadowJonathan)
## [4.2.15] - 2025-01-16
### Security

View file

@ -1,7 +1,7 @@
# frozen_string_literal: true
source 'https://rubygems.org'
ruby '>= 3.1.0'
ruby '>= 3.0.0'
gem 'puma', '~> 6.3'
gem 'rails', '~> 7.0'
@ -60,7 +60,7 @@ gem 'idn-ruby', require: 'idn'
gem 'kaminari', '~> 1.2'
gem 'link_header', '~> 0.0'
gem 'mime-types', '~> 3.5.0', require: 'mime/types/columnar'
gem 'nokogiri', '~> 1.17'
gem 'nokogiri', '~> 1.15'
gem 'nsa'
gem 'oj', '~> 3.14'
gem 'ox', '~> 2.14'

View file

@ -455,7 +455,7 @@ GEM
uri
net-http-persistent (4.0.2)
connection_pool (~> 2.2)
net-imap (0.3.8)
net-imap (0.3.7)
date
net-protocol
net-ldap (0.18.0)
@ -469,7 +469,7 @@ GEM
net-protocol
net-ssh (7.1.0)
nio4r (2.7.4)
nokogiri (1.18.3)
nokogiri (1.16.8)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
nsa (0.3.0)
@ -533,7 +533,7 @@ GEM
activesupport (>= 3.0.0)
raabro (1.4.0)
racc (1.8.1)
rack (2.2.11)
rack (2.2.10)
rack-attack (6.7.0)
rack (>= 1.0, < 4)
rack-cors (2.0.2)
@ -769,7 +769,7 @@ GEM
unf_ext
unf_ext (0.0.8.2)
unicode-display_width (2.4.2)
uri (0.12.4)
uri (0.12.2)
validate_email (0.1.6)
activemodel (>= 3.0)
mail (>= 2.2.5)
@ -878,7 +878,7 @@ DEPENDENCIES
mime-types (~> 3.5.0)
net-http (~> 0.3.2)
net-ldap (~> 0.18)
nokogiri (~> 1.17)
nokogiri (~> 1.15)
nsa
oj (~> 3.14)
omniauth (~> 2.0)

View file

@ -15,40 +15,16 @@ class Api::V1::Instances::DomainBlocksController < Api::BaseController
cache_if_unauthenticated!
end
render json: @domain_blocks, each_serializer: REST::DomainBlockSerializer, with_comment: show_rationale_in_response?
render json: @domain_blocks, each_serializer: REST::DomainBlockSerializer, with_comment: (Setting.show_domain_blocks_rationale == 'all' || (Setting.show_domain_blocks_rationale == 'users' && user_signed_in?))
end
private
def require_enabled_api!
head 404 unless api_enabled?
end
def api_enabled?
show_domain_blocks_for_all? || show_domain_blocks_to_user?
end
def show_domain_blocks_for_all?
Setting.show_domain_blocks == 'all'
end
def show_domain_blocks_to_user?
Setting.show_domain_blocks == 'users' && user_signed_in? && current_user.functional_or_moved?
head 404 unless Setting.show_domain_blocks == 'all' || (Setting.show_domain_blocks == 'users' && user_signed_in?)
end
def set_domain_blocks
@domain_blocks = DomainBlock.with_user_facing_limitations.by_severity
end
def show_rationale_in_response?
always_show_rationale? || show_rationale_for_user?
end
def always_show_rationale?
Setting.show_domain_blocks_rationale == 'all'
end
def show_rationale_for_user?
Setting.show_domain_blocks_rationale == 'users' && user_signed_in? && current_user.functional_or_moved?
end
end
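
The rewritten controller above splits one long condition into small predicates (api_enabled?, show_domain_blocks_for_all?, show_rationale_in_response?, and so on) and additionally requires current_user.functional_or_moved?. A minimal sketch of that predicate pattern, using a hypothetical helper and plain values rather than the real Setting and Devise objects:

# Hypothetical helper, not the real controller: each setting value gets its
# own small predicate, and 'users' additionally requires a functional or
# moved signed-in account.
def api_enabled?(setting, signed_in_and_functional)
  setting == 'all' || (setting == 'users' && signed_in_and_functional)
end

api_enabled?('all', false)      # => true,  blocks are public
api_enabled?('users', true)     # => true,  functional or moved signed-in user
api_enabled?('users', false)    # => false, anonymous request gets a 404
api_enabled?('disabled', true)  # => false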

View file

@ -154,7 +154,7 @@ module SignatureVerification
def verify_signature_strength!
raise SignatureVerificationError, 'Mastodon requires the Date header or (created) pseudo-header to be signed' unless signed_headers.include?('date') || signed_headers.include?('(created)')
raise SignatureVerificationError, 'Mastodon requires the Digest header or (request-target) pseudo-header to be signed' unless signed_headers.include?(HttpSignatureDraft::REQUEST_TARGET) || signed_headers.include?('digest')
raise SignatureVerificationError, 'Mastodon requires the Digest header or (request-target) pseudo-header to be signed' unless signed_headers.include?(Request::REQUEST_TARGET) || signed_headers.include?('digest')
raise SignatureVerificationError, 'Mastodon requires the Host header to be signed when doing a GET request' if request.get? && !signed_headers.include?('host')
raise SignatureVerificationError, 'Mastodon requires the Digest header to be signed when doing a POST request' if request.post? && !signed_headers.include?('digest')
end
@ -192,14 +192,14 @@ module SignatureVerification
def build_signed_string(include_query_string: true)
signed_headers.map do |signed_header|
case signed_header
when HttpSignatureDraft::REQUEST_TARGET
when Request::REQUEST_TARGET
if include_query_string
"#{HttpSignatureDraft::REQUEST_TARGET}: #{request.method.downcase} #{request.original_fullpath}"
"#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.original_fullpath}"
else
# Current versions of Mastodon incorrectly omit the query string from the (request-target) pseudo-header.
# Therefore, temporarily support such incorrect signatures for compatibility.
# TODO: remove eventually some time after release of the fixed version
"#{HttpSignatureDraft::REQUEST_TARGET}: #{request.method.downcase} #{request.path}"
"#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.path}"
end
when '(created)'
raise SignatureVerificationError, 'Invalid pseudo-header (created) for rsa-sha256' unless signature_algorithm == 'hs2019'
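
The comment in build_signed_string above notes that older Mastodon versions omit the query string from the (request-target) pseudo-header, so the verifier temporarily accepts both forms. A small sketch of the two candidate strings it compares against, with made-up request values:

# Sketch only: the two (request-target) forms the verifier accepts, for an
# assumed GET to /inbox?page=true (values are made up).
REQUEST_TARGET = '(request-target)'
verb     = 'get'
path     = '/inbox'
fullpath = '/inbox?page=true'

with_query    = "#{REQUEST_TARGET}: #{verb} #{fullpath}" # correct per draft-cavage
without_query = "#{REQUEST_TARGET}: #{verb} #{path}"     # legacy form, still tolerated

[with_query, without_query]
# => ["(request-target): get /inbox?page=true", "(request-target): get /inbox"]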

View file

@ -112,7 +112,7 @@ const emojifyTextNode = (node, customEmojis) => {
};
const emojifyNode = (node, customEmojis) => {
for (const child of Array.from(node.childNodes)) {
for (const child of node.childNodes) {
switch(child.nodeType) {
case Node.TEXT_NODE:
emojifyTextNode(child, customEmojis);

View file

@ -85,7 +85,6 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
ApplicationRecord.transaction do
@status = Status.create!(@params)
attach_tags(@status)
attach_mentions(@status)
end
resolve_thread(@status)
@ -189,15 +188,6 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
# not a big deal
Trends.tags.register(status)
# Update featured tags
return if @tags.empty? || !status.distributable?
@account.featured_tags.where(tag_id: @tags.pluck(:id)).find_each do |featured_tag|
featured_tag.increment(status.created_at)
end
end
def attach_mentions(status)
@mentions.each do |mention|
mention.status = status
mention.save
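
In the change above, attach_mentions moves inside the same transaction as Status.create! and attach_tags, and attach_tags now bumps any matching featured tags for distributable statuses. A schematic sketch of that featured-tag bookkeeping, using plain Ruby stand-ins (a hash in place of the ActiveRecord models):

# Plain-Ruby stand-ins, not the real models: only tags the account features
# are bumped, and only when the status is distributable.
featured = { 'test' => { statuses_count: 3, last_status_at: nil } }
status   = { tags: %w(test other), distributable: true, created_at: Time.now.utc }

if status[:distributable]
  (status[:tags] & featured.keys).each do |name|
    featured[name][:statuses_count] += 1
    featured[name][:last_status_at] = status[:created_at]
  end
end

featured['test'] # => { statuses_count: 4, last_status_at: <the status timestamp> }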

View file

@ -32,31 +32,24 @@ class FeedManager
"feed:#{type}:#{id}:#{subtype}"
end
# The filter result of the status to a particular feed
# @param [Symbol] timeline_type
# @param [Status] status
# @param [Account|List] receiver
# @return [void|Symbol] nil, :filter, or :skip_home
def filter(timeline_type, status, receiver)
case timeline_type
when :home
filter_from_home(status, receiver.id, build_crutches(receiver.id, [status]), :home)
when :list
(filter_from_list?(status, receiver) ? :filter : nil) || filter_from_home(status, receiver.account_id, build_crutches(receiver.account_id, [status]), :list)
when :mentions
filter_from_mentions?(status, receiver.id) ? :filter : nil
when :tags
filter_from_tags?(status, receiver.id, build_crutches(receiver.id, [status])) ? :filter : nil
end
end
# Check if the status should not be added to a feed
# @param [Symbol] timeline_type
# @param [Status] status
# @param [Account|List] receiver
# @return [Boolean]
def filter?(timeline_type, status, receiver)
!!filter(timeline_type, status, receiver)
case timeline_type
when :home
filter_from_home?(status, receiver.id, build_crutches(receiver.id, [status]), :home)
when :list
filter_from_list?(status, receiver) || filter_from_home?(status, receiver.account_id, build_crutches(receiver.account_id, [status]), :list)
when :mentions
filter_from_mentions?(status, receiver.id)
when :tags
filter_from_tags?(status, receiver.id, build_crutches(receiver.id, [status]))
else
false
end
end
# Add a status to a home feed and send a streaming API update
@ -132,7 +125,7 @@ class FeedManager
crutches = build_crutches(into_account.id, statuses)
statuses.each do |status|
next if filter_from_home(status, into_account.id, crutches)
next if filter_from_home?(status, into_account.id, crutches)
add_to_feed(:home, into_account.id, status, aggregate_reblogs: aggregate)
end
@ -160,7 +153,7 @@ class FeedManager
crutches = build_crutches(list.account_id, statuses)
statuses.each do |status|
next if filter_from_home(status, list.account_id, crutches) || filter_from_list?(status, list)
next if filter_from_home?(status, list.account_id, crutches) || filter_from_list?(status, list)
add_to_feed(:list, list.id, status, aggregate_reblogs: aggregate)
end
@ -292,7 +285,7 @@ class FeedManager
crutches = build_crutches(account.id, statuses)
statuses.each do |status|
next if filter_from_home(status, account.id, crutches)
next if filter_from_home?(status, account.id, crutches)
add_to_feed(:home, account.id, status, aggregate_reblogs: aggregate)
end
@ -385,12 +378,12 @@ class FeedManager
# @param [Status] status
# @param [Integer] receiver_id
# @param [Hash] crutches
# @return [void|Symbol] nil, :skip_home, or :filter
def filter_from_home(status, receiver_id, crutches, timeline_type = :home)
return if receiver_id == status.account_id
return :filter if status.reply? && (status.in_reply_to_id.nil? || status.in_reply_to_account_id.nil?)
return :skip_home if timeline_type != :list && crutches[:exclusive_list_users][status.account_id].present?
return :filter if crutches[:languages][status.account_id].present? && status.language.present? && !crutches[:languages][status.account_id].include?(status.language)
# @return [Boolean]
def filter_from_home?(status, receiver_id, crutches, timeline_type = :home)
return false if receiver_id == status.account_id
return true if status.reply? && (status.in_reply_to_id.nil? || status.in_reply_to_account_id.nil?)
return true if timeline_type != :list && crutches[:exclusive_list_users][status.account_id].present?
return true if crutches[:languages][status.account_id].present? && status.language.present? && !crutches[:languages][status.account_id].include?(status.language)
check_for_blocks = crutches[:active_mentions][status.id] || []
check_for_blocks.push(status.account_id)
@ -400,22 +393,24 @@ class FeedManager
check_for_blocks.concat(crutches[:active_mentions][status.reblog_of_id] || [])
end
return :filter if check_for_blocks.any? { |target_account_id| crutches[:blocking][target_account_id] || crutches[:muting][target_account_id] }
return :filter if crutches[:blocked_by][status.account_id]
return true if check_for_blocks.any? { |target_account_id| crutches[:blocking][target_account_id] || crutches[:muting][target_account_id] }
return true if crutches[:blocked_by][status.account_id]
if status.reply? && !status.in_reply_to_account_id.nil? # Filter out if it's a reply
should_filter = !crutches[:following][status.in_reply_to_account_id] # and I'm not following the person it's a reply to
should_filter &&= receiver_id != status.in_reply_to_account_id # and it's not a reply to me
should_filter &&= status.account_id != status.in_reply_to_account_id # and it's not a self-reply
return !!should_filter
elsif status.reblog? # Filter out a reblog
should_filter = crutches[:hiding_reblogs][status.account_id] # if the reblogger's reblogs are suppressed
should_filter ||= crutches[:blocked_by][status.reblog.account_id] # or if the author of the reblogged status is blocking me
should_filter ||= crutches[:domain_blocking][status.reblog.account.domain] # or the author's domain is blocked
else
should_filter = false
return !!should_filter
end
should_filter ? :filter : nil
false
end
# Check if status should not be added to the mentions feed
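
The reworked filter above returns a tri-state value instead of a boolean, while filter? keeps the old contract by collapsing it with !!. A tiny illustrative sketch of the mapping, as consumed by FeedInsertWorker later in this diff:

# Illustrative only; the symbols come from filter_from_home above:
#   nil        -> push to the feed normally
#   :filter    -> drop it and suppress the follow notification
#   :skip_home -> keep it off home (exclusive list) but still allow notifying
def boolean_filter?(result)
  !!result # preserves the old filter? contract for existing callers
end

boolean_filter?(nil)        # => false
boolean_filter?(:filter)    # => true
boolean_filter?(:skip_home) # => true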

View file

@ -1,31 +0,0 @@
# frozen_string_literal: true
# This implements an older draft of HTTP Signatures:
# https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures
class HttpSignatureDraft
REQUEST_TARGET = '(request-target)'
def initialize(keypair, key_id, full_path: true)
@keypair = keypair
@key_id = key_id
@full_path = full_path
end
def request_target(verb, url)
if url.query.nil? || !@full_path
"#{verb} #{url.path}"
else
"#{verb} #{url.path}?#{url.query}"
end
end
def sign(signed_headers, verb, url)
signed_headers = signed_headers.merge(REQUEST_TARGET => request_target(verb, url))
signed_string = signed_headers.map { |key, value| "#{key.downcase}: #{value}" }.join("\n")
algorithm = 'rsa-sha256'
signature = Base64.strict_encode64(@keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
"keyId=\"#{@key_id}\",algorithm=\"#{algorithm}\",headers=\"#{signed_headers.keys.join(' ').downcase}\",signature=\"#{signature}\""
end
end
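
A hedged usage sketch of the extracted HttpSignatureDraft class above, assuming the class is loaded along with the addressable gem (already a Mastodon dependency); the key, URL and header values are made up:

require 'openssl'
require 'base64'
require 'time'
require 'addressable/uri'

keypair = OpenSSL::PKey::RSA.new(2048)
signer  = HttpSignatureDraft.new(keypair, 'https://example.com/actor#main-key')

url     = Addressable::URI.parse('https://remote.example/inbox?page=true')
headers = { 'Host' => url.host, 'Date' => Time.now.utc.httpdate }

# Produces a Signature header value covering host, date and (request-target),
# roughly: keyId="https://example.com/actor#main-key",algorithm="rsa-sha256",
#          headers="host date (request-target)",signature="..."
puts signer.sign(headers, 'get', url)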

View file

@ -61,6 +61,8 @@ class PerOperationWithDeadline < HTTP::Timeout::PerOperation
end
class Request
REQUEST_TARGET = '(request-target)'
# We enforce a 5s timeout on DNS resolving, 5s timeout on socket opening
# and 5s timeout on the TLS handshake, meaning the worst case should take
# about 15s in total
@ -76,21 +78,11 @@ class Request
@http_client = options.delete(:http_client)
@allow_local = options.delete(:allow_local)
@full_path = !options.delete(:omit_query_string)
@options = {
follow: {
max_hops: 3,
on_redirect: ->(response, request) { re_sign_on_redirect(response, request) },
},
}.merge(options).merge(
socket_class: use_proxy? || @allow_local ? ProxySocket : Socket,
timeout_class: PerOperationWithDeadline,
timeout_options: TIMEOUT
)
@options = options.merge(socket_class: use_proxy? || @allow_local ? ProxySocket : Socket)
@options = @options.merge(timeout_class: PerOperationWithDeadline, timeout_options: TIMEOUT)
@options = @options.merge(proxy_url) if use_proxy?
@headers = {}
@signing = nil
raise Mastodon::HostValidationError, 'Instance does not support hidden service connections' if block_hidden_service?
set_common_headers!
@ -100,9 +92,8 @@ class Request
def on_behalf_of(actor, sign_with: nil)
raise ArgumentError, 'actor must not be nil' if actor.nil?
key_id = ActivityPub::TagManager.instance.key_uri_for(actor)
keypair = sign_with.present? ? OpenSSL::PKey::RSA.new(sign_with) : actor.keypair
@signing = HttpSignatureDraft.new(keypair, key_id, full_path: @full_path)
@actor = actor
@keypair = sign_with.present? ? OpenSSL::PKey::RSA.new(sign_with) : @actor.keypair
self
end
@ -134,7 +125,7 @@ class Request
end
def headers
(@signing ? @headers.merge('Signature' => signature) : @headers)
(@actor ? @headers.merge('Signature' => signature) : @headers).without(REQUEST_TARGET)
end
class << self
@ -149,13 +140,14 @@ class Request
end
def http_client
HTTP.use(:auto_inflate)
HTTP.use(:auto_inflate).follow(max_hops: 3)
end
end
private
def set_common_headers!
@headers[REQUEST_TARGET] = request_target
@headers['User-Agent'] = Mastodon::Version.user_agent
@headers['Host'] = @url.host
@headers['Date'] = Time.now.utc.httpdate
@ -166,28 +158,31 @@ class Request
@headers['Digest'] = "SHA-256=#{Digest::SHA256.base64digest(@options[:body])}"
end
def request_target
if @url.query.nil? || !@full_path
"#{@verb} #{@url.path}"
else
"#{@verb} #{@url.path}?#{@url.query}"
end
end
def signature
@signing.sign(@headers.without('User-Agent', 'Accept-Encoding'), @verb, @url)
algorithm = 'rsa-sha256'
signature = Base64.strict_encode64(@keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
"keyId=\"#{key_id}\",algorithm=\"#{algorithm}\",headers=\"#{signed_headers.keys.join(' ').downcase}\",signature=\"#{signature}\""
end
def re_sign_on_redirect(_response, request)
# Delete existing signature if there is one, since it will be invalid
request.headers.delete('Signature')
return unless @signing.present? && @verb == :get
signed_headers = request.headers.to_h.slice(*@headers.keys)
unless @headers.keys.all? { |key| signed_headers.key?(key) }
# We have lost some headers in the process, so don't sign the new
# request, in order to avoid issuing a valid signature with fewer
# conditions than expected.
Rails.logger.warn { "Some headers (#{@headers.keys - signed_headers.keys}) have been lost on redirect from {@uri} to #{request.uri}, this should not happen. Skipping signatures" }
return
def signed_string
signed_headers.map { |key, value| "#{key.downcase}: #{value}" }.join("\n")
end
signature_value = @signing.sign(signed_headers.without('User-Agent', 'Accept-Encoding'), @verb, Addressable::URI.parse(request.uri))
request.headers['Signature'] = signature_value
def signed_headers
@headers.without('User-Agent', 'Accept-Encoding')
end
def key_id
ActivityPub::TagManager.instance.key_uri_for(@actor)
end
def http_client
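
The re_sign_on_redirect logic above only issues a fresh signature when every originally signed header survived the redirect; otherwise it logs a warning and skips signing. A standalone sketch of that guard with made-up header sets:

# Made-up header sets; mirrors the slice-and-check in re_sign_on_redirect.
original       = { 'Host' => 'example.com', 'Date' => 'Wed, 01 Jan 2025 00:00:00 GMT', 'Accept' => '*/*' }
after_redirect = { 'Host' => 'redirected.example.com', 'Accept' => '*/*' } # 'Date' was dropped

kept = after_redirect.slice(*original.keys)

if original.keys.all? { |key| kept.key?(key) }
  puts 'all signed headers survived, re-sign the redirected request'
else
  puts 'a signed header was lost, skip signing' # this branch fires here
end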

View file

@ -34,8 +34,7 @@ class Poll < ApplicationRecord
validates :options, presence: true
validates :expires_at, presence: true, if: :local?
validates_with PollOptionsValidator, if: :local?
validates_with PollExpirationValidator, if: -> { local? && expires_at_changed? }
validates_with PollValidator, on: :create, if: :local?
scope :attached, -> { where.not(status_id: nil) }
scope :unattached, -> { where(status_id: nil) }

View file

@ -72,10 +72,10 @@ class REST::InstanceSerializer < ActiveModel::Serializer
},
polls: {
max_options: PollOptionsValidator::MAX_OPTIONS,
max_characters_per_option: PollOptionsValidator::MAX_OPTION_CHARS,
min_expiration: PollExpirationValidator::MIN_EXPIRATION,
max_expiration: PollExpirationValidator::MAX_EXPIRATION,
max_options: PollValidator::MAX_OPTIONS,
max_characters_per_option: PollValidator::MAX_OPTION_CHARS,
min_expiration: PollValidator::MIN_EXPIRATION,
max_expiration: PollValidator::MAX_EXPIRATION,
},
translation: {

View file

@ -78,10 +78,10 @@ class REST::V1::InstanceSerializer < ActiveModel::Serializer
},
polls: {
max_options: PollOptionsValidator::MAX_OPTIONS,
max_characters_per_option: PollOptionsValidator::MAX_OPTION_CHARS,
min_expiration: PollExpirationValidator::MIN_EXPIRATION,
max_expiration: PollExpirationValidator::MAX_EXPIRATION,
max_options: PollValidator::MAX_OPTIONS,
max_characters_per_option: PollValidator::MAX_OPTION_CHARS,
min_expiration: PollValidator::MIN_EXPIRATION,
max_expiration: PollValidator::MAX_EXPIRATION,
},
}
end

View file

@ -186,26 +186,7 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
end
def update_tags!
previous_tags = @status.tags.to_a
current_tags = @status.tags = Tag.find_or_create_by_names(@raw_tags)
return unless @status.distributable?
added_tags = current_tags - previous_tags
unless added_tags.empty?
@account.featured_tags.where(tag_id: added_tags.pluck(:id)).find_each do |featured_tag|
featured_tag.increment(@status.created_at)
end
end
removed_tags = previous_tags - current_tags
return if removed_tags.empty?
@account.featured_tags.where(tag_id: removed_tags.pluck(:id)).find_each do |featured_tag|
featured_tag.decrement(@status)
end
@status.tags = Tag.find_or_create_by_names(@raw_tags)
end
def update_mentions!

View file

@ -1,13 +0,0 @@
# frozen_string_literal: true
class PollExpirationValidator < ActiveModel::Validator
MAX_EXPIRATION = 1.month.freeze
MIN_EXPIRATION = 5.minutes.freeze
def validate(poll)
current_time = Time.now.utc
poll.errors.add(:expires_at, I18n.t('polls.errors.duration_too_long')) if poll.expires_at.nil? || poll.expires_at - current_time > MAX_EXPIRATION
poll.errors.add(:expires_at, I18n.t('polls.errors.duration_too_short')) if poll.expires_at.present? && (poll.expires_at - current_time).ceil < MIN_EXPIRATION
end
end
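
A minimal sketch of the expiration rule extracted into PollExpirationValidator above; the 5.minutes floor and 1.month ceiling mirror MIN_EXPIRATION and MAX_EXPIRATION, and a nil expiry is rejected just as an over-long one is (requires ActiveSupport):

require 'active_support/all'

def expiration_ok?(expires_at, now: Time.now.utc)
  return false if expires_at.nil?
  remaining = expires_at - now
  remaining.ceil >= 5.minutes && remaining <= 1.month
end

expiration_ok?(1.day.from_now)     # => true
expiration_ok?(2.minutes.from_now) # => false, under the 5 minute floor
expiration_ok?(2.months.from_now)  # => false, over the 1 month ceiling
expiration_ok?(nil)                # => false, treated as too long by the validator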

View file

@ -1,13 +1,19 @@
# frozen_string_literal: true
class PollOptionsValidator < ActiveModel::Validator
class PollValidator < ActiveModel::Validator
MAX_OPTIONS = 25
MAX_OPTION_CHARS = 150
MAX_EXPIRATION = 1.month.freeze
MIN_EXPIRATION = 5.minutes.freeze
def validate(poll)
current_time = Time.now.utc
poll.errors.add(:options, I18n.t('polls.errors.too_few_options')) unless poll.options.size > 1
poll.errors.add(:options, I18n.t('polls.errors.too_many_options', max: MAX_OPTIONS)) if poll.options.size > MAX_OPTIONS
poll.errors.add(:options, I18n.t('polls.errors.over_character_limit', max: MAX_OPTION_CHARS)) if poll.options.any? { |option| option.mb_chars.grapheme_length > MAX_OPTION_CHARS }
poll.errors.add(:options, I18n.t('polls.errors.duplicate_options')) unless poll.options.uniq.size == poll.options.size
poll.errors.add(:expires_at, I18n.t('polls.errors.duration_too_long')) if poll.expires_at.nil? || poll.expires_at - current_time > MAX_EXPIRATION
poll.errors.add(:expires_at, I18n.t('polls.errors.duration_too_short')) if poll.expires_at.present? && (poll.expires_at - current_time).ceil < MIN_EXPIRATION
end
end

View file

@ -29,31 +29,27 @@ class FeedInsertWorker
private
def check_and_insert
filter_result = feed_filter
if filter_result
if feed_filtered?
perform_unpush if update?
else
perform_push
perform_notify if notify?
end
end
perform_notify if notify?(filter_result)
end
def feed_filter
def feed_filtered?
case @type
when :home
FeedManager.instance.filter(:home, @status, @follower)
FeedManager.instance.filter?(:home, @status, @follower)
when :tags
FeedManager.instance.filter(:tags, @status, @follower)
FeedManager.instance.filter?(:tags, @status, @follower)
when :list
FeedManager.instance.filter(:list, @status, @list)
FeedManager.instance.filter?(:list, @status, @list)
end
end
def notify?(filter_result)
return false if @type != :home || @status.reblog? || (@status.reply? && @status.in_reply_to_account_id != @status.account_id) ||
filter_result == :filter
def notify?
return false if @type != :home || @status.reblog? || (@status.reply? && @status.in_reply_to_account_id != @status.account_id)
Follow.find_by(account: @follower, target_account: @status.account)&.notify?
end

View file

@ -122,7 +122,7 @@ class Rack::Attack
end
throttle('throttle_email_confirmations/ip', limit: 25, period: 5.minutes) do |req|
req.throttleable_remote_ip if (req.post? && (req.path_matches?('/auth/confirmation') || req.path == '/api/v1/emails/confirmations')) || ((req.put? || req.patch?) && req.path_matches?('/auth/setup'))
req.throttleable_remote_ip if req.post? && (req.path_matches?('/auth/confirmation') || req.path == '/api/v1/emails/confirmations')
end
throttle('throttle_email_confirmations/email', limit: 5, period: 30.minutes) do |req|
@ -133,14 +133,6 @@ class Rack::Attack
end
end
throttle('throttle_auth_setup/email', limit: 5, period: 10.minutes) do |req|
req.params.dig('user', 'email').presence if (req.put? || req.patch?) && req.path_matches?('/auth/setup')
end
throttle('throttle_auth_setup/account', limit: 5, period: 10.minutes) do |req|
req.warden_user_id if (req.put? || req.patch?) && req.path_matches?('/auth/setup')
end
throttle('throttle_login_attempts/ip', limit: 25, period: 5.minutes) do |req|
req.throttleable_remote_ip if req.post? && req.path_matches?('/auth/sign_in')
end

View file

@ -56,7 +56,7 @@ services:
web:
build: .
image: ghcr.io/mastodon/mastodon:v4.2.17
image: ghcr.io/mastodon/mastodon:v4.2.15
restart: always
env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@ -77,7 +77,7 @@ services:
streaming:
build: .
image: ghcr.io/mastodon/mastodon:v4.2.17
image: ghcr.io/mastodon/mastodon:v4.2.15
restart: always
env_file: .env.production
command: node ./streaming
@ -95,7 +95,7 @@ services:
sidekiq:
build: .
image: ghcr.io/mastodon/mastodon:v4.2.17
image: ghcr.io/mastodon/mastodon:v4.2.15
restart: always
env_file: .env.production
command: bundle exec sidekiq

View file

@ -13,7 +13,7 @@ module Mastodon
end
def patch
17
15
end
def default_prerelease

View file

@ -91,17 +91,19 @@ class Sanitize
]
)
MASTODON_OEMBED = freeze_config(
elements: %w(audio iframe source video),
MASTODON_OEMBED ||= freeze_config(
elements: %w(audio embed iframe source video),
attributes: {
'audio' => %w(controls),
'embed' => %w(height src type width),
'iframe' => %w(allowfullscreen frameborder height scrolling src width),
'source' => %w(src type),
'video' => %w(controls height loop width),
},
protocols: {
'embed' => { 'src' => HTTP_PROTOCOLS },
'iframe' => { 'src' => HTTP_PROTOCOLS },
'source' => { 'src' => HTTP_PROTOCOLS },
},
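
A short sketch of applying the tightened oembed config above with the sanitize gem's Sanitize.fragment, assuming the constant is loaded as Sanitize::Config::MASTODON_OEMBED; the example HTML is made up and the exact output markup may differ slightly:

require 'sanitize'

html = '<embed src="https://evil.example/x.swf">' \
       '<iframe src="https://peertube.example/videos/embed/1" allowfullscreen></iframe>'

# The embed element is no longer allow-listed and is dropped; the iframe
# survives with only its allowed attributes.
puts Sanitize.fragment(html, Sanitize::Config::MASTODON_OEMBED)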

View file

@ -130,6 +130,10 @@ RSpec.describe ActivityPub::Activity::Create do
context 'when fetching' do
subject { described_class.new(json, sender) }
before do
subject.perform
end
context 'when object publication date is below ISO8601 range' do
let(:object_json) do
{
@ -141,8 +145,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status with a valid creation date', :aggregate_failures do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -163,8 +165,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status with a valid creation date', :aggregate_failures do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -186,8 +186,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status with appropriate creation and edition dates', :aggregate_failures do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -211,13 +209,17 @@ RSpec.describe ActivityPub::Activity::Create do
}
end
it 'creates status and does not mark it as edited' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.text).to eq 'Lorem ipsum'
end
it 'does not mark status as edited' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.edited?).to be false
end
end
@ -232,7 +234,7 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'does not create a status' do
expect { subject.perform }.to_not change(sender.statuses, :count)
expect(sender.statuses.count).to be_zero
end
end
@ -246,8 +248,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -255,8 +255,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'missing to/cc defaults to direct privacy' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -275,8 +273,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -295,8 +291,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -315,8 +309,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -335,8 +327,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -355,8 +345,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -375,8 +363,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -395,8 +381,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -419,8 +403,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -440,13 +422,15 @@ RSpec.describe ActivityPub::Activity::Create do
}
end
it 'creates status with a silent mention' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.visibility).to eq 'limited'
end
it 'creates silent mention' do
status = sender.statuses.first
expect(status.mentions.first).to be_silent
end
end
@ -468,8 +452,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -490,8 +472,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -520,8 +500,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -544,8 +522,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
end
@ -568,8 +544,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -595,8 +569,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -622,8 +594,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -649,8 +619,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -674,8 +642,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
end
@ -698,42 +664,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.tags.map(&:name)).to include('test')
end
end
context 'with featured hashtags' do
let(:object_json) do
{
id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
type: 'Note',
content: 'Lorem ipsum',
to: 'https://www.w3.org/ns/activitystreams#Public',
tag: [
{
type: 'Hashtag',
href: 'http://example.com/blah',
name: '#test',
},
],
}
end
before do
sender.featured_tags.create!(name: 'test')
end
it 'creates status and updates featured tag' do
expect { subject.perform }
.to change(sender.statuses, :count).by(1)
.and change { sender.featured_tags.first.reload.statuses_count }.by(1)
.and change { sender.featured_tags.first.reload.last_status_at }.from(nil).to(be_present)
status = sender.statuses.first
expect(status).to_not be_nil
@ -757,8 +687,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
end
@ -781,8 +709,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
end
@ -807,8 +733,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -835,8 +759,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
@ -862,8 +784,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
end
@ -885,8 +805,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'creates status' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
status = sender.statuses.first
expect(status).to_not be_nil
end
@ -917,13 +835,13 @@ RSpec.describe ActivityPub::Activity::Create do
}
end
it 'creates status with a poll' do
expect { subject.perform }.to change(sender.statuses, :count).by(1)
it 'creates status' do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.poll).to_not be_nil
end
it 'creates a poll' do
poll = sender.polls.first
expect(poll).to_not be_nil
expect(poll.status).to_not be_nil
@ -946,8 +864,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'adds a vote to the poll with correct uri' do
expect { subject.perform }.to change(poll.votes, :count).by(1)
vote = poll.votes.first
expect(vote).to_not be_nil
expect(vote.uri).to eq object_json[:id]
@ -973,8 +889,6 @@ RSpec.describe ActivityPub::Activity::Create do
end
it 'does not add a vote to the poll' do
expect { subject.perform }.to_not change(poll.votes, :count)
expect(poll.votes.first).to be_nil
end
end

View file

@ -13,13 +13,10 @@ RSpec.describe ActivityPub::LinkedDataSignature do
{
'@context' => 'https://www.w3.org/ns/activitystreams',
'id' => 'http://example.com/hello-world',
'type' => 'Note',
'content' => 'Hello world',
}
end
let(:signed_json) { raw_json.merge('signature' => signature) }
let(:json) { signed_json }
let(:json) { raw_json.merge('signature' => signature) }
before do
stub_jsonld_contexts!
@ -97,54 +94,6 @@ RSpec.describe ActivityPub::LinkedDataSignature do
expect(subject.verify_actor!).to be_nil
end
end
context 'when an attribute has been removed from the document' do
let(:signature) { raw_signature.merge('type' => 'RsaSignature2017', 'signatureValue' => sign(sender, raw_signature, raw_json)) }
let(:json) { signed_json.without('content') }
let(:raw_signature) do
{
'creator' => 'http://example.com/alice',
'created' => '2017-09-23T20:21:34Z',
}
end
it 'returns nil' do
expect(subject.verify_actor!).to be_nil
end
end
context 'when an attribute has been added to the document' do
let(:signature) { raw_signature.merge('type' => 'RsaSignature2017', 'signatureValue' => sign(sender, raw_signature, raw_json)) }
let(:json) { signed_json.merge('attributedTo' => 'http://example.com/bob') }
let(:raw_signature) do
{
'creator' => 'http://example.com/alice',
'created' => '2017-09-23T20:21:34Z',
}
end
it 'returns nil' do
expect(subject.verify_actor!).to be_nil
end
end
context 'when an existing attribute has been changed' do
let(:signature) { raw_signature.merge('type' => 'RsaSignature2017', 'signatureValue' => sign(sender, raw_signature, raw_json)) }
let(:json) { signed_json.merge('content' => 'oops') }
let(:raw_signature) do
{
'creator' => 'http://example.com/alice',
'created' => '2017-09-23T20:21:34Z',
}
end
it 'returns nil' do
expect(subject.verify_actor!).to be_nil
end
end
end
describe '#sign!' do

View file

@ -162,7 +162,6 @@ RSpec.describe FeedManager do
allow(List).to receive(:where).and_return(list)
status = Fabricate(:status, text: 'I post a lot', account: bob)
expect(described_class.instance.filter?(:home, status, alice)).to be true
expect(described_class.instance.filter(:home, status, alice)).to be :skip_home
end
it 'returns true for reblog from followee on exclusive list' do
@ -173,7 +172,6 @@ RSpec.describe FeedManager do
status = Fabricate(:status, text: 'I post a lot', account: bob)
reblog = Fabricate(:status, reblog: status, account: jeff)
expect(described_class.instance.filter?(:home, reblog, alice)).to be true
expect(described_class.instance.filter(:home, reblog, alice)).to be :skip_home
end
it 'returns false for post from followee on non-exclusive list' do

View file

@ -58,13 +58,14 @@ describe Request do
expect(a_request(:get, 'http://example.com')).to have_been_made.once
end
it 'makes a request with expected headers, yields, and closes the underlying connection' do
allow(subject.send(:http_client)).to receive(:close)
it 'sets headers' do
expect { |block| subject.perform(&block) }.to yield_control
expect(a_request(:get, 'http://example.com').with(headers: subject.headers)).to have_been_made
expect(subject.send(:http_client)).to have_received(:close)
end
it 'closes underlying connection' do
expect_any_instance_of(HTTP::Client).to receive(:close)
expect { |block| subject.perform(&block) }.to yield_control
end
it 'returns response which implements body_with_limit' do
@ -74,29 +75,6 @@ describe Request do
end
end
context 'with a redirect and HTTP signatures' do
let(:account) { Fabricate(:account) }
before do
stub_request(:get, 'http://example.com').to_return(status: 301, headers: { Location: 'http://redirected.example.com/foo' })
stub_request(:get, 'http://redirected.example.com/foo').to_return(body: 'lorem ipsum')
end
it 'makes a request with expected headers and follows redirects' do
expect { |block| subject.on_behalf_of(account).perform(&block) }.to yield_control
# request.headers includes the `Signature` sent for the first request
expect(a_request(:get, 'http://example.com').with(headers: subject.headers)).to have_been_made.once
# request.headers includes the `Signature`, but it has changed
expect(a_request(:get, 'http://redirected.example.com/foo').with(headers: subject.headers.merge({ 'Host' => 'redirected.example.com' }))).to_not have_been_made
# `with(headers: )` matching tests for inclusion, so strip `Signature`
# This doesn't actually test that there is a signature, but it tests that the original signature is not passed
expect(a_request(:get, 'http://redirected.example.com/foo').with(headers: subject.headers.without('Signature').merge({ 'Host' => 'redirected.example.com' }))).to have_been_made.once
end
end
context 'with private host' do
around do |example|
WebMock.disable!

View file

@ -1,137 +0,0 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'Domain Blocks' do
describe 'GET /api/v1/instance/domain_blocks' do
let(:user) { Fabricate(:user) }
let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id).token }
before { Fabricate(:domain_block) }
context 'with domain blocks set to all' do
before { Setting.show_domain_blocks = 'all' }
it 'returns http success' do
get api_v1_instance_domain_blocks_path
expect(response)
.to have_http_status(200)
expect(response.content_type)
.to start_with('application/json')
expect(response.parsed_body)
.to be_present
.and(be_an(Array))
.and(have_attributes(size: 1))
end
end
context 'with domain blocks set to users' do
before { Setting.show_domain_blocks = 'users' }
context 'without authentication token' do
it 'returns http not found' do
get api_v1_instance_domain_blocks_path
expect(response)
.to have_http_status(404)
end
end
context 'with authentication token' do
context 'with unapproved user' do
before { user.update(approved: false) }
it 'returns http not found' do
get api_v1_instance_domain_blocks_path, headers: { 'Authorization' => "Bearer #{token}" }
expect(response)
.to have_http_status(404)
end
end
context 'with unconfirmed user' do
before { user.update(confirmed_at: nil) }
it 'returns http not found' do
get api_v1_instance_domain_blocks_path, headers: { 'Authorization' => "Bearer #{token}" }
expect(response)
.to have_http_status(404)
end
end
context 'with disabled user' do
before { user.update(disabled: true) }
it 'returns http not found' do
get api_v1_instance_domain_blocks_path, headers: { 'Authorization' => "Bearer #{token}" }
expect(response)
.to have_http_status(404)
end
end
context 'with suspended user' do
before { user.account.update(suspended_at: Time.zone.now) }
it 'returns http not found' do
get api_v1_instance_domain_blocks_path, headers: { 'Authorization' => "Bearer #{token}" }
expect(response)
.to have_http_status(403)
end
end
context 'with moved user' do
before { user.account.update(moved_to_account_id: Fabricate(:account).id) }
it 'returns http success' do
get api_v1_instance_domain_blocks_path, headers: { 'Authorization' => "Bearer #{token}" }
expect(response)
.to have_http_status(200)
expect(response.content_type)
.to start_with('application/json')
expect(response.parsed_body)
.to be_present
.and(be_an(Array))
.and(have_attributes(size: 1))
end
end
context 'with normal user' do
it 'returns http success' do
get api_v1_instance_domain_blocks_path, headers: { 'Authorization' => "Bearer #{token}" }
expect(response)
.to have_http_status(200)
expect(response.content_type)
.to start_with('application/json')
expect(response.parsed_body)
.to be_present
.and(be_an(Array))
.and(have_attributes(size: 1))
end
end
end
end
context 'with domain blocks set to disabled' do
before { Setting.show_domain_blocks = 'disabled' }
it 'returns http not found' do
get api_v1_instance_domain_blocks_path
expect(response)
.to have_http_status(404)
end
end
end
end

View file

@ -292,22 +292,16 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
updated: '2021-09-08T22:39:25Z',
tag: [
{ type: 'Hashtag', name: 'foo' },
{ type: 'Hashtag', name: 'bar' },
],
}
end
before do
status.account.featured_tags.create!(name: 'bar')
status.account.featured_tags.create!(name: 'test')
subject.call(status, json, json)
end
it 'updates tags and featured tags' do
expect { subject.call(status, json, json) }
.to change { status.tags.reload.pluck(:name) }.from(%w(test foo)).to(%w(foo bar))
.and change { status.account.featured_tags.find_by(name: 'test').statuses_count }.by(-1)
.and change { status.account.featured_tags.find_by(name: 'bar').statuses_count }.by(1)
.and change { status.account.featured_tags.find_by(name: 'bar').last_status_at }.from(nil).to(be_present)
it 'updates tags' do
expect(status.tags.reload.map(&:name)).to eq %w(foo)
end
end

View file

@ -1,45 +0,0 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe PollOptionsValidator do
describe '#validate' do
before do
validator.validate(poll)
end
let(:validator) { described_class.new }
let(:poll) { instance_double(Poll, options: options, expires_at: expires_at, errors: errors) }
let(:errors) { instance_double(ActiveModel::Errors, add: nil) }
let(:options) { %w(foo bar) }
let(:expires_at) { 1.day.from_now }
it 'has no errors' do
expect(errors).to_not have_received(:add)
end
context 'when the poll has duplicate options' do
let(:options) { %w(foo foo) }
it 'adds errors' do
expect(errors).to have_received(:add)
end
end
context 'when the poll has no options' do
let(:options) { [] }
it 'adds errors' do
expect(errors).to have_received(:add)
end
end
context 'when the poll has too many options' do
let(:options) { Array.new(described_class::MAX_OPTIONS + 1) { |i| "option #{i}" } }
it 'adds errors' do
expect(errors).to have_received(:add)
end
end
end
end

View file

@ -2,7 +2,7 @@
require 'rails_helper'
RSpec.describe PollExpirationValidator, type: :validator do
RSpec.describe PollValidator, type: :validator do
describe '#validate' do
before do
validator.validate(poll)
@ -14,24 +14,16 @@ RSpec.describe PollExpirationValidator, type: :validator do
let(:options) { %w(foo bar) }
let(:expires_at) { 1.day.from_now }
it 'has no errors' do
it 'have no errors' do
expect(errors).to_not have_received(:add)
end
context 'when the poll expires in 5 min from now' do
context 'when expires is just 5 min ago' do
let(:expires_at) { 5.minutes.from_now }
it 'has no errors' do
it 'not calls errors add' do
expect(errors).to_not have_received(:add)
end
end
context 'when the poll expires in the past' do
let(:expires_at) { 5.minutes.ago }
it 'has errors' do
expect(errors).to have_received(:add)
end
end
end
end

View file

@ -8,7 +8,6 @@ describe FeedInsertWorker do
describe 'perform' do
let(:follower) { Fabricate(:account) }
let(:status) { Fabricate(:status) }
let(:list) { Fabricate(:list) }
context 'when there are no records' do
it 'skips push with missing status' do
@ -32,7 +31,7 @@ describe FeedInsertWorker do
context 'when there are real records' do
it 'skips the push when there is a filter' do
instance = instance_double(FeedManager, push_to_home: nil, filter?: true, filter: :filter)
instance = instance_double(FeedManager, push_to_home: nil, filter?: true)
allow(FeedManager).to receive(:instance).and_return(instance)
result = subject.perform(status.id, follower.id)
@ -41,31 +40,13 @@ describe FeedInsertWorker do
end
it 'pushes the status onto the home timeline without filter' do
instance = instance_double(FeedManager, push_to_home: nil, filter?: false, filter: nil)
instance = instance_double(FeedManager, push_to_home: nil, filter?: false)
allow(FeedManager).to receive(:instance).and_return(instance)
result = subject.perform(status.id, follower.id)
expect(result).to be_nil
expect(instance).to have_received(:push_to_home).with(follower, status, update: nil)
end
it 'pushes the status onto the tags timeline without filter' do
instance = instance_double(FeedManager, push_to_home: nil, filter?: false, filter: nil)
allow(FeedManager).to receive(:instance).and_return(instance)
result = subject.perform(status.id, follower.id, :tags)
expect(result).to be_nil
expect(instance).to have_received(:push_to_home).with(follower, status, update: nil)
end
it 'pushes the status onto the list timeline without filter' do
instance = instance_double(FeedManager, push_to_list: nil, filter?: false, filter: nil)
allow(FeedManager).to receive(:instance).and_return(instance)
result = subject.perform(status.id, list.id, :list)
expect(result).to be_nil
expect(instance).to have_received(:push_to_list).with(list, status, update: nil)
end
end
end
end