Compare commits

...

5 Commits

Author SHA1 Message Date
Claire
881511d283
Merge c05ab2f556 into fbe9728f36 2025-05-06 15:05:46 +00:00
Claire
fbe9728f36
Bump version to v4.3.8 (#34626)
2025-05-06 14:17:07 +00:00
Claire
3bbf3e9709
Fix code style issue (#34624) 2025-05-06 13:35:54 +00:00
Claire
79931bf3ae
Merge commit from fork
* Check scheme in account and post links

* Harden media attachments

* Client-side mitigation

* Client-side mitigation for media attachments
2025-05-06 15:02:13 +02:00
Claire
c05ab2f556 Add digest re-check before removing followers in synchronization mechanism 2025-03-26 14:17:37 +01:00
13 changed files with 201 additions and 16 deletions

View File

@@ -2,9 +2,34 @@
All notable changes to this project will be documented in this file.
## [4.3.8] - 2025-05-06
### Security
- Update dependencies
- Check scheme on account, profile, and media URLs ([GHSA-x2rc-v5wx-g3m5](https://github.com/mastodon/mastodon/security/advisories/GHSA-x2rc-v5wx-g3m5))
### Added
- Add warning for REDIS_NAMESPACE deprecation at startup (#34581 by @ClearlyClaire)
- Add built-in context for interaction policies (#34574 by @ClearlyClaire)
### Changed
- Change activity distribution error handling to skip retrying for deleted accounts (#33617 by @ClearlyClaire)
### Removed
- Remove double-query for signed query strings (#34610 by @ClearlyClaire)
### Fixed
- Fix incorrect redirect in response to unauthenticated API requests in limited federation mode (#34549 by @ClearlyClaire)
- Fix sign-up e-mail confirmation page reloading on error or redirect (#34548 by @ClearlyClaire)
## [4.3.7] - 2025-04-02
### Add
### Added
- Add delay to profile updates to debounce them (#34137 by @ClearlyClaire)
- Add support for paginating partial collections in `SynchronizeFollowersService` (#34272 and #34277 by @ClearlyClaire)

View File

@@ -77,6 +77,17 @@ export function normalizeStatus(status, normalOldStatus) {
normalStatus.contentHtml = emojify(normalStatus.content, emojiMap);
normalStatus.spoilerHtml = emojify(escapeTextContentForBrowser(spoilerText), emojiMap);
normalStatus.hidden = expandSpoilers ? false : spoilerText.length > 0 || normalStatus.sensitive;
if (normalStatus.url && !(normalStatus.url.startsWith('http://') || normalStatus.url.startsWith('https://'))) {
normalStatus.url = null;
}
normalStatus.url ||= normalStatus.uri;
normalStatus.media_attachments.forEach(item => {
if (item.remote_url && !(item.remote_url.startsWith('http://') || item.remote_url.startsWith('https://')))
item.remote_url = null;
});
}
if (normalOldStatus) {

View File

@@ -144,5 +144,10 @@ export function createAccountFromServerJSON(serverJSON: ApiAccountJSON) {
),
note_emojified: emojify(accountJSON.note, emojiMap),
note_plain: unescapeHTML(accountJSON.note),
url:
accountJSON.url.startsWith('http://') ||
accountJSON.url.startsWith('https://')
? accountJSON.url
: accountJSON.uri,
});
}

View File

@@ -15,13 +15,15 @@ class ActivityPub::Parser::MediaAttachmentParser
end
def remote_url
Addressable::URI.parse(@json['url'])&.normalize&.to_s
url = Addressable::URI.parse(@json['url'])&.normalize&.to_s
url unless unsupported_uri_scheme?(url)
rescue Addressable::URI::InvalidURIError
nil
end
def thumbnail_remote_url
Addressable::URI.parse(@json['icon'].is_a?(Hash) ? @json['icon']['url'] : @json['icon'])&.normalize&.to_s
url = Addressable::URI.parse(@json['icon'].is_a?(Hash) ? @json['icon']['url'] : @json['icon'])&.normalize&.to_s
url unless unsupported_uri_scheme?(url)
rescue Addressable::URI::InvalidURIError
nil
end
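
Both parser methods now return nil when the normalized URL fails the unsupported_uri_scheme? check. That predicate comes from JsonLdHelper and is not part of this diff; the following is only a rough sketch of the kind of guard it performs (the helper body and example URLs are assumptions for illustration), rejecting anything that is not a plain http(s) URL.

# Hypothetical sketch in the spirit of JsonLdHelper#unsupported_uri_scheme?
# (the real helper is not shown in this diff): only http/https URLs pass.
def unsupported_uri_scheme?(uri)
  uri.nil? || !uri.start_with?('http://', 'https://')
end

unsupported_uri_scheme?('https://example.com/media/1.png') # => false (kept)
unsupported_uri_scheme?('javascript:alert(1)')             # => true  (remote_url becomes nil)
unsupported_uri_scheme?(nil)                               # => true  (dropped)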

View File

@@ -29,7 +29,10 @@ class ActivityPub::Parser::StatusParser
end
def url
url_to_href(@object['url'], 'text/html') if @object['url'].present?
return if @object['url'].blank?
url = url_to_href(@object['url'], 'text/html')
url unless unsupported_uri_scheme?(url)
end
def text

View File

@@ -4,6 +4,7 @@ require 'singleton'
class ActivityPub::TagManager
include Singleton
include JsonLdHelper
include RoutingHelper
CONTEXT = 'https://www.w3.org/ns/activitystreams'
@@ -17,7 +18,7 @@ class ActivityPub::TagManager
end
def url_for(target)
return target.url if target.respond_to?(:local?) && !target.local?
return unsupported_uri_scheme?(target.url) ? nil : target.url if target.respond_to?(:local?) && !target.local?
return unless target.respond_to?(:object_type)

View File

@@ -8,6 +8,6 @@ class ActivityPub::PrepareFollowersSynchronizationService < BaseService
return if params['collectionId'] != @account.followers_url || non_matching_uri_hosts?(@account.uri, params['url']) || @account.local_followers_hash == params['digest']
ActivityPub::FollowersSynchronizationWorker.perform_async(@account.id, params['url'])
ActivityPub::FollowersSynchronizationWorker.perform_async(@account.id, params['url'], params['digest'])
end
end

View File

@@ -6,13 +6,15 @@ class ActivityPub::SynchronizeFollowersService < BaseService
MAX_COLLECTION_PAGES = 10
def call(account, partial_collection_url)
def call(account, partial_collection_url, expected_digest = nil)
@account = account
@expected_followers_ids = []
@digest = [expected_digest].pack('H*') if expected_digest.present?
return unless process_collection!(partial_collection_url)
remove_unexpected_local_followers!
# Only remove followers if the digests match, as it is a destructive operation
remove_unexpected_local_followers! if expected_digest.blank? || @digest == "\x00" * 32
end
private
@@ -21,6 +23,8 @@ class ActivityPub::SynchronizeFollowersService < BaseService
page_expected_followers = extract_local_followers(items)
@expected_followers_ids.concat(page_expected_followers.pluck(:id))
items.each { |uri| Xorcist.xor!(@digest, Digest::SHA256.digest(uri)) } if @digest.present?
handle_unexpected_outgoing_follows!(page_expected_followers)
end
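
The digest comparison works because the expected digest is itself the XOR of the SHA-256 digests of every follower URI (the spec further down builds it exactly that way). The service unpacks the hex digest into 32 raw bytes and XORs the SHA-256 of each URI it processes back into it; since XOR is its own inverse, a matching collection cancels every byte back to zero, and only then is the destructive removal performed. Below is a standalone sketch of that check using the same Xorcist and Digest::SHA256 calls as the diff; the helper name and example URIs are made up for illustration.

require 'digest'
require 'xorcist' # in-place byte-wise XOR, as used by the service above

# Illustrative helper (not part of the diff): true when XORing the SHA-256
# digests of the given URIs cancels the expected hex digest to all zero bytes.
def followers_digest_match?(expected_hex_digest, follower_uris)
  digest = [expected_hex_digest].pack('H*') # hex string -> 32 raw bytes
  follower_uris.each { |uri| Xorcist.xor!(digest, Digest::SHA256.digest(uri)) }
  digest == "\x00" * 32
end

uris = ['https://example.com/users/alice', 'https://example.com/users/bob']
expected = uris.reduce("\x00".b * 32) { |acc, uri| Xorcist.xor!(acc, Digest::SHA256.digest(uri)) }.unpack1('H*')

followers_digest_match?(expected, uris)         # => true  (safe to remove unexpected local followers)
followers_digest_match?(expected, uris.take(1)) # => false (sets differ, removal is skipped)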

View File

@@ -5,10 +5,10 @@ class ActivityPub::FollowersSynchronizationWorker
sidekiq_options queue: 'push', lock: :until_executed
def perform(account_id, url)
def perform(account_id, url, expected_digest = nil)
@account = Account.find_by(id: account_id)
return true if @account.nil?
ActivityPub::SynchronizeFollowersService.new.call(@account, url)
ActivityPub::SynchronizeFollowersService.new.call(@account, url, expected_digest)
end
end

View File

@@ -59,7 +59,7 @@ services:
web:
# You can uncomment the following line if you want to not use the prebuilt image, for example if you have local code changes
# build: .
image: ghcr.io/mastodon/mastodon:v4.3.7
image: ghcr.io/mastodon/mastodon:v4.3.8
restart: always
env_file: .env.production
command: bundle exec puma -C config/puma.rb
@@ -83,7 +83,7 @@ services:
# build:
# dockerfile: ./streaming/Dockerfile
# context: .
image: ghcr.io/mastodon/mastodon-streaming:v4.3.7
image: ghcr.io/mastodon/mastodon-streaming:v4.3.8
restart: always
env_file: .env.production
command: node ./streaming/index.js
@@ -102,7 +102,7 @@ services:
sidekiq:
# You can uncomment the following line if you want to not use the prebuilt image, for example if you have local code changes
# build: .
image: ghcr.io/mastodon/mastodon:v4.3.7
image: ghcr.io/mastodon/mastodon:v4.3.8
restart: always
env_file: .env.production
command: bundle exec sidekiq

View File

@@ -17,7 +17,7 @@ module Mastodon
end
def default_prerelease
'alpha.4'
'alpha.5'
end
def prerelease

View File

@@ -35,7 +35,7 @@ RSpec.describe ActivityPub::SynchronizeFollowersService do
shared_examples 'synchronizes followers' do
before do
subject.call(actor, collection_uri)
subject.call(actor, collection_uri, expected_digest)
end
it 'maintains following records and sends Undo Follow to actor' do
@@ -51,6 +51,8 @@ RSpec.describe ActivityPub::SynchronizeFollowersService do
end
describe '#call' do
let(:expected_digest) { nil }
context 'when the endpoint is a Collection of actor URIs' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
@@ -197,5 +199,131 @@ RSpec.describe ActivityPub::SynchronizeFollowersService do
.to be_following(actor)
end
end
context 'when passing a matching expected_digest' do
let(:expected_digest) do
digest = "\x00" * 32
items.each do |uri|
Xorcist.xor!(digest, Digest::SHA256.digest(uri))
end
digest.unpack1('H*')
end
context 'when the endpoint is a Collection of actor URIs' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'synchronizes followers'
end
context 'when the endpoint is an OrderedCollection of actor URIs' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'OrderedCollection',
id: collection_uri,
orderedItems: items,
}.with_indifferent_access
end
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'synchronizes followers'
end
context 'when the endpoint is a single-page paginated Collection of actor URIs' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_uri,
first: {
type: 'CollectionPage',
partOf: collection_uri,
items: items,
},
}.with_indifferent_access
end
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'synchronizes followers'
end
end
context 'when passing a non-matching expected_digest' do
let(:expected_digest) { '123456789' }
context 'when the endpoint is a Collection of actor URIs' do
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it 'does not remove followers' do
follower_ids = actor.followers.reload.pluck(:id)
subject.call(actor, collection_uri, expected_digest)
expect(follower_ids - actor.followers.reload.pluck(:id)).to be_empty
end
end
context 'when the endpoint is an OrderedCollection of actor URIs' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'OrderedCollection',
id: collection_uri,
orderedItems: items,
}.with_indifferent_access
end
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it 'does not remove followers' do
follower_ids = actor.followers.reload.pluck(:id)
subject.call(actor, collection_uri, expected_digest)
expect(follower_ids - actor.followers.reload.pluck(:id)).to be_empty
end
end
context 'when the endpoint is a single-page paginated Collection of actor URIs' do
let(:payload) do
{
'@context': 'https://www.w3.org/ns/activitystreams',
type: 'Collection',
id: collection_uri,
first: {
type: 'CollectionPage',
partOf: collection_uri,
items: items,
},
}.with_indifferent_access
end
before do
stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it 'does not remove followers' do
follower_ids = actor.followers.reload.pluck(:id)
subject.call(actor, collection_uri, expected_digest)
expect(follower_ids - actor.followers.reload.pluck(:id)).to be_empty
end
end
end
end
end

View File

@@ -15,7 +15,13 @@ RSpec.describe ActivityPub::FollowersSynchronizationWorker do
it 'sends the status to the service' do
worker.perform(account.id, url)
expect(service).to have_received(:call).with(account, url)
expect(service).to have_received(:call).with(account, url, nil)
end
it 'sends the status to the service with the passed digest' do
worker.perform(account.id, url, 'digest-123')
expect(service).to have_received(:call).with(account, url, 'digest-123')
end
it 'returns nil for non-existent record' do