Compare commits

...

5 Commits

Author SHA1 Message Date
Emelia Smith
21a49c1881
Merge 8d8efac733 into fbe9728f36 2025-05-06 15:05:46 +00:00
Claire
fbe9728f36
Bump version to v4.3.8 (#34626) 2025-05-06 14:17:07 +00:00
Claire
3bbf3e9709
Fix code style issue (#34624) 2025-05-06 13:35:54 +00:00
Claire
79931bf3ae
Merge commit from fork
* Check scheme in account and post links

* Harden media attachments

* Client-side mitigation

* Client-side mitigation for media attachments
2025-05-06 15:02:13 +02:00
Emelia Smith
8d8efac733
Use libpq compat mode for DATABASE_URL and recommend using it for DB_SSLMODE 2025-04-23 20:22:42 +02:00
10 changed files with 115 additions and 77 deletions

View File

@@ -2,9 +2,34 @@
All notable changes to this project will be documented in this file.
## [4.3.8] - 2025-05-06
### Security
- Update dependencies
- Check scheme on account, profile, and media URLs ([GHSA-x2rc-v5wx-g3m5](https://github.com/mastodon/mastodon/security/advisories/GHSA-x2rc-v5wx-g3m5))
### Added
- Add warning for REDIS_NAMESPACE deprecation at startup (#34581 by @ClearlyClaire)
- Add built-in context for interaction policies (#34574 by @ClearlyClaire)
### Changed
- Change activity distribution error handling to skip retrying for deleted accounts (#33617 by @ClearlyClaire)
### Removed
- Remove double-query for signed query strings (#34610 by @ClearlyClaire)
### Fixed
- Fix incorrect redirect in response to unauthenticated API requests in limited federation mode (#34549 by @ClearlyClaire)
- Fix sign-up e-mail confirmation page reloading on error or redirect (#34548 by @ClearlyClaire)
## [4.3.7] - 2025-04-02
### Add
### Added
- Add delay to profile updates to debounce them (#34137 by @ClearlyClaire)
- Add support for paginating partial collections in `SynchronizeFollowersService` (#34272 and #34277 by @ClearlyClaire)

View File

@@ -77,6 +77,17 @@ export function normalizeStatus(status, normalOldStatus) {
normalStatus.contentHtml = emojify(normalStatus.content, emojiMap);
normalStatus.spoilerHtml = emojify(escapeTextContentForBrowser(spoilerText), emojiMap);
normalStatus.hidden = expandSpoilers ? false : spoilerText.length > 0 || normalStatus.sensitive;
if (normalStatus.url && !(normalStatus.url.startsWith('http://') || normalStatus.url.startsWith('https://'))) {
normalStatus.url = null;
}
normalStatus.url ||= normalStatus.uri;
normalStatus.media_attachments.forEach(item => {
if (item.remote_url && !(item.remote_url.startsWith('http://') || item.remote_url.startsWith('https://')))
item.remote_url = null;
});
}
if (normalOldStatus) {

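For illustration, a minimal standalone sketch of what the client-side guard above does, using an invented status object (the values are hypothetical, not taken from this PR):

```js
// Sketch of the scheme check added in the hunk above.
// The status values below are made up for illustration.
const normalStatus = {
  url: 'javascript:alert(1)', // hypothetical non-http(s) url supplied by a remote server
  uri: 'https://remote.example/users/alice/statuses/1',
};

// Drop the url unless it uses http(s), then fall back to the ActivityPub uri.
if (normalStatus.url && !(normalStatus.url.startsWith('http://') || normalStatus.url.startsWith('https://'))) {
  normalStatus.url = null;
}
normalStatus.url ||= normalStatus.uri;

console.log(normalStatus.url); // => 'https://remote.example/users/alice/statuses/1'
```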
View File

@@ -144,5 +144,10 @@ export function createAccountFromServerJSON(serverJSON: ApiAccountJSON) {
),
note_emojified: emojify(accountJSON.note, emojiMap),
note_plain: unescapeHTML(accountJSON.note),
url:
accountJSON.url.startsWith('http://') ||
accountJSON.url.startsWith('https://')
? accountJSON.url
: accountJSON.uri,
});
}

View File

@@ -15,13 +15,15 @@ class ActivityPub::Parser::MediaAttachmentParser
end
def remote_url
Addressable::URI.parse(@json['url'])&.normalize&.to_s
url = Addressable::URI.parse(@json['url'])&.normalize&.to_s
url unless unsupported_uri_scheme?(url)
rescue Addressable::URI::InvalidURIError
nil
end
def thumbnail_remote_url
Addressable::URI.parse(@json['icon'].is_a?(Hash) ? @json['icon']['url'] : @json['icon'])&.normalize&.to_s
url = Addressable::URI.parse(@json['icon'].is_a?(Hash) ? @json['icon']['url'] : @json['icon'])&.normalize&.to_s
url unless unsupported_uri_scheme?(url)
rescue Addressable::URI::InvalidURIError
nil
end

View File

@@ -29,7 +29,10 @@ class ActivityPub::Parser::StatusParser
end
def url
url_to_href(@object['url'], 'text/html') if @object['url'].present?
return if @object['url'].blank?
url = url_to_href(@object['url'], 'text/html')
url unless unsupported_uri_scheme?(url)
end
def text

View File

@@ -4,6 +4,7 @@ require 'singleton'
class ActivityPub::TagManager
include Singleton
include JsonLdHelper
include RoutingHelper
CONTEXT = 'https://www.w3.org/ns/activitystreams'
@@ -17,7 +18,7 @@ class ActivityPub::TagManager
end
def url_for(target)
return target.url if target.respond_to?(:local?) && !target.local?
return unsupported_uri_scheme?(target.url) ? nil : target.url if target.respond_to?(:local?) && !target.local?
return unless target.respond_to?(:object_type)

View File

@@ -59,7 +59,7 @@ services:
web:
# You can uncomment the following line if you want to not use the prebuilt image, for example if you have local code changes
# build: .
image: ghcr.io/mastodon/mastodon:v4.3.7
image: ghcr.io/mastodon/mastodon:v4.3.8
restart: always
env_file: .env.production
command: bundle exec puma -C config/puma.rb
@@ -83,7 +83,7 @@ services:
# build:
# dockerfile: ./streaming/Dockerfile
# context: .
image: ghcr.io/mastodon/mastodon-streaming:v4.3.7
image: ghcr.io/mastodon/mastodon-streaming:v4.3.8
restart: always
env_file: .env.production
command: node ./streaming/index.js
@@ -102,7 +102,7 @@ services:
sidekiq:
# You can uncomment the following line if you want to not use the prebuilt image, for example if you have local code changes
# build: .
image: ghcr.io/mastodon/mastodon:v4.3.7
image: ghcr.io/mastodon/mastodon:v4.3.8
restart: always
env_file: .env.production
command: bundle exec sidekiq

View File

@@ -17,7 +17,7 @@ module Mastodon
end
def default_prerelease
'alpha.4'
'alpha.5'
end
def prerelease

View File

@@ -1,14 +1,15 @@
import pg from 'pg';
import pgConnectionString from 'pg-connection-string';
import { parse, toClientConfig } from 'pg-connection-string';
import { parseIntFromEnvValue } from './utils.js';
/**
* @param {NodeJS.ProcessEnv} env the `process.env` value to read configuration from
* @param {string} environment
* @param {import('pino').Logger} logger
* @returns {pg.PoolConfig} the configuration for the PostgreSQL connection
*/
export function configFromEnv(env, environment) {
export function configFromEnv(env, environment, logger) {
/** @type {Record<string, pg.PoolConfig>} */
const pgConfigs = {
development: {
@@ -16,7 +17,11 @@ export function configFromEnv(env, environment) {
password: env.DB_PASS || pg.defaults.password,
database: env.DB_NAME || 'mastodon_development',
host: env.DB_HOST || pg.defaults.host,
port: parseIntFromEnvValue(env.DB_PORT, pg.defaults.port ?? 5432, 'DB_PORT')
port: parseIntFromEnvValue(
env.DB_PORT,
pg.defaults.port ?? 5432,
'DB_PORT',
),
},
production: {
@@ -24,76 +29,55 @@ export function configFromEnv(env, environment) {
password: env.DB_PASS || '',
database: env.DB_NAME || 'mastodon_production',
host: env.DB_HOST || 'localhost',
port: parseIntFromEnvValue(env.DB_PORT, 5432, 'DB_PORT')
port: parseIntFromEnvValue(env.DB_PORT, 5432, 'DB_PORT'),
},
};
/**
* @type {pg.PoolConfig}
*/
let baseConfig = {};
let config = {};
if (env.DATABASE_URL) {
const parsedUrl = pgConnectionString.parse(env.DATABASE_URL);
// The result of dbUrlToConfig from pg-connection-string is not type
// compatible with pg.PoolConfig, since parts of the connection URL may be
// `null` when pg.PoolConfig expects `undefined`, as such we have to
// manually create the baseConfig object from the properties of the
// parsedUrl.
//
// For more information see:
// https://github.com/brianc/node-postgres/issues/2280
//
// FIXME: clean up once brianc/node-postgres#3128 lands
if (typeof parsedUrl.password === 'string') baseConfig.password = parsedUrl.password;
if (typeof parsedUrl.host === 'string') baseConfig.host = parsedUrl.host;
if (typeof parsedUrl.user === 'string') baseConfig.user = parsedUrl.user;
if (typeof parsedUrl.port === 'string' && parsedUrl.port) {
const parsedPort = parseInt(parsedUrl.port, 10);
if (isNaN(parsedPort)) {
throw new Error('Invalid port specified in DATABASE_URL environment variable');
}
baseConfig.port = parsedPort;
}
if (typeof parsedUrl.database === 'string') baseConfig.database = parsedUrl.database;
if (typeof parsedUrl.options === 'string') baseConfig.options = parsedUrl.options;
// The pg-connection-string type definition isn't correct, as parsedUrl.ssl
// can absolutely be an Object, this is to work around these incorrect
// types, including the casting of parsedUrl.ssl to Record<string, any>
if (typeof parsedUrl.ssl === 'boolean') {
baseConfig.ssl = parsedUrl.ssl;
} else if (typeof parsedUrl.ssl === 'object' && !Array.isArray(parsedUrl.ssl) && parsedUrl.ssl !== null) {
/** @type {Record<string, any>} */
const sslOptions = parsedUrl.ssl;
baseConfig.ssl = {};
baseConfig.ssl.cert = sslOptions.cert;
baseConfig.ssl.key = sslOptions.key;
baseConfig.ssl.ca = sslOptions.ca;
baseConfig.ssl.rejectUnauthorized = sslOptions.rejectUnauthorized;
// parse will throw if both useLibpqCompat option is true and the
// DATABASE_URL includes uselibpqcompat, so we're handling that case ahead
// of time to give a more specific error message:
if (env.DATABASE_URL.includes('uselibpqcompat')) {
throw new Error(
'SECURITY WARNING: Mastodon forces uselibpqcompat mode, do not include it in DATABASE_URL',
);
}
// Support overriding the database password in the connection URL
if (!baseConfig.password && env.DB_PASS) {
baseConfig.password = env.DB_PASS;
}
config = toClientConfig(parse(env.DATABASE_URL, { useLibpqCompat: true }));
} else if (Object.hasOwn(pgConfigs, environment)) {
baseConfig = pgConfigs[environment];
config = pgConfigs[environment];
if (env.DB_SSLMODE) {
switch(env.DB_SSLMODE) {
case 'disable':
case '':
baseConfig.ssl = false;
break;
case 'no-verify':
baseConfig.ssl = { rejectUnauthorized: false };
break;
default:
baseConfig.ssl = {};
break;
logger.warn(
'Using DB_SSLMODE is not recommended, instead use DATABASE_URL with SSL options',
);
switch (env.DB_SSLMODE) {
case 'disable': {
config.ssl = false;
break;
}
case 'prefer': {
config.ssl.rejectUnauthorized = false;
break;
}
case 'require': {
config.ssl.rejectUnauthorized = false;
break;
}
case 'verify-ca': {
throw new Error(
'SECURITY WARNING: Using sslmode=verify-ca requires specifying a CA with sslrootcert. If a public CA is used, verify-ca allows connections to a server that somebody else may have registered with the CA, making you vulnerable to Man-in-the-Middle attacks. Either specify a custom CA certificate with sslrootcert parameter or use sslmode=verify-full for proper security. This can only be configured using DATABASE_URL.',
);
}
case 'verify-full': {
break;
}
}
}
} else {
@@ -101,7 +85,7 @@ export function configFromEnv(env, environment) {
}
return {
...baseConfig,
...config,
max: parseIntFromEnvValue(env.DB_POOL, 10, 'DB_POOL'),
connectionTimeoutMillis: 15000,
// Deliberately set application_name to an empty string to prevent excessive
@@ -134,16 +118,23 @@ export function getPool(config, environment, logger) {
return async (queryTextOrConfig, values, ...rest) => {
const start = process.hrtime();
const result = await originalQuery.apply(pool, [queryTextOrConfig, values, ...rest]);
const result = await originalQuery.apply(pool, [
queryTextOrConfig,
values,
...rest,
]);
const duration = process.hrtime(start);
const durationInMs = (duration[0] * 1000000000 + duration[1]) / 1000000;
logger.debug({
query: queryTextOrConfig,
values,
duration: durationInMs
}, 'Executed database query');
logger.debug(
{
query: queryTextOrConfig,
values,
duration: durationInMs,
},
'Executed database query',
);
return result;
};

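As a usage note, the libpq-compatible parsing above means SSL behaviour can be expressed in the connection URL itself, which is what commit 8d8efac733 recommends instead of DB_SSLMODE. A rough sketch, with an entirely made-up DATABASE_URL:

```js
// Rough sketch of the new DATABASE_URL code path; the connection string is hypothetical.
import { parse, toClientConfig } from 'pg-connection-string';

const DATABASE_URL =
  'postgres://mastodon:secret@db.example.internal:5432/mastodon_production?sslmode=require';

// useLibpqCompat mirrors parse(env.DATABASE_URL, { useLibpqCompat: true }) in the diff above,
// so sslmode is interpreted with libpq semantics.
const config = toClientConfig(parse(DATABASE_URL, { useLibpqCompat: true }));

console.log(config.host, config.port, config.database);
console.log(config.ssl); // SSL settings derived from the sslmode query parameter
```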
View File

@@ -101,7 +101,7 @@ const CHANNEL_NAMES = [
];
const startServer = async () => {
const pgConfig = Database.configFromEnv(process.env, environment);
const pgConfig = Database.configFromEnv(process.env, environment, logger);
const pgPool = Database.getPool(pgConfig, environment, logger);
const metrics = setupMetrics(CHANNEL_NAMES, pgPool);