diff --git a/app/javascript/mastodon/features/ui/index.js b/app/javascript/mastodon/features/ui/index.js
index 86b89d1e7..b8c83c8ae 100644
--- a/app/javascript/mastodon/features/ui/index.js
+++ b/app/javascript/mastodon/features/ui/index.js
@@ -475,10 +475,10 @@ class UI extends React.PureComponent {
};
handleHotkeyBack = () => {
- if (window.history && window.history.length === 1) {
- this.context.router.history.push('/');
- } else {
+ if (window.history && window.history.state) {
this.context.router.history.goBack();
+ } else {
+ this.context.router.history.push('/');
}
};
diff --git a/app/javascript/mastodon/locales/en.json b/app/javascript/mastodon/locales/en.json
index 07fc61ff0..df830457e 100644
--- a/app/javascript/mastodon/locales/en.json
+++ b/app/javascript/mastodon/locales/en.json
@@ -162,6 +162,8 @@
"confirmations.discard_edit_media.message": "You have unsaved changes to the media description or preview, discard them anyway?",
"confirmations.domain_block.confirm": "Block entire domain",
"confirmations.domain_block.message": "Are you really, really sure you want to block the entire {domain}? In most cases a few targeted blocks or mutes are sufficient and preferable. You will not see content from that domain in any public timelines or your notifications. Your followers from that domain will be removed.",
+ "confirmations.edit.confirm": "Edit",
+ "confirmations.edit.message": "Editing now will overwrite the message you are currently composing. Are you sure you want to proceed?",
"confirmations.logout.confirm": "Log out",
"confirmations.logout.message": "Are you sure you want to log out?",
"confirmations.mute.confirm": "Mute",
diff --git a/app/javascript/styles/mastodon-light/diff.scss b/app/javascript/styles/mastodon-light/diff.scss
index 9c33e011d..ee1747ae9 100644
--- a/app/javascript/styles/mastodon-light/diff.scss
+++ b/app/javascript/styles/mastodon-light/diff.scss
@@ -259,6 +259,10 @@ html {
border-color: $ui-base-color;
}
+.upload-progress__backdrop {
+ background: $ui-base-color;
+}
+
// Change the background colors of statuses
.focusable:focus {
background: $ui-base-color;
diff --git a/app/javascript/styles/mastodon/components.scss b/app/javascript/styles/mastodon/components.scss
index 41458826e..52216deac 100644
--- a/app/javascript/styles/mastodon/components.scss
+++ b/app/javascript/styles/mastodon/components.scss
@@ -4608,7 +4608,7 @@ a.status-card.compact:hover {
width: 100%;
height: 6px;
border-radius: 6px;
- background: $ui-base-lighter-color;
+ background: darken($simple-background-color, 8%);
position: relative;
margin-top: 5px;
}
diff --git a/app/lib/plain_text_formatter.rb b/app/lib/plain_text_formatter.rb
index d1ff6808b..3143cb4c9 100644
--- a/app/lib/plain_text_formatter.rb
+++ b/app/lib/plain_text_formatter.rb
@@ -16,10 +16,7 @@ class PlainTextFormatter
if local?
text
else
- node = Nokogiri::HTML.fragment(insert_newlines)
- # Elements that are entirely removed with our Sanitize config
- node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
- node.text.chomp
+ html_entities.decode(strip_tags(insert_newlines)).chomp
end
end
@@ -28,4 +25,8 @@ class PlainTextFormatter
def insert_newlines
text.gsub(NEWLINE_TAGS_RE) { |match| "#{match}\n" }
end
+
+ def html_entities
+ HTMLEntities.new
+ end
end
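For orientation, here is a rough sketch of what the rewritten remote-status path does to a sample body. The regexes below only approximate NEWLINE_TAGS_RE and ActionView's strip_tags, neither of which appears in this diff, so treat it as an illustration rather than the exact implementation:

require 'htmlentities'

html = '<p>Hello &amp; welcome<br>friends</p>'
# insert_newlines: append a newline after each newline-producing tag (approximated pattern)
with_newlines = html.gsub(%r{<br */?>|</p>}) { |match| "#{match}\n" }
# strip_tags (simplified stand-in): drop the remaining markup, then decode entities
plain = with_newlines.gsub(/<[^>]+>/, '')
HTMLEntities.new.decode(plain).chomp # => "Hello & welcome\nfriends"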
diff --git a/app/models/account.rb b/app/models/account.rb
index 262285a09..28bd828b0 100644
--- a/app/models/account.rb
+++ b/app/models/account.rb
@@ -107,7 +107,7 @@ class Account < ApplicationRecord
scope :bots, -> { where(actor_type: %w(Application Service)) }
scope :groups, -> { where(actor_type: 'Group') }
scope :alphabetic, -> { order(domain: :asc, username: :asc) }
- scope :matches_username, ->(value) { where(arel_table[:username].matches("#{value}%")) }
+ scope :matches_username, ->(value) { where('lower((username)::text) LIKE lower(?)', "#{value}%") }
scope :matches_display_name, ->(value) { where(arel_table[:display_name].matches("#{value}%")) }
scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) }
scope :without_unapproved, -> { left_outer_joins(:user).remote.or(left_outer_joins(:user).merge(User.approved.confirmed)) }
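Call sites for the scope are unchanged; only the generated SQL differs. A sketch of the resulting clause on PostgreSQL (illustrative output, assuming the existing index on lower(username)):

Account.matches_username('Garg').to_sql
# => SELECT "accounts".* FROM "accounts" WHERE (lower((username)::text) LIKE lower('Garg%'))
# The previous arel `matches` form compiled to ILIKE, which cannot use an index on
# lower(username); the explicit lower(...) LIKE lower(?) form can, while prefix
# searches stay case-insensitive.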
diff --git a/app/models/backup.rb b/app/models/backup.rb
index 8823e7cae..20e6e8aa8 100644
--- a/app/models/backup.rb
+++ b/app/models/backup.rb
@@ -17,6 +17,6 @@
class Backup < ApplicationRecord
belongs_to :user, inverse_of: :backups
- has_attached_file :dump, s3_permissions: ->(*) { ENV['S3_PERMISSION'] == '' ? nil : 'private' }
+ has_attached_file :dump, s3_permissions: 'private'
do_not_validate_attachment_file_type :dump
end
diff --git a/app/views/admin/reports/actions/preview.html.haml b/app/views/admin/reports/actions/preview.html.haml
index 58745319c..70edb48d8 100644
--- a/app/views/admin/reports/actions/preview.html.haml
+++ b/app/views/admin/reports/actions/preview.html.haml
@@ -54,15 +54,15 @@
.strike-card__statuses-list__item
- if (status = status_map[status_id.to_i])
.one-liner
- = link_to short_account_status_url(@report.target_account, status_id), class: 'emojify' do
- = one_line_preview(status)
+ .emojify= one_line_preview(status)
- - status.ordered_media_attachments.each do |media_attachment|
- %abbr{ title: media_attachment.description }
- = fa_icon 'link'
- = media_attachment.file_file_name
+ - status.ordered_media_attachments.each do |media_attachment|
+ %abbr{ title: media_attachment.description }
+ = fa_icon 'link'
+ = media_attachment.file_file_name
.strike-card__statuses-list__item__meta
- %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
+ = link_to ActivityPub::TagManager.instance.url_for(status), target: '_blank' do
+ %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
- unless status.application.nil?
·
= status.application.name
diff --git a/app/workers/activitypub/delivery_worker.rb b/app/workers/activitypub/delivery_worker.rb
index d9153132b..7c1c14766 100644
--- a/app/workers/activitypub/delivery_worker.rb
+++ b/app/workers/activitypub/delivery_worker.rb
@@ -10,6 +10,16 @@ class ActivityPub::DeliveryWorker
sidekiq_options queue: 'push', retry: 16, dead: false
+ # Unfortunately, we cannot control Sidekiq's jitter, so add our own
+ sidekiq_retry_in do |count|
+ # This is Sidekiq's default delay
+ delay = (count**4) + 15
+ # Our custom jitter, which will be added to Sidekiq's built-in one.
+ # Sidekiq's built-in jitter is `rand(10) * (count + 1)`
+ jitter = rand(0.5 * (count**4))
+ delay + jitter
+ end
+
HEADERS = { 'Content-Type' => 'application/activity+json' }.freeze
def perform(json, source_account_id, inbox_url, options = {})
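To make the added jitter concrete, a small sketch of the resulting delay window per retry attempt (Sidekiq's own built-in jitter is left out; the loop just evaluates the same arithmetic as the block above):

(0..15).each do |count|
  base      = (count**4) + 15
  max_extra = 0.5 * (count**4) # upper bound of the custom jitter
  puts format('retry %2d: %d to ~%d seconds', count, base, base + max_extra)
end
# e.g. retry 5 waits 640..~952 s, retry 10 waits 10_015..~15_015 s, retry 15 waits 50_640..~75_952 s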
diff --git a/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb b/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
index bd92fe32c..d245f6bbd 100644
--- a/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
+++ b/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
@@ -7,7 +7,7 @@ class Scheduler::AccountsStatusesCleanupScheduler
# This limit is mostly to be nice to the fediverse at large and not
# generate too much traffic.
# This also helps limit the running time of the scheduler itself.
- MAX_BUDGET = 50
+ MAX_BUDGET = 150
# This is an attempt to spread the load across instances, as various
# accounts are likely to have various followers.
@@ -15,28 +15,22 @@ class Scheduler::AccountsStatusesCleanupScheduler
# This is an attempt to limit the workload generated by status removal
# jobs to something the particular instance can handle.
- PER_THREAD_BUDGET = 5
+ PER_THREAD_BUDGET = 6
# Those avoid loading an instance that is already under load
- MAX_DEFAULT_SIZE = 2
+ MAX_DEFAULT_SIZE = 200
MAX_DEFAULT_LATENCY = 5
- MAX_PUSH_SIZE = 5
+ MAX_PUSH_SIZE = 500
MAX_PUSH_LATENCY = 10
+
# 'pull' queue has lower priority jobs, and it's unlikely that pushing
deletes would cause many issues with this queue if it didn't cause issues
# with default and push. Yet, do not enqueue deletes if the instance is
# lagging behind too much.
- MAX_PULL_SIZE = 500
- MAX_PULL_LATENCY = 300
+ MAX_PULL_SIZE = 10_000
+ MAX_PULL_LATENCY = 5.minutes.to_i
- # This is less of an issue in general, but deleting old statuses is likely
- # to cause delivery errors, and thus increase the number of jobs to be retried.
- # This doesn't directly translate to load, but connection errors and a high
- # number of dead instances may lead to this spiraling out of control if
- # unchecked.
- MAX_RETRY_SIZE = 50_000
-
- sidekiq_options retry: 0, lock: :until_executed
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
def perform
return if under_load?
@@ -72,7 +66,6 @@ class Scheduler::AccountsStatusesCleanupScheduler
end
def under_load?
- return true if Sidekiq::Stats.new.retry_size > MAX_RETRY_SIZE
queue_under_load?('default', MAX_DEFAULT_SIZE, MAX_DEFAULT_LATENCY) || queue_under_load?('push', MAX_PUSH_SIZE, MAX_PUSH_LATENCY) || queue_under_load?('pull', MAX_PULL_SIZE, MAX_PULL_LATENCY)
end
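queue_under_load? itself is not shown in this diff; presumably it compares each queue's backlog and latency against the thresholds above, roughly like this sketch (method shape assumed, not taken from the source):

def queue_under_load?(name, max_enqueued, max_latency)
  queue = Sidekiq::Queue.new(name)
  queue.size > max_enqueued || queue.latency > max_latency
end

# With the relaxed constants, cleanup now only pauses when, for example, the 'pull'
# queue holds more than 10,000 jobs or lags by more than five minutes.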
diff --git a/config/application.rb b/config/application.rb
index 74854f5a9..97fc0f148 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -36,6 +36,7 @@ require_relative '../lib/terrapin/multi_pipe_extensions'
require_relative '../lib/mastodon/snowflake'
require_relative '../lib/mastodon/version'
require_relative '../lib/mastodon/rack_middleware'
+require_relative '../lib/public_file_server_middleware'
require_relative '../lib/devise/two_factor_ldap_authenticatable'
require_relative '../lib/devise/two_factor_pam_authenticatable'
require_relative '../lib/chewy/strategy/mastodon'
@@ -187,6 +188,10 @@ module Mastodon
config.active_job.queue_adapter = :sidekiq
config.action_mailer.deliver_later_queue_name = 'mailers'
+ # We use our own middleware for this
+ config.public_file_server.enabled = false
+
+ config.middleware.use PublicFileServerMiddleware if Rails.env.development? || ENV['RAILS_SERVE_STATIC_FILES'] == 'true'
config.middleware.use Rack::Attack
config.middleware.use Mastodon::RackMiddleware
diff --git a/config/environments/development.rb b/config/environments/development.rb
index de8762ff7..c7b4a5d03 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -16,12 +16,7 @@ Rails.application.configure do
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
-
config.cache_store = :redis_cache_store, REDIS_CACHE_PARAMS
-
- config.public_file_server.headers = {
- 'Cache-Control' => "public, max-age=#{2.days.to_i}",
- }
else
config.action_controller.perform_caching = false
diff --git a/config/environments/production.rb b/config/environments/production.rb
index 99c9bb40c..00d783477 100644
--- a/config/environments/production.rb
+++ b/config/environments/production.rb
@@ -19,27 +19,16 @@ Rails.application.configure do
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
- # Disable serving static files from the `/public` folder by default since
- # Apache or NGINX already handles this.
- config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
-
ActiveSupport::Logger.new(STDOUT).tap do |logger|
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
- # Compress JavaScripts and CSS.
- # config.assets.js_compressor = Uglifier.new(mangle: false)
- # config.assets.css_compressor = :sass
-
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
- # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
-
# Specifies the header that your server uses for sending files.
- # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
- config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
+ config.action_dispatch.x_sendfile_header = ENV['SENDFILE_HEADER'] if ENV['SENDFILE_HEADER'].present?
# Allow to specify public IP of reverse proxy if it's needed
config.action_dispatch.trusted_proxies = ENV['TRUSTED_PROXY_IP'].split(/(?:\s*,\s*|\s+)/).map { |item| IPAddr.new(item) } if ENV['TRUSTED_PROXY_IP'].present?
@@ -67,7 +56,7 @@ Rails.application.configure do
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# English when a translation cannot be found).
- config.i18n.fallbacks = [:en]
+ config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
@@ -128,6 +117,7 @@ Rails.application.configure do
enable_starttls_auto: enable_starttls_auto,
tls: ENV['SMTP_TLS'].presence && ENV['SMTP_TLS'] == 'true',
ssl: ENV['SMTP_SSL'].presence && ENV['SMTP_SSL'] == 'true',
+ read_timeout: 20,
}
config.action_mailer.delivery_method = ENV.fetch('SMTP_DELIVERY_METHOD', 'smtp').to_sym
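Deployments that relied on the previously hard-coded X-Accel-Redirect header now need to opt back in explicitly, e.g. `SENDFILE_HEADER=X-Accel-Redirect` for nginx or `SENDFILE_HEADER=X-Sendfile` for Apache with mod_xsendfile; leaving the variable unset simply disables sendfile offloading and lets Rails stream the file itself.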
diff --git a/config/environments/test.rb b/config/environments/test.rb
index ef3cb2e48..44786962a 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -12,11 +12,6 @@ Rails.application.configure do
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
- # Configure public file server for tests with Cache-Control for performance.
- config.public_file_server.enabled = true
- config.public_file_server.headers = {
- 'Cache-Control' => "public, max-age=#{1.hour.to_i}"
- }
config.assets.digest = false
# Show full error reports and disable caching.
diff --git a/config/initializers/chewy.rb b/config/initializers/chewy.rb
index daf4a5f32..2a91efefc 100644
--- a/config/initializers/chewy.rb
+++ b/config/initializers/chewy.rb
@@ -19,7 +19,6 @@ Chewy.settings = {
# cycle, which takes care of checking if Elasticsearch is enabled
# or not. However, mind that for the Rails console, the :urgent
# strategy is set automatically with no way to override it.
-Chewy.root_strategy = :bypass_with_warning if Rails.env.production?
Chewy.request_strategy = :mastodon
Chewy.use_after_commit_callbacks = false
diff --git a/config/puma.rb b/config/puma.rb
index e59295445..c4e2b0b85 100644
--- a/config/puma.rb
+++ b/config/puma.rb
@@ -22,3 +22,5 @@ on_worker_boot do
end
plugin :tmp_restart
+
+set_remote_address(proxy_protocol: :v1) if ENV['PROXY_PROTO_V1'] == 'true'
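The PROXY protocol hook only helps behind a load balancer that actually sends the v1 preamble (for HAProxy, for instance, adding `send-proxy` to the backend server line), and it relies on a Puma version that supports `set_remote_address proxy_protocol: :v1`; with `PROXY_PROTO_V1` unset or not equal to `true`, behaviour is unchanged.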
diff --git a/docker-compose.yml b/docker-compose.yml
index db2f81d20..f603c2f7e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -56,7 +56,7 @@ services:
web:
build: .
- image: ghcr.io/mastodon/mastodon:v4.0.12
+ image: ghcr.io/mastodon/mastodon
restart: always
env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:
streaming:
build: .
- image: ghcr.io/mastodon/mastodon:v4.0.12
+ image: ghcr.io/mastodon/mastodon
restart: always
env_file: .env.production
command: node ./streaming
@@ -95,7 +95,7 @@ services:
sidekiq:
build: .
- image: ghcr.io/mastodon/mastodon:v4.0.12
+ image: ghcr.io/mastodon/mastodon
restart: always
env_file: .env.production
command: bundle exec sidekiq
diff --git a/lib/mastodon/accounts_cli.rb b/lib/mastodon/accounts_cli.rb
index 60dd18153..d4a6bbc12 100644
--- a/lib/mastodon/accounts_cli.rb
+++ b/lib/mastodon/accounts_cli.rb
@@ -372,16 +372,16 @@ module Mastodon
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
option :dry_run, type: :boolean
- desc 'refresh [USERNAME]', 'Fetch remote user data and files'
+ desc 'refresh [USERNAMES]', 'Fetch remote user data and files'
long_desc <<-LONG_DESC
Fetch remote user data and files for one or multiple accounts.
With the --all option, all remote accounts will be processed.
Through the --domain option, this can be narrowed down to a
- specific domain only. Otherwise, a single remote account must
- be specified with USERNAME.
+ specific domain only. Otherwise, remote accounts must be
+ specified with space-separated USERNAMES.
LONG_DESC
- def refresh(username = nil)
+ def refresh(*usernames)
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
if options[:domain] || options[:all]
@@ -397,19 +397,25 @@ module Mastodon
end
say("Refreshed #{processed} accounts#{dry_run}", :green, true)
- elsif username.present?
- username, domain = username.split('@')
- account = Account.find_remote(username, domain)
+ elsif !usernames.empty?
+ usernames.each do |user|
+ user, domain = user.split('@')
+ account = Account.find_remote(user, domain)
- if account.nil?
- say('No such account', :red)
- exit(1)
- end
+ if account.nil?
+ say('No such account', :red)
+ exit(1)
+ end
- unless options[:dry_run]
- account.reset_avatar!
- account.reset_header!
- account.save
+ next if options[:dry_run]
+
+ begin
+ account.reset_avatar!
+ account.reset_header!
+ account.save
+ rescue Mastodon::UnexpectedResponseError
+ say("Account failed: #{user}@#{domain}", :red)
+ end
end
say("OK#{dry_run}", :green)
@@ -631,7 +637,7 @@ module Mastodon
exit(1)
end
- unless options[:force] || migration.target_acount_id == account.moved_to_account_id
+ unless options[:force] || migration.target_account_id == account.moved_to_account_id
say('The specified account is not redirecting to its last migration target. Use --force if you want to replay the migration anyway', :red)
exit(1)
end
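Illustrative invocations under the new signature (account names are placeholders): `tootctl accounts refresh alice@example.com bob@example.net` refreshes several accounts in one run, while `tootctl accounts refresh --domain=example.com` and `--all` behave as before; a failure fetching one account's media is now reported as `Account failed: user@domain` and the remaining accounts still get processed.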
diff --git a/lib/mastodon/version.rb b/lib/mastodon/version.rb
index aab6ea8ba..ea5fad277 100644
--- a/lib/mastodon/version.rb
+++ b/lib/mastodon/version.rb
@@ -13,7 +13,7 @@ module Mastodon
end
def patch
- 0
+ 1
end
def flags
diff --git a/lib/public_file_server_middleware.rb b/lib/public_file_server_middleware.rb
new file mode 100644
index 000000000..3799230a2
--- /dev/null
+++ b/lib/public_file_server_middleware.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'action_dispatch/middleware/static'
+
+class PublicFileServerMiddleware
+ SERVICE_WORKER_TTL = 7.days.to_i
+ CACHE_TTL = 28.days.to_i
+
+ def initialize(app)
+ @app = app
+ @file_handler = ActionDispatch::FileHandler.new(Rails.application.paths['public'].first)
+ end
+
+ def call(env)
+ file = @file_handler.attempt(env)
+
+ # If the request is not a static file, move on!
+ return @app.call(env) if file.nil?
+
+ status, headers, response = file
+
+ # Set cache headers on static files. Some paths require different cache headers
+ headers['Cache-Control'] = begin
+ request_path = env['REQUEST_PATH']
+
+ if request_path.start_with?('/sw.js')
+ "public, max-age=#{SERVICE_WORKER_TTL}, must-revalidate"
+ elsif request_path.start_with?(paperclip_root_url)
+ "public, max-age=#{CACHE_TTL}, immutable"
+ else
+ "public, max-age=#{CACHE_TTL}, must-revalidate"
+ end
+ end
+
+ [status, headers, response]
+ end
+
+ private
+
+ def paperclip_root_url
+ ENV.fetch('PAPERCLIP_ROOT_URL', '/system')
+ end
+end
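For reference, the cache headers this middleware ends up emitting, derived from the constants above (paths are illustrative examples):

# /sw.js                        -> Cache-Control: public, max-age=604800, must-revalidate
# /system/accounts/avatars/...  -> Cache-Control: public, max-age=2419200, immutable
# any other static file         -> Cache-Control: public, max-age=2419200, must-revalidate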
diff --git a/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb b/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
index 8f20725c8..d953cc39d 100644
--- a/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
+++ b/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
@@ -23,7 +23,6 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
},
]
end
- let(:retry_size) { 0 }
before do
queue_stub = double
@@ -33,7 +32,6 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
allow(Sidekiq::ProcessSet).to receive(:new).and_return(process_set_stub)
sidekiq_stats_stub = double
- allow(sidekiq_stats_stub).to receive(:retry_size).and_return(retry_size)
allow(Sidekiq::Stats).to receive(:new).and_return(sidekiq_stats_stub)
# Create a bunch of old statuses
@@ -70,14 +68,6 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
expect(subject.under_load?).to be true
end
end
-
- context 'when there is a huge amount of jobs to retry' do
- let(:retry_size) { 1_000_000 }
-
- it 'returns true' do
- expect(subject.under_load?).to be true
- end
- end
end
describe '#get_budget' do
diff --git a/streaming/index.js b/streaming/index.js
index 6c3e9b346..0a78a683c 100644
--- a/streaming/index.js
+++ b/streaming/index.js
@@ -942,15 +942,15 @@ const startWorker = async (workerId) => {
res.write('# TYPE connected_channels gauge\n');
res.write('# HELP connected_channels The number of Redis channels the streaming server is subscribed to\n');
res.write(`connected_channels ${Object.keys(subs).length}.0\n`);
- res.write('# TYPE pg.pool.total_connections gauge \n');
- res.write('# HELP pg.pool.total_connections The total number of clients existing within the pool\n');
- res.write(`pg.pool.total_connections ${pgPool.totalCount}.0\n`);
- res.write('# TYPE pg.pool.idle_connections gauge \n');
- res.write('# HELP pg.pool.idle_connections The number of clients which are not checked out but are currently idle in the pool\n');
- res.write(`pg.pool.idle_connections ${pgPool.idleCount}.0\n`);
- res.write('# TYPE pg.pool.waiting_queries gauge \n');
- res.write('# HELP pg.pool.waiting_queries The number of queued requests waiting on a client when all clients are checked out\n');
- res.write(`pg.pool.waiting_queries ${pgPool.waitingCount}.0\n`);
+ res.write('# TYPE pg_pool_total_connections gauge\n');
+ res.write('# HELP pg_pool_total_connections The total number of clients existing within the pool\n');
+ res.write(`pg_pool_total_connections ${pgPool.totalCount}.0\n`);
+ res.write('# TYPE pg_pool_idle_connections gauge\n');
+ res.write('# HELP pg_pool_idle_connections The number of clients which are not checked out but are currently idle in the pool\n');
+ res.write(`pg_pool_idle_connections ${pgPool.idleCount}.0\n`);
+ res.write('# TYPE pg_pool_waiting_queries gauge\n');
+ res.write('# HELP pg_pool_waiting_queries The number of queued requests waiting on a client when all clients are checked out\n');
+ res.write(`pg_pool_waiting_queries ${pgPool.waitingCount}.0\n`);
res.write('# EOF\n');
res.end();
}));