Merge pull request #2366 from ClearlyClaire/glitch-soc/merge-upstream

Merge upstream changes up to 3a4d3e9d4b
Authored by Claire on 2023-08-12 21:27:29 +02:00; committed by GitHub
commit 678fa1e6af
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
24 changed files with 224 additions and 20 deletions

View File

@@ -0,0 +1,21 @@
# frozen_string_literal: true
class Api::V1::Instances::LanguagesController < Api::BaseController
skip_before_action :require_authenticated_user!, unless: :limited_federation_mode?
skip_around_action :set_locale
before_action :set_languages
vary_by ''
def show
cache_even_if_authenticated!
render json: @languages, each_serializer: REST::LanguageSerializer
end
private
def set_languages
@languages = LanguagesHelper::SUPPORTED_LOCALES.keys.map { |code| LanguagePresenter.new(code) }
end
end

View File

@@ -13,4 +13,30 @@ ready(() => {
      console.error(error);
    });
  }, 5000);
+  document.querySelectorAll('.timer-button').forEach(button => {
+    let counter = 30;
+    const container = document.createElement('span');
+    const updateCounter = () => {
+      container.innerText = ` (${counter})`;
+    };
+    updateCounter();
+    const countdown = setInterval(() => {
+      counter--;
+      if (counter === 0) {
+        button.disabled = false;
+        button.removeChild(container);
+        clearInterval(countdown);
+      } else {
+        updateCounter();
+      }
+    }, 1000);
+    button.appendChild(container);
+  });
});

View File

@@ -13,4 +13,30 @@ ready(() => {
      console.error(error);
    });
  }, 5000);
+  document.querySelectorAll('.timer-button').forEach(button => {
+    let counter = 30;
+    const container = document.createElement('span');
+    const updateCounter = () => {
+      container.innerText = ` (${counter})`;
+    };
+    updateCounter();
+    const countdown = setInterval(() => {
+      counter--;
+      if (counter === 0) {
+        button.disabled = false;
+        button.removeChild(container);
+        clearInterval(countdown);
+      } else {
+        updateCounter();
+      }
+    }, 1000);
+    button.appendChild(container);
+  });
});

View File

@@ -346,7 +346,7 @@ class Request
    end
    def private_address_exceptions
-      @private_address_exceptions = (ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(',').map { |addr| IPAddr.new(addr) }
+      @private_address_exceptions = (ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(/(?:\s*,\s*|\s+)/).map { |addr| IPAddr.new(addr) }
    end
  end
end
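
The only change here is the delimiter: ALLOWED_PRIVATE_ADDRESSES entries may now be separated by commas, whitespace, or a mix of both. A minimal sketch of the difference, using a made-up value for the variable:

# Hypothetical value; not taken from any real configuration.
value = '10.0.0.0/8, 192.168.0.0/16 172.16.0.0/12'

value.split(',')
# => ["10.0.0.0/8", " 192.168.0.0/16 172.16.0.0/12"]   # old: space-separated entries stay glued together

value.split(/(?:\s*,\s*|\s+)/)
# => ["10.0.0.0/8", "192.168.0.0/16", "172.16.0.0/12"] # new: commas and/or whitespace both work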

View File

@@ -0,0 +1,20 @@
# frozen_string_literal: true
class LanguagePresenter < ActiveModelSerializers::Model
attributes :code, :name, :native_name
def initialize(code)
super()
@code = code
@item = LanguagesHelper::SUPPORTED_LOCALES[code]
end
def name
@item[0]
end
def native_name
@item[1]
end
end

View File

@@ -0,0 +1,5 @@
# frozen_string_literal: true
class REST::LanguageSerializer < ActiveModel::Serializer
attributes :code, :name
end
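
Taken together, the controller, presenter, and serializer added in this merge expose the instance's supported locales. A minimal console sketch of what the pieces might produce (the :en input and the rendered values are illustrative, not part of the diff):

presenter = LanguagePresenter.new(:en)
presenter.name        # => "English" (first entry of LanguagesHelper::SUPPORTED_LOCALES[:en])
presenter.native_name # => "English" (second entry)

REST::LanguageSerializer.new(presenter).serializable_hash
# => { code: :en, name: "English" }  # native_name is not exposed by the serializer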

View File

@@ -61,10 +61,14 @@ class FetchLinkCardService < BaseService
  end
  def attach_card
+    with_redis_lock("attach_card:#{@status.id}") do
+      return if @status.preview_cards.any?
      @status.preview_cards << @card
      Rails.cache.delete(@status)
      Trends.links.register(@status)
+    end
  end
  def parse_urls
    urls = if @status.local?
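
The Redis lock plus the early return keep two workers from attaching a preview card to the same status concurrently, which is the race that produced the duplicate join rows cleaned up by the migration later in this merge. with_redis_lock is an existing helper in the codebase; the snippet below is only an illustrative stand-in showing the general shape of such a lock:

require 'redis'

# Hypothetical helper, not the codebase's implementation.
def with_simple_redis_lock(redis, key, ttl: 15)
  acquired = redis.set(key, '1', nx: true, ex: ttl) # only one caller can create the key
  return unless acquired

  begin
    yield
  ensure
    redis.del(key) # release the lock once the critical section finishes
  end
end

redis = Redis.new
with_simple_redis_lock(redis, 'attach_card:12') do
  # attach the card here; a concurrent caller that loses the race simply returns
end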

View File

@@ -17,6 +17,6 @@
    = f.input :email, required: true, hint: false, input_html: { 'aria-label': t('simple_form.labels.defaults.email'), autocomplete: 'off' }
  .actions
-    = f.submit t('auth.resend_confirmation'), class: 'button'
+    = f.button :button, t('auth.resend_confirmation'), type: :submit, class: 'button timer-button', disabled: true
.form-footer= render 'auth/shared/links'

View File

@@ -4,7 +4,7 @@ class Scheduler::FollowRecommendationsScheduler
  include Sidekiq::Worker
  include Redisable
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  # The maximum number of accounts that can be requested in one page from the
  # API is 80, and the suggestions API does not allow pagination. This number
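
This scheduler and the ones below all gain the same pair of options. lock: :until_executed and lock_ttl come from the sidekiq-unique-jobs gem: the lock is held from the moment a job is enqueued until it finishes executing, so a slow run cannot pile up duplicates, and the TTL lets the lock expire on its own so a crashed run cannot block the schedule indefinitely. A hedged sketch of the pattern (the class name is made up):

require 'sidekiq'
require 'active_support/core_ext/integer/time'

# Hypothetical worker illustrating the options added across this merge.
class ExamplePeriodicWorker
  include Sidekiq::Worker

  # retry: 0              - a failed run is simply picked up again at the next scheduled tick
  # lock: :until_executed - at most one instance enqueued or running at a time (sidekiq-unique-jobs)
  # lock_ttl: 1 day       - the lock expires on its own if a run crashes without releasing it
  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i

  def perform
    # periodic work goes here
  end
end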

View File

@@ -4,7 +4,7 @@ class Scheduler::IndexingScheduler
  include Sidekiq::Worker
  include Redisable
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  IMPORT_BATCH_SIZE = 1000
  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
@@ -16,9 +16,7 @@ class Scheduler::IndexingScheduler
    with_redis do |redis|
      redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
        type.import!(ids)
-        redis.pipelined do |pipeline|
-          pipeline.srem("chewy:queue:#{type.name}", ids)
-        end
+        redis.srem("chewy:queue:#{type.name}", ids)
      end
    end
  end
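
The second hunk drops a pipeline that wrapped a single command: pipelining only pays off when several commands share one round trip, so the lone SREM is now issued directly. A small before/after sketch, assuming a plain Redis connection (queue name and ids are made up):

require 'redis'

redis = Redis.new
ids = [1, 2, 3]

# Before: one command inside a pipeline, which saves no round trips
redis.pipelined { |pipeline| pipeline.srem('chewy:queue:Status', ids) }

# After: the same removal issued as a plain call
redis.srem('chewy:queue:Status', ids)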

View File

@@ -3,7 +3,7 @@
class Scheduler::InstanceRefreshScheduler
  include Sidekiq::Worker
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    Instance.refresh

View File

@@ -6,7 +6,7 @@ class Scheduler::IpCleanupScheduler
  IP_RETENTION_PERIOD = ENV.fetch('IP_RETENTION_PERIOD', 1.year).to_i.seconds.freeze
  SESSION_RETENTION_PERIOD = ENV.fetch('SESSION_RETENTION_PERIOD', 1.year).to_i.seconds.freeze
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    clean_ip_columns!

View File

@@ -3,7 +3,7 @@
class Scheduler::PgheroScheduler
  include Sidekiq::Worker
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    PgHero.capture_space_stats

View File

@@ -3,7 +3,7 @@
class Scheduler::ScheduledStatusesScheduler
  include Sidekiq::Worker
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    publish_scheduled_statuses!

View File

@@ -16,7 +16,7 @@ class Scheduler::SuspendedUserCleanupScheduler
  # has the capacity for it.
  MAX_DELETIONS_PER_JOB = 10
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    return if Sidekiq::Queue.new('pull').size > MAX_PULL_SIZE

View File

@@ -3,7 +3,7 @@
class Scheduler::UserCleanupScheduler
  include Sidekiq::Worker
-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    clean_unconfirmed_accounts!

View File

@@ -3,7 +3,7 @@
class Scheduler::VacuumScheduler
  include Sidekiq::Worker
-  sidekiq_options retry: 0, lock: :until_executed
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
  def perform
    vacuum_operations.each do |operation|

View File

@@ -121,6 +121,7 @@ namespace :api, format: false do
      resource :privacy_policy, only: [:show], controller: 'instances/privacy_policies'
      resource :extended_description, only: [:show], controller: 'instances/extended_descriptions'
      resource :translation_languages, only: [:show], controller: 'instances/translation_languages'
+     resource :languages, only: [:show], controller: 'instances/languages'
      resource :activity, only: [:show], controller: 'instances/activity'
    end

View File

@@ -23,7 +23,7 @@
    class: Scheduler::Trends::ReviewNotificationsScheduler
    queue: scheduler
  indexing_scheduler:
-    every: '5m'
+    interval: 1 minute
    class: Scheduler::IndexingScheduler
    queue: scheduler
  vacuum_scheduler:
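
For context: in sidekiq-scheduler terms, every fires at a fixed frequency regardless of how long a run takes, while interval waits the given amount of time after one run finishes before starting the next, so long indexing runs no longer overlap the following tick; the cadence also tightens from five minutes to one.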

View File

@@ -0,0 +1,39 @@
# frozen_string_literal: true
class AddUniqueIndexOnPreviewCardsStatuses < ActiveRecord::Migration[6.1]
disable_ddl_transaction!
def up
add_index :preview_cards_statuses, [:status_id, :preview_card_id], name: :preview_cards_statuses_pkey, algorithm: :concurrently, unique: true
rescue ActiveRecord::RecordNotUnique
deduplicate_and_reindex!
end
def down
remove_index :preview_cards_statuses, name: :preview_cards_statuses_pkey
end
private
def deduplicate_and_reindex!
deduplicate_preview_cards!
safety_assured { execute 'REINDEX INDEX preview_cards_statuses_pkey' }
rescue ActiveRecord::RecordNotUnique
retry
end
def deduplicate_preview_cards!
# Statuses should have only one preview card at most, even if that's not the database
# constraint we will end up with
duplicate_ids = select_all('SELECT status_id FROM preview_cards_statuses GROUP BY status_id HAVING count(*) > 1;').rows
duplicate_ids.each_slice(1000) do |ids|
# This one is tricky: since we don't have primary keys to keep only one record,
# use the physical `ctid`
safety_assured do
execute "DELETE FROM preview_cards_statuses p WHERE p.status_id IN (#{ids.join(', ')}) AND p.ctid NOT IN (SELECT q.ctid FROM preview_cards_statuses q WHERE q.status_id = p.status_id LIMIT 1)"
end
end
end
end
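
Because the join table has no primary key yet, duplicate rows are indistinguishable by their columns, so the migration falls back on PostgreSQL's physical row identifier ctid to keep exactly one row per status before the unique index is built. A quick way to confirm the outcome from a Rails console might look like this (illustrative only, not part of the migration):

# Any status that still had more than one join row would show up here.
leftover = ActiveRecord::Base.connection.select_values(<<~SQL)
  SELECT status_id
  FROM preview_cards_statuses
  GROUP BY status_id
  HAVING count(*) > 1
SQL

leftover.empty? # => true once the unique index has been built successfully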

View File

@@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddPrimaryKeyToPreviewCardsStatusesJoinTable < ActiveRecord::Migration[6.1]
disable_ddl_transaction!
def up
safety_assured do
execute 'ALTER TABLE preview_cards_statuses ADD PRIMARY KEY USING INDEX preview_cards_statuses_pkey'
end
end
def down
safety_assured do
# I have found no way to demote the primary key to an index, instead, re-create the index
execute 'CREATE UNIQUE INDEX CONCURRENTLY preview_cards_statuses_pkey_tmp ON preview_cards_statuses (status_id, preview_card_id)'
execute 'ALTER TABLE preview_cards_statuses DROP CONSTRAINT preview_cards_statuses_pkey'
execute 'ALTER INDEX preview_cards_statuses_pkey_tmp RENAME TO preview_cards_statuses_pkey'
end
end
end

View File

@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema[7.0].define(version: 2023_07_24_160715) do
+ActiveRecord::Schema[7.0].define(version: 2023_08_03_112520) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
@@ -805,7 +805,7 @@ ActiveRecord::Schema[7.0].define(version: 2023_07_24_160715) do
    t.index ["url"], name: "index_preview_cards_on_url", unique: true
  end
-  create_table "preview_cards_statuses", id: false, force: :cascade do |t|
+  create_table "preview_cards_statuses", primary_key: ["status_id", "preview_card_id"], force: :cascade do |t|
    t.bigint "preview_card_id", null: false
    t.bigint "status_id", null: false
    t.index ["status_id", "preview_card_id"], name: "index_preview_cards_statuses_on_status_id_and_preview_card_id"

View File

@@ -63,6 +63,11 @@ namespace :tests do
      puts 'Account domains not properly normalized'
      exit(1)
    end
+
+    unless Status.find(12).preview_cards.pluck(:url) == ['https://joinmastodon.org/']
+      puts 'Preview cards not deduplicated as expected'
+      exit(1)
+    end
  end
  desc 'Populate the database with test data for 2.4.3'
@@ -238,6 +243,11 @@ namespace :tests do
        (10, 2, '@admin hey!', NULL, 1, 3, now(), now()),
        (11, 1, '@user hey!', 10, 1, 3, now(), now());
+
+      INSERT INTO "statuses"
+        (id, account_id, text, created_at, updated_at)
+      VALUES
+        (12, 1, 'check out https://joinmastodon.org/', now(), now());
      -- mentions (from previous statuses)
      INSERT INTO "mentions"
@@ -326,6 +336,21 @@ namespace :tests do
        (1, 6, 2, 'Follow', 2, now(), now()),
        (2, 2, 1, 'Mention', 4, now(), now()),
        (3, 1, 2, 'Mention', 5, now(), now());
+
+      -- preview cards
+      INSERT INTO "preview_cards"
+        (id, url, title, created_at, updated_at)
+      VALUES
+        (1, 'https://joinmastodon.org/', 'Mastodon - Decentralized social media', now(), now());
+
+      -- many-to-many association between preview cards and statuses
+      INSERT INTO "preview_cards_statuses"
+        (status_id, preview_card_id)
+      VALUES
+        (12, 1),
+        (12, 1);
    SQL
  end
end

View File

@@ -0,0 +1,19 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'Languages' do
describe 'GET /api/v1/instance/languages' do
before do
get '/api/v1/instance/languages'
end
it 'returns http success' do
expect(response).to have_http_status(200)
end
it 'returns the supported languages' do
expect(body_as_json.pluck(:code)).to match_array LanguagesHelper::SUPPORTED_LOCALES.keys.map(&:to_s)
end
end
end
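
The spec exercises the new endpoint end to end. From outside, fetching it might look like the sketch below (the host name is made up; per the controller above, the endpoint needs no authentication unless the server runs in limited federation mode):

require 'net/http'
require 'json'

uri = URI('https://mastodon.example/api/v1/instance/languages')
languages = JSON.parse(Net::HTTP.get(uri))

languages.map { |language| language['code'] }.include?('en') # => true on a stock install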