
Bigint Migration for 'events' Table (Step 3) #4406

Open · wants to merge 1 commit into base: main
8 changes: 8 additions & 0 deletions db/migrations/20250327142351_bigint_migration_events_step1.rb
@@ -14,7 +14,15 @@

  down do
    if database_type == :postgres
      # There is no guarantee that the table is still empty - which was the condition for simply switching the id
      # column's type to bigint. We nevertheless revert the type to integer as this is the exact opposite of the up
      # migration. If the table contains a lot of data at this point, the change might be problematic, e.g. it might
      # take a long time.
      #
      # Ideally this down migration SHOULD NEVER BE EXECUTED IN A PRODUCTION SYSTEM! (It exists for proper
      # integration testing of the bigint migration steps.)
      VCAP::BigintMigration.revert_pk_to_integer(self, :events)

      VCAP::BigintMigration.drop_trigger_function(self, :events)
      VCAP::BigintMigration.drop_bigint_column(self, :events)
    end
19 changes: 19 additions & 0 deletions db/migrations/20250603103400_bigint_migration_events_step3a.rb
@@ -0,0 +1,19 @@
require 'database/bigint_migration'

Sequel.migration do
  up do
    if database_type == :postgres && !VCAP::BigintMigration.migration_completed?(self, :events) && !VCAP::BigintMigration.migration_skipped?(self, :events)
      begin
        VCAP::BigintMigration.add_check_constraint(self, :events)
      rescue Sequel::CheckConstraintViolation
        raise "Failed to add check constraint on 'events' table!\n" \
              "There are rows where 'id_bigint' does not match 'id', thus step 3 of the bigint migration cannot be executed.\n" \
              "Consider running rake task 'db:bigint_backfill[events]'."
      end
    end
  end

  down do
    VCAP::BigintMigration.drop_check_constraint(self, :events) if database_type == :postgres
  end
end
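
For reference, the check constraint added in step 3a corresponds roughly to the following PostgreSQL DDL (constraint name and condition taken from lib/database/bigint_migration.rb; the exact statement Sequel emits may differ slightly):

    ALTER TABLE events
      ADD CONSTRAINT events_check_id_bigint_matches_id
      CHECK (id_bigint IS NOT NULL AND id_bigint = id);

If adding the constraint fails because some rows have not been backfilled yet, the backfill can be run first via the rake task referenced in the error message, e.g. rake 'db:bigint_backfill[events]'.
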
48 changes: 48 additions & 0 deletions db/migrations/20250603103500_bigint_migration_events_step3b.rb
@@ -0,0 +1,48 @@
require 'database/bigint_migration'

Sequel.migration do
  up do
    if database_type == :postgres && VCAP::BigintMigration.has_check_constraint?(self, :events)
      # Drop check constraint and trigger function
      VCAP::BigintMigration.drop_check_constraint(self, :events)
      VCAP::BigintMigration.drop_trigger_function(self, :events)

      # Drop old id column
      VCAP::BigintMigration.drop_pk_column(self, :events)

      # Switch id_bigint -> id
      VCAP::BigintMigration.rename_bigint_column(self, :events)
      VCAP::BigintMigration.add_pk_constraint(self, :events)
      VCAP::BigintMigration.add_timestamp_pk_index(self, :events)
      VCAP::BigintMigration.set_pk_as_identity_with_correct_start_value(self, :events)
    end
  end

  down do
    if database_type == :postgres && VCAP::BigintMigration.migration_completed?(self, :events)
      # Revert id -> id_bigint
      VCAP::BigintMigration.drop_identity(self, :events)
      VCAP::BigintMigration.drop_timestamp_pk_index(self, :events)
      VCAP::BigintMigration.drop_pk_constraint(self, :events)
      VCAP::BigintMigration.revert_bigint_column_name(self, :events)

      # Restore old id column
      VCAP::BigintMigration.add_id_column(self, :events)

      # To restore the previous state it is necessary to backfill the id column. If the table contains a lot of data
      # at this point, this might be problematic, e.g. it might take a long time.
      #
      # Ideally this down migration SHOULD NEVER BE EXECUTED IN A PRODUCTION SYSTEM! (It exists for proper
      # integration testing of the bigint migration steps.)
      VCAP::BigintMigration.backfill_id(self, :events)

      VCAP::BigintMigration.add_pk_constraint(self, :events)
      VCAP::BigintMigration.add_timestamp_pk_index(self, :events)
      VCAP::BigintMigration.set_pk_as_identity_with_correct_start_value(self, :events)

      # Recreate trigger function and check constraint
      VCAP::BigintMigration.create_trigger_function(self, :events)
      VCAP::BigintMigration.add_check_constraint(self, :events)
    end
  end
end
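
Once step 3b has run, the end state can be spot-checked with a query along these lines (a sketch using PostgreSQL's standard information_schema; nothing here is added by this PR):

    SELECT data_type, is_identity, identity_generation
    FROM information_schema.columns
    WHERE table_name = 'events' AND column_name = 'id';
    -- expected: bigint | YES | BY DEFAULT
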
138 changes: 132 additions & 6 deletions lib/database/bigint_migration.rb
@@ -50,22 +50,124 @@ def backfill(logger, db, table, batch_size: 10_000, iterations: -1)

logger.info("starting bigint backfill on table '#{table}' (batch_size: #{batch_size}, iterations: #{iterations})")
loop do
updated_rows = db.
from(table, :batch).
with(:batch, db[table].select(:id).where(id_bigint: nil).order(:id).limit(batch_size).for_update.skip_locked).
where(Sequel.qualify(table, :id) => :batch__id).
update(id_bigint: :batch__id)
updated_rows = backfill_batch(db, table, :id, :id_bigint, batch_size)
logger.info("updated #{updated_rows} rows")
iterations -= 1 if iterations > 0
break if updated_rows < batch_size || iterations == 0
end
logger.info("finished bigint backfill on table '#{table}'")
end
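
# Hypothetical usage (not part of this change set), e.g. from the 'db:bigint_backfill[events]' rake task
# referenced in migration step 3a:
#   VCAP::BigintMigration.backfill(logger, db, :events, batch_size: 10_000)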

def migration_completed?(db, table)
  column_type(db, table, :id) == 'bigint'
end

def migration_skipped?(db, table)
  !column_exists?(db, table, :id_bigint)
end

def add_check_constraint(db, table)
  return if has_check_constraint?(db, table)

  constraint_name = check_constraint_name(table)
  db.alter_table(table) do
    add_constraint(constraint_name) do
      Sequel.lit('id_bigint IS NOT NULL AND id_bigint = id')
    end
  end
end

def drop_check_constraint(db, table)
  return unless has_check_constraint?(db, table)

  constraint_name = check_constraint_name(table)
  db.alter_table(table) do
    drop_constraint(constraint_name)
  end
end

def has_check_constraint?(db, table)
  check_constraint_exists?(db, table, check_constraint_name(table))
end

def drop_pk_column(db, table)
  db.drop_column(table, :id, if_exists: true)
end

def add_id_column(db, table)
  db.add_column(table, :id, :integer, if_not_exists: true)
end

def rename_bigint_column(db, table)
  db.rename_column(table, :id_bigint, :id) if column_exists?(db, table, :id_bigint) && !column_exists?(db, table, :id)
end

def revert_bigint_column_name(db, table)
  db.rename_column(table, :id, :id_bigint) if column_exists?(db, table, :id) && column_type(db, table, :id) == 'bigint' && !column_exists?(db, table, :id_bigint)
end

def add_pk_constraint(db, table)
  return if db.primary_key(table) == 'id'

  db.alter_table(table) do
    add_primary_key([:id])
  end
end

def drop_pk_constraint(db, table)
  return unless db.primary_key(table) == 'id'

  constraint_name = pk_constraint_name(table)
  db.alter_table(table) do
    drop_constraint(constraint_name)
    set_column_allow_null(:id, true)
  end
end

def add_timestamp_pk_index(db, table)
  db.add_index(table, %i[timestamp id], name: timestamp_id_index_name(table), unique: false, if_not_exists: true)
end

def drop_timestamp_pk_index(db, table)
  db.drop_index(table, %i[timestamp id], name: timestamp_id_index_name(table), if_exists: true)
end
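
# For reference, add_pk_constraint and add_timestamp_pk_index correspond roughly to the following PostgreSQL DDL
# (PostgreSQL names the primary key constraint events_pkey by default, matching pk_constraint_name below):
#   ALTER TABLE events ADD PRIMARY KEY (id);
#   CREATE INDEX IF NOT EXISTS events_timestamp_id_index ON events (timestamp, id);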

def set_pk_as_identity_with_correct_start_value(db, table)
  return if column_attribute(db, table, :id, :auto_increment) == true

  block = <<~BLOCK
    DO $$
    DECLARE
      max_id BIGINT;
    BEGIN
      SELECT COALESCE(MAX(id), 0) + 1 INTO max_id FROM #{table};

      EXECUTE format('ALTER TABLE #{table} ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (START WITH %s)', max_id);
    END $$;
  BLOCK
  db.run(block)
end

def drop_identity(db, table)
  db.run("ALTER TABLE #{table} ALTER COLUMN id DROP IDENTITY IF EXISTS")
end

def backfill_id(db, table)
  batch_size = 10_000
  loop do
    updated_rows = backfill_batch(db, table, :id_bigint, :id, batch_size)
    break if updated_rows < batch_size
  end
end
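
# Note: backfill_id is the reverse of the public backfill above - it copies id_bigint back into the restored
# integer id column, as used by the down migration of step 3b.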

private

def column_attribute(db, table, column, attribute)
  db.schema(table).find { |col, _| col == column }&.dig(1, attribute)
end

def column_type(db, table, column)
  column_attribute(db, table, column, :db_type)
end

def function_name(table)
@@ -79,5 +181,29 @@ def trigger_name(table)
def column_exists?(db, table, column)
  db[table].columns.include?(column)
end

def check_constraint_name(table)
  :"#{table}_check_id_bigint_matches_id"
end

def check_constraint_exists?(db, table, constraint_name)
  db.check_constraints(table).include?(constraint_name)
end

def pk_constraint_name(table)
  :"#{table}_pkey"
end

def timestamp_id_index_name(table)
  :"#{table}_timestamp_id_index"
end

def backfill_batch(db, table, from_column, to_column, batch_size)
  db.
    from(table, :batch).
    with(:batch, db[table].select(from_column).where(to_column => nil).order(from_column).limit(batch_size).for_update.skip_locked).
    where(Sequel.qualify(table, from_column) => :"batch__#{from_column}").
    update(to_column => :"batch__#{from_column}")
end
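
# With from_column :id, to_column :id_bigint and the default batch size, the query above is roughly:
#   WITH batch AS (SELECT id FROM events WHERE id_bigint IS NULL ORDER BY id LIMIT 10000 FOR UPDATE SKIP LOCKED)
#   UPDATE events SET id_bigint = batch.id FROM batch WHERE events.id = batch.id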
end
end
@@ -0,0 +1,17 @@
require 'spec_helper'
require 'migrations/helpers/bigint_migration_step3_shared_context'

RSpec.describe 'bigint migration - events table - step3a', isolation: :truncation, type: :migration do
  include_context 'bigint migration step3a' do
    let(:migration_filename_step1) { '20250327142351_bigint_migration_events_step1.rb' }
    let(:migration_filename_step3a) { '20250603103400_bigint_migration_events_step3a.rb' }
    let(:table) { :events }
    let(:insert) do
      lambda do |db|
        db[:events].insert(guid: SecureRandom.uuid, timestamp: Time.now.utc, type: 'type',
                           actor: 'actor', actor_type: 'actor_type',
                           actee: 'actee', actee_type: 'actee_type')
      end
    end
  end
end
@@ -0,0 +1,18 @@
require 'spec_helper'
require 'migrations/helpers/bigint_migration_step3_shared_context'

RSpec.describe 'bigint migration - events table - step3b', isolation: :truncation, type: :migration do
  include_context 'bigint migration step3b' do
    let(:migration_filename_step1) { '20250327142351_bigint_migration_events_step1.rb' }
    let(:migration_filename_step3a) { '20250603103400_bigint_migration_events_step3a.rb' }
    let(:migration_filename_step3b) { '20250603103500_bigint_migration_events_step3b.rb' }
    let(:table) { :events }
    let(:insert) do
      lambda do |db|
        db[:events].insert(guid: SecureRandom.uuid, timestamp: Time.now.utc, type: 'type',
                           actor: 'actor', actor_type: 'actor_type',
                           actee: 'actee', actee_type: 'actee_type')
      end
    end
  end
end
@@ -60,6 +60,10 @@
context 'when the table is not empty' do
  let!(:old_id) { insert.call(db) }

  after do
    db[table].delete # Necessary to successfully run subsequent migrations in the after block of the migration shared context...
  end

  it "does not change the id column's type" do
    expect(db).to have_table_with_column_and_type(table, :id, 'integer')

@@ -186,6 +190,10 @@
  run_migration
end

after do
  db[table].delete # Necessary to successfully run subsequent migrations in the after block of the migration shared context...
end

it 'drops the id_bigint column' do
  expect(db).to have_table_with_column(table, :id_bigint)
