Rename all temporary tables to be prefixed with _test

In a number of places we create temporary tables. This commit renames
these temporary tables to be prefixed with `_test_`
to exempt them from schema validation.
Kamil Trzciński 2021-11-04 11:46:43 +01:00
parent 367be6733d
commit 87ed30903f
15 changed files with 150 additions and 142 deletions
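
For reference, the convention being applied looks roughly like the sketch below: a spec creates a throwaway table in a `before(:all)` block, drops it in `after(:all)`, and binds an anonymous model to it; the `_test_` prefix on the table name is what marks it as exempt from schema validation. The table and spec names here are illustrative only and do not come from the diff.

# Hedged sketch of the `_test_`-prefix convention this commit applies.
# The table `_test_widgets` and the example spec are hypothetical.
require 'spec_helper'

RSpec.describe 'a spec that needs a temporary table' do
  before(:all) do
    ActiveRecord::Schema.define do
      # `_test_` prefix => the table is ignored by schema validation
      create_table :_test_widgets, force: true do |t|
        t.string :name, null: true
      end
    end
  end

  after(:all) do
    ActiveRecord::Schema.define do
      drop_table :_test_widgets, force: true
    end
  end

  let(:model) do
    Class.new(ActiveRecord::Base) do
      self.table_name = '_test_widgets'
    end
  end

  it 'reads and writes through the throwaway table' do
    expect { model.create!(name: 'example') }.to change { model.count }.by(1)
  end
end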

View file

@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob do
let(:table_name) { :copy_primary_key_test }
let(:table_name) { :_test_copy_primary_key_test }
let(:test_table) { table(table_name) }
let(:sub_batch_size) { 1000 }
let(:pause_ms) { 0 }

View file

@@ -286,7 +286,7 @@
let(:migration_wrapper) { Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper.new }
let(:migration_helpers) { ActiveRecord::Migration.new }
let(:table_name) { :_batched_migrations_test_table }
let(:table_name) { :_test_batched_migrations_test_table }
let(:column_name) { :some_id }
let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] }

View file

@@ -85,7 +85,7 @@
describe '.insert_all!' do
before do
ActiveRecord::Schema.define do
create_table :connection_proxy_bulk_insert, force: true do |t|
create_table :_test_connection_proxy_bulk_insert, force: true do |t|
t.string :name, null: true
end
end
@@ -93,13 +93,13 @@
after do
ActiveRecord::Schema.define do
drop_table :connection_proxy_bulk_insert, force: true
drop_table :_test_connection_proxy_bulk_insert, force: true
end
end
let(:model_class) do
Class.new(ApplicationRecord) do
self.table_name = "connection_proxy_bulk_insert"
self.table_name = "_test_connection_proxy_bulk_insert"
end
end

View file

@@ -105,7 +105,7 @@
describe 'LoadBalancing integration tests', :database_replica, :delete do
before(:all) do
ActiveRecord::Schema.define do
create_table :load_balancing_test, force: true do |t|
create_table :_test_load_balancing_test, force: true do |t|
t.string :name, null: true
end
end
@@ -113,13 +113,13 @@
after(:all) do
ActiveRecord::Schema.define do
drop_table :load_balancing_test, force: true
drop_table :_test_load_balancing_test, force: true
end
end
let(:model) do
Class.new(ApplicationRecord) do
self.table_name = "load_balancing_test"
self.table_name = "_test_load_balancing_test"
end
end
@@ -443,7 +443,7 @@
elsif payload[:name] == 'SQL' # Custom query
true
else
keywords = %w[load_balancing_test]
keywords = %w[_test_load_balancing_test]
keywords += %w[begin commit] if include_transaction
keywords.any? { |keyword| payload[:sql].downcase.include?(keyword) }
end

View file

@@ -9,18 +9,18 @@
let(:model) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_test_table'
self.table_name = '_test_loose_fk_test_table'
end
end
before(:all) do
migration.create_table :loose_fk_test_table do |t|
migration.create_table :_test_loose_fk_test_table do |t|
t.timestamps
end
end
after(:all) do
migration.drop_table :loose_fk_test_table
migration.drop_table :_test_loose_fk_test_table
end
before do
@@ -37,7 +37,7 @@
context 'when the record deletion tracker trigger is installed' do
before do
migration.track_record_deletions(:loose_fk_test_table)
migration.track_record_deletions(:_test_loose_fk_test_table)
end
it 'stores the record deletion' do
@@ -50,7 +50,7 @@
deleted_record = LooseForeignKeys::DeletedRecord.all.first
expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id)
expect(deleted_record.fully_qualified_table_name).to eq('public.loose_fk_test_table')
expect(deleted_record.fully_qualified_table_name).to eq('public._test_loose_fk_test_table')
expect(deleted_record.partition).to eq(1)
end

View file

@@ -20,7 +20,7 @@
let(:model) { Class.new(ActiveRecord::Base) }
before do
model.table_name = :test_table
model.table_name = :_test_table
end
context 'when called inside a transaction block' do
@@ -30,19 +30,19 @@
it 'raises an error' do
expect do
migration.public_send(operation, :test_table, :original, :renamed)
migration.public_send(operation, :_test_table, :original, :renamed)
end.to raise_error("#{operation} can not be run inside a transaction")
end
end
context 'when the existing column has a default value' do
before do
migration.change_column_default :test_table, existing_column, 'default value'
migration.change_column_default :_test_table, existing_column, 'default value'
end
it 'raises an error' do
expect do
migration.public_send(operation, :test_table, :original, :renamed)
migration.public_send(operation, :_test_table, :original, :renamed)
end.to raise_error("#{operation} does not currently support columns with default values")
end
end
@@ -51,18 +51,18 @@
context 'when the batch column does not exist' do
it 'raises an error' do
expect do
migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :missing)
end.to raise_error('Column missing does not exist on test_table')
migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :missing)
end.to raise_error('Column missing does not exist on _test_table')
end
end
context 'when the batch column does exist' do
it 'passes it when creating the column' do
expect(migration).to receive(:create_column_from)
.with(:test_table, existing_column, added_column, type: nil, batch_column_name: :status)
.with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status)
.and_call_original
migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :status)
migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :status)
end
end
end
@@ -71,17 +71,17 @@
existing_record_1 = model.create!(status: 0, existing_column => 'existing')
existing_record_2 = model.create!(status: 0, existing_column => nil)
migration.send(operation, :test_table, :original, :renamed)
migration.send(operation, :_test_table, :original, :renamed)
model.reset_column_information
expect(migration.column_exists?(:test_table, added_column)).to eq(true)
expect(migration.column_exists?(:_test_table, added_column)).to eq(true)
expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing')
expect(existing_record_2.reload).to have_attributes(status: 0, original: nil, renamed: nil)
end
it 'installs triggers to sync new data' do
migration.public_send(operation, :test_table, :original, :renamed)
migration.public_send(operation, :_test_table, :original, :renamed)
model.reset_column_information
new_record_1 = model.create!(status: 1, original: 'first')
@@ -102,7 +102,7 @@
before do
allow(migration).to receive(:transaction_open?).and_return(false)
migration.create_table :test_table do |t|
migration.create_table :_test_table do |t|
t.integer :status, null: false
t.text :original
t.text :other_column
@@ -118,8 +118,8 @@
context 'when the column to rename does not exist' do
it 'raises an error' do
expect do
migration.rename_column_concurrently :test_table, :missing_column, :renamed
end.to raise_error('Column missing_column does not exist on test_table')
migration.rename_column_concurrently :_test_table, :missing_column, :renamed
end.to raise_error('Column missing_column does not exist on _test_table')
end
end
end
@@ -128,7 +128,7 @@
before do
allow(migration).to receive(:transaction_open?).and_return(false)
migration.create_table :test_table do |t|
migration.create_table :_test_table do |t|
t.integer :status, null: false
t.text :other_column
t.text :renamed
@@ -144,8 +144,8 @@
context 'when the renamed column does not exist' do
it 'raises an error' do
expect do
migration.undo_cleanup_concurrent_column_rename :test_table, :original, :missing_column
end.to raise_error('Column missing_column does not exist on test_table')
migration.undo_cleanup_concurrent_column_rename :_test_table, :original, :missing_column
end.to raise_error('Column missing_column does not exist on _test_table')
end
end
end
@@ -156,25 +156,25 @@
before do
allow(migration).to receive(:transaction_open?).and_return(false)
migration.create_table :test_table do |t|
migration.create_table :_test_table do |t|
t.integer :status, null: false
t.text :original
t.text :other_column
end
migration.rename_column_concurrently :test_table, :original, :renamed
migration.rename_column_concurrently :_test_table, :original, :renamed
end
context 'when the helper is called repeatedly' do
before do
migration.public_send(operation, :test_table, :original, :renamed)
migration.public_send(operation, :_test_table, :original, :renamed)
end
it 'does not make repeated attempts to cleanup' do
expect(migration).not_to receive(:remove_column)
expect do
migration.public_send(operation, :test_table, :original, :renamed)
migration.public_send(operation, :_test_table, :original, :renamed)
end.not_to raise_error
end
end
@@ -182,26 +182,26 @@
context 'when the renamed column exists' do
let(:triggers) do
[
['trigger_7cc71f92fd63', 'function_for_trigger_7cc71f92fd63', before: 'insert'],
['trigger_f1a1f619636a', 'function_for_trigger_f1a1f619636a', before: 'update'],
['trigger_769a49938884', 'function_for_trigger_769a49938884', before: 'update']
['trigger_020dbcb8cdd0', 'function_for_trigger_020dbcb8cdd0', before: 'insert'],
['trigger_6edaca641d03', 'function_for_trigger_6edaca641d03', before: 'update'],
['trigger_a3fb9f3add34', 'function_for_trigger_a3fb9f3add34', before: 'update']
]
end
it 'removes the sync triggers and renamed columns' do
triggers.each do |(trigger_name, function_name, event)|
expect_function_to_exist(function_name)
expect_valid_function_trigger(:test_table, trigger_name, function_name, event)
expect_valid_function_trigger(:_test_table, trigger_name, function_name, event)
end
expect(migration.column_exists?(:test_table, added_column)).to eq(true)
expect(migration.column_exists?(:_test_table, added_column)).to eq(true)
migration.public_send(operation, :test_table, :original, :renamed)
migration.public_send(operation, :_test_table, :original, :renamed)
expect(migration.column_exists?(:test_table, added_column)).to eq(false)
expect(migration.column_exists?(:_test_table, added_column)).to eq(false)
triggers.each do |(trigger_name, function_name, _)|
expect_trigger_not_to_exist(:test_table, trigger_name)
expect_trigger_not_to_exist(:_test_table, trigger_name)
expect_function_not_to_exist(function_name)
end
end
@@ -223,7 +223,7 @@
end
describe '#create_table' do
let(:table_name) { :test_table }
let(:table_name) { :_test_table }
let(:column_attributes) do
[
{ name: 'id', sql_type: 'bigint', null: false, default: nil },
@@ -245,7 +245,7 @@
end
expect_table_columns_to_match(column_attributes, table_name)
expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 100')
expect_check_constraint(table_name, 'check_e9982cf9da', 'char_length(name) <= 100')
end
end
end

View file

@@ -10,7 +10,7 @@
let(:model) { double('model', table_name: table_name) }
let(:partitioning_key) { double }
let(:table_name) { :partitioned_test }
let(:table_name) { :_test_partitioned_test }
before do
connection.execute(<<~SQL)
@@ -18,11 +18,11 @@
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_000000
PARTITION OF #{table_name}
FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202005
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202005
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-01') TO ('2020-06-01');
SQL
@@ -30,8 +30,8 @@
it 'detects both partitions' do
expect(subject).to eq([
Gitlab::Database::Partitioning::TimePartition.new(table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'),
Gitlab::Database::Partitioning::TimePartition.new(table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
Gitlab::Database::Partitioning::TimePartition.new(table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'),
Gitlab::Database::Partitioning::TimePartition.new(table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005')
])
end
end
@@ -41,7 +41,7 @@
let(:model) do
Class.new(ActiveRecord::Base) do
self.table_name = 'partitioned_test'
self.table_name = '_test_partitioned_test'
self.primary_key = :id
end
end
@@ -59,11 +59,11 @@
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_000000
PARTITION OF #{model.table_name}
FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202006
PARTITION OF #{model.table_name}
FOR VALUES FROM ('2020-06-01') TO ('2020-07-01');
SQL
@@ -166,7 +166,7 @@
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202006
PARTITION OF #{model.table_name}
FOR VALUES FROM ('2020-06-01') TO ('2020-07-01');
SQL
@@ -181,13 +181,13 @@
describe '#extra_partitions' do
let(:model) do
Class.new(ActiveRecord::Base) do
self.table_name = 'partitioned_test'
self.table_name = '_test_partitioned_test'
self.primary_key = :id
end
end
let(:partitioning_key) { :created_at }
let(:table_name) { :partitioned_test }
let(:table_name) { :_test_partitioned_test }
around do |example|
travel_to(Date.parse('2020-08-22')) { example.run }
@@ -200,15 +200,15 @@
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_000000
PARTITION OF #{table_name}
FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202005
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202005
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-01') TO ('2020-06-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202006
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-06-01') TO ('2020-07-01')
SQL
@@ -235,7 +235,7 @@
it 'prunes the unbounded partition ending 2020-05-01' do
min_value_to_may = Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01',
partition_name: 'partitioned_test_000000')
partition_name: '_test_partitioned_test_000000')
expect(subject).to contain_exactly(min_value_to_may)
end
@@ -246,8 +246,8 @@
it 'prunes the unbounded partition and the partition for May-June' do
expect(subject).to contain_exactly(
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'),
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'),
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005')
)
end
@@ -256,16 +256,16 @@
it 'prunes empty partitions' do
expect(subject).to contain_exactly(
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'),
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'),
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005')
)
end
it 'does not prune non-empty partitions' do
connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-15'))") # inserting one record into partitioned_test_202005
connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-15'))") # inserting one record into _test_partitioned_test_202005
expect(subject).to contain_exactly(
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000')
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000')
)
end
end

View file

@@ -11,12 +11,12 @@
let(:new_model) do
Class.new(ActiveRecord::Base) do
self.table_name = 'projects_new'
self.table_name = '_test_projects_new'
end
end
before do
stub_const('Gitlab::Database::TABLES_TO_BE_RENAMED', { 'projects' => 'projects_new' })
stub_const('Gitlab::Database::TABLES_TO_BE_RENAMED', { 'projects' => '_test_projects_new' })
end
context 'when table is not renamed yet' do
@@ -32,8 +32,8 @@
context 'when table is renamed' do
before do
ActiveRecord::Base.connection.execute("ALTER TABLE projects RENAME TO projects_new")
ActiveRecord::Base.connection.execute("CREATE VIEW projects AS SELECT * FROM projects_new")
ActiveRecord::Base.connection.execute("ALTER TABLE projects RENAME TO _test_projects_new")
ActiveRecord::Base.connection.execute("CREATE VIEW projects AS SELECT * FROM _test_projects_new")
old_model.reset_column_information
ActiveRecord::Base.connection.schema_cache.clear!
@@ -54,14 +54,14 @@
it 'has the same indexes' do
indexes_for_old_table = ActiveRecord::Base.connection.schema_cache.indexes('projects')
indexes_for_new_table = ActiveRecord::Base.connection.schema_cache.indexes('projects_new')
indexes_for_new_table = ActiveRecord::Base.connection.schema_cache.indexes('_test_projects_new')
expect(indexes_for_old_table).to eq(indexes_for_new_table)
end
it 'has the same column_hash' do
columns_hash_for_old_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects')
columns_hash_for_new_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects_new')
columns_hash_for_new_table = ActiveRecord::Base.connection.schema_cache.columns_hash('_test_projects_new')
expect(columns_hash_for_old_table).to eq(columns_hash_for_new_table)
end

View file

@@ -8,7 +8,7 @@
before(:all) do
ActiveRecord::Schema.define do
create_table :testing_pagination_nodes, force: true do |t|
create_table :_test_testing_pagination_nodes, force: true do |t|
t.integer :value, null: false
end
end
@@ -16,13 +16,13 @@
after(:all) do
ActiveRecord::Schema.define do
drop_table :testing_pagination_nodes, force: true
drop_table :_test_testing_pagination_nodes, force: true
end
end
let_it_be(:node_model) do
Class.new(ActiveRecord::Base) do
self.table_name = 'testing_pagination_nodes'
self.table_name = '_test_testing_pagination_nodes'
end
end

View file

@@ -5,42 +5,42 @@
RSpec.describe BulkInsertSafe do
before(:all) do
ActiveRecord::Schema.define do
create_table :bulk_insert_parent_items, force: true do |t|
create_table :_test_bulk_insert_parent_items, force: true do |t|
t.string :name, null: false
end
create_table :bulk_insert_items, force: true do |t|
create_table :_test_bulk_insert_items, force: true do |t|
t.string :name, null: true
t.integer :enum_value, null: false
t.text :encrypted_secret_value, null: false
t.string :encrypted_secret_value_iv, null: false
t.binary :sha_value, null: false, limit: 20
t.jsonb :jsonb_value, null: false
t.belongs_to :bulk_insert_parent_item, foreign_key: true, null: true
t.belongs_to :bulk_insert_parent_item, foreign_key: { to_table: :_test_bulk_insert_parent_items }, null: true
t.timestamps null: true
t.index :name, unique: true
end
create_table :bulk_insert_items_with_composite_pk, id: false, force: true do |t|
create_table :_test_bulk_insert_items_with_composite_pk, id: false, force: true do |t|
t.integer :id, null: true
t.string :name, null: true
end
execute("ALTER TABLE bulk_insert_items_with_composite_pk ADD PRIMARY KEY (id,name);")
execute("ALTER TABLE _test_bulk_insert_items_with_composite_pk ADD PRIMARY KEY (id,name);")
end
end
after(:all) do
ActiveRecord::Schema.define do
drop_table :bulk_insert_items, force: true
drop_table :bulk_insert_parent_items, force: true
drop_table :bulk_insert_items_with_composite_pk, force: true
drop_table :_test_bulk_insert_items, force: true
drop_table :_test_bulk_insert_parent_items, force: true
drop_table :_test_bulk_insert_items_with_composite_pk, force: true
end
end
BulkInsertParentItem = Class.new(ActiveRecord::Base) do
self.table_name = :bulk_insert_parent_items
self.table_name = :_test_bulk_insert_parent_items
self.inheritance_column = :_type_disabled
def self.name
@@ -54,7 +54,7 @@ def self.name
let_it_be(:bulk_insert_item_class) do
Class.new(ActiveRecord::Base) do
self.table_name = 'bulk_insert_items'
self.table_name = '_test_bulk_insert_items'
include BulkInsertSafe
include ShaAttribute
@@ -247,7 +247,7 @@ def self.invalid_list(count)
context 'when a model with composite primary key is inserted' do
let_it_be(:bulk_insert_items_with_composite_pk_class) do
Class.new(ActiveRecord::Base) do
self.table_name = 'bulk_insert_items_with_composite_pk'
self.table_name = '_test_bulk_insert_items_with_composite_pk'
include BulkInsertSafe
end

View file

@@ -6,42 +6,50 @@
class BulkFoo < ApplicationRecord
include BulkInsertSafe
self.table_name = '_test_bulk_foos'
validates :name, presence: true
end
class BulkBar < ApplicationRecord
include BulkInsertSafe
self.table_name = '_test_bulk_bars'
end
SimpleBar = Class.new(ApplicationRecord)
SimpleBar = Class.new(ApplicationRecord) do
self.table_name = '_test_simple_bars'
end
class BulkParent < ApplicationRecord
include BulkInsertableAssociations
has_many :bulk_foos
self.table_name = '_test_bulk_parents'
has_many :bulk_foos, class_name: 'BulkFoo'
has_many :bulk_hunks, class_name: 'BulkFoo'
has_many :bulk_bars
has_many :simple_bars # not `BulkInsertSafe`
has_many :bulk_bars, class_name: 'BulkBar'
has_many :simple_bars, class_name: 'SimpleBar' # not `BulkInsertSafe`
has_one :bulk_foo # not supported
end
before(:all) do
ActiveRecord::Schema.define do
create_table :bulk_parents, force: true do |t|
create_table :_test_bulk_parents, force: true do |t|
t.string :name, null: true
end
create_table :bulk_foos, force: true do |t|
create_table :_test_bulk_foos, force: true do |t|
t.string :name, null: true
t.belongs_to :bulk_parent, null: false
end
create_table :bulk_bars, force: true do |t|
create_table :_test_bulk_bars, force: true do |t|
t.string :name, null: true
t.belongs_to :bulk_parent, null: false
end
create_table :simple_bars, force: true do |t|
create_table :_test_simple_bars, force: true do |t|
t.string :name, null: true
t.belongs_to :bulk_parent, null: false
end
@@ -50,10 +58,10 @@ class BulkParent < ApplicationRecord
after(:all) do
ActiveRecord::Schema.define do
drop_table :bulk_foos, force: true
drop_table :bulk_bars, force: true
drop_table :simple_bars, force: true
drop_table :bulk_parents, force: true
drop_table :_test_bulk_foos, force: true
drop_table :_test_bulk_bars, force: true
drop_table :_test_simple_bars, force: true
drop_table :_test_bulk_parents, force: true
end
end

View file

@@ -4,7 +4,7 @@
RSpec.describe WhereComposite do
describe '.where_composite' do
let_it_be(:test_table_name) { "test_table_#{SecureRandom.hex(10)}" }
let_it_be(:test_table_name) { "_test_table_#{SecureRandom.hex(10)}" }
let(:model) do
tbl_name = test_table_name

View file

@@ -8,44 +8,44 @@
def create_table_structure
migration = ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers)
migration.create_table :loose_fk_parent_table
migration.create_table :_test_loose_fk_parent_table
migration.create_table :loose_fk_child_table_1 do |t|
migration.create_table :_test_loose_fk_child_table_1 do |t|
t.bigint :parent_id
end
migration.create_table :loose_fk_child_table_2 do |t|
migration.create_table :_test_loose_fk_child_table_2 do |t|
t.bigint :parent_id_with_different_column
end
migration.track_record_deletions(:loose_fk_parent_table)
migration.track_record_deletions(:_test_loose_fk_parent_table)
end
let(:parent_model) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_parent_table'
self.table_name = '_test_loose_fk_parent_table'
include LooseForeignKey
loose_foreign_key :loose_fk_child_table_1, :parent_id, on_delete: :async_delete
loose_foreign_key :loose_fk_child_table_2, :parent_id_with_different_column, on_delete: :async_nullify
loose_foreign_key :_test_loose_fk_child_table_1, :parent_id, on_delete: :async_delete
loose_foreign_key :_test_loose_fk_child_table_2, :parent_id_with_different_column, on_delete: :async_nullify
end
end
let(:child_model_1) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_child_table_1'
self.table_name = '_test_loose_fk_child_table_1'
end
end
let(:child_model_2) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_child_table_2'
self.table_name = '_test_loose_fk_child_table_2'
end
end
let(:loose_fk_child_table_1) { table(:loose_fk_child_table_1) }
let(:loose_fk_child_table_2) { table(:loose_fk_child_table_2) }
let(:loose_fk_child_table_1) { table(:_test_loose_fk_child_table_1) }
let(:loose_fk_child_table_2) { table(:_test_loose_fk_child_table_2) }
let(:parent_record_1) { parent_model.create! }
let(:other_parent_record) { parent_model.create! }
@@ -73,9 +73,9 @@ def create_table_structure
after(:all) do
migration = ActiveRecord::Migration.new
migration.drop_table :loose_fk_parent_table
migration.drop_table :loose_fk_child_table_1
migration.drop_table :loose_fk_child_table_2
migration.drop_table :_test_loose_fk_parent_table
migration.drop_table :_test_loose_fk_child_table_1
migration.drop_table :_test_loose_fk_child_table_2
end
context 'when parent records are deleted' do
@@ -90,8 +90,8 @@ def create_table_structure
described_class.new(parent_klass: parent_model,
deleted_parent_records: LooseForeignKeys::DeletedRecord.status_pending.all,
models_by_table_name: {
'loose_fk_child_table_1' => child_model_1,
'loose_fk_child_table_2' => child_model_2
'_test_loose_fk_child_table_1' => child_model_1,
'_test_loose_fk_child_table_2' => child_model_2
}).execute
end

View file

@@ -215,7 +215,7 @@
stub_feature_flags(database_async_index_creation: true)
expect(Gitlab::Database::AsyncIndexes).to receive(:create_pending_indexes!).ordered.exactly(databases_count).times
expect(Gitlab::Database::Reindexing).to receive(:automatic_reindexing).ordered.once
expect(Gitlab::Database::Reindexing).to receive(:automatic_reindexing).ordered.exactly(databases_count).times
run_rake_task('gitlab:db:reindex')
end
@@ -233,7 +233,7 @@
context 'calls automatic reindexing' do
it 'uses all candidate indexes' do
expect(Gitlab::Database::Reindexing).to receive(:automatic_reindexing).once
expect(Gitlab::Database::Reindexing).to receive(:automatic_reindexing).exactly(databases_count).times
run_rake_task('gitlab:db:reindex')
end

View file

@@ -8,69 +8,69 @@
def create_table_structure
migration = ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers)
migration.create_table :loose_fk_parent_table_1
migration.create_table :loose_fk_parent_table_2
migration.create_table :_test_loose_fk_parent_table_1
migration.create_table :_test_loose_fk_parent_table_2
migration.create_table :loose_fk_child_table_1_1 do |t|
migration.create_table :_test_loose_fk_child_table_1_1 do |t|
t.bigint :parent_id
end
migration.create_table :loose_fk_child_table_1_2 do |t|
migration.create_table :_test_loose_fk_child_table_1_2 do |t|
t.bigint :parent_id_with_different_column
end
migration.create_table :loose_fk_child_table_2_1 do |t|
migration.create_table :_test_loose_fk_child_table_2_1 do |t|
t.bigint :parent_id
end
migration.track_record_deletions(:loose_fk_parent_table_1)
migration.track_record_deletions(:loose_fk_parent_table_2)
migration.track_record_deletions(:_test_loose_fk_parent_table_1)
migration.track_record_deletions(:_test_loose_fk_parent_table_2)
end
let!(:parent_model_1) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_parent_table_1'
self.table_name = '_test_loose_fk_parent_table_1'
include LooseForeignKey
loose_foreign_key :loose_fk_child_table_1_1, :parent_id, on_delete: :async_delete
loose_foreign_key :loose_fk_child_table_1_2, :parent_id_with_different_column, on_delete: :async_nullify
loose_foreign_key :_test_loose_fk_child_table_1_1, :parent_id, on_delete: :async_delete
loose_foreign_key :_test_loose_fk_child_table_1_2, :parent_id_with_different_column, on_delete: :async_nullify
end
end
let!(:parent_model_2) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_parent_table_2'
self.table_name = '_test_loose_fk_parent_table_2'
include LooseForeignKey
loose_foreign_key :loose_fk_child_table_2_1, :parent_id, on_delete: :async_delete
loose_foreign_key :_test_loose_fk_child_table_2_1, :parent_id, on_delete: :async_delete
end
end
let!(:child_model_1) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_child_table_1_1'
self.table_name = '_test_loose_fk_child_table_1_1'
end
end
let!(:child_model_2) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_child_table_1_2'
self.table_name = '_test_loose_fk_child_table_1_2'
end
end
let!(:child_model_3) do
Class.new(ApplicationRecord) do
self.table_name = 'loose_fk_child_table_2_1'
self.table_name = '_test_loose_fk_child_table_2_1'
end
end
let(:loose_fk_parent_table_1) { table(:loose_fk_parent_table_1) }
let(:loose_fk_parent_table_2) { table(:loose_fk_parent_table_2) }
let(:loose_fk_child_table_1_1) { table(:loose_fk_child_table_1_1) }
let(:loose_fk_child_table_1_2) { table(:loose_fk_child_table_1_2) }
let(:loose_fk_child_table_2_1) { table(:loose_fk_child_table_2_1) }
let(:loose_fk_parent_table_1) { table(:_test_loose_fk_parent_table_1) }
let(:loose_fk_parent_table_2) { table(:_test_loose_fk_parent_table_2) }
let(:loose_fk_child_table_1_1) { table(:_test_loose_fk_child_table_1_1) }
let(:loose_fk_child_table_1_2) { table(:_test_loose_fk_child_table_1_2) }
let(:loose_fk_child_table_2_1) { table(:_test_loose_fk_child_table_2_1) }
before(:all) do
create_table_structure
@@ -79,11 +79,11 @@ def create_table_structure
after(:all) do
migration = ActiveRecord::Migration.new
migration.drop_table :loose_fk_parent_table_1
migration.drop_table :loose_fk_parent_table_2
migration.drop_table :loose_fk_child_table_1_1
migration.drop_table :loose_fk_child_table_1_2
migration.drop_table :loose_fk_child_table_2_1
migration.drop_table :_test_loose_fk_parent_table_1
migration.drop_table :_test_loose_fk_parent_table_2
migration.drop_table :_test_loose_fk_child_table_1_1
migration.drop_table :_test_loose_fk_child_table_1_2
migration.drop_table :_test_loose_fk_child_table_2_1
end
before do