gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-05-20 17:34:42 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-05-20 17:34:42 +0300
commit    9f46488805e86b1bc341ea1620b866016c2ce5ed (patch)
tree      f9748c7e287041e37d6da49e0a29c9511dc34768 /spec/migrations
parent    dfc92d081ea0332d69c8aca2f0e745cb48ae5e6d (diff)
Add latest changes from gitlab-org/gitlab@13-0-stable-ee
Diffstat (limited to 'spec/migrations')
-rw-r--r--  spec/migrations/20200511145545_change_variable_interpolation_format_in_common_metrics_spec.rb | 34
-rw-r--r--  spec/migrations/backfill_snippet_repositories_spec.rb | 44
-rw-r--r--  spec/migrations/cleanup_optimistic_locking_nulls_pt2_fixed_spec.rb | 45
-rw-r--r--  spec/migrations/cleanup_optimistic_locking_nulls_spec.rb | 9
-rw-r--r--  spec/migrations/cleanup_projects_with_missing_namespace_spec.rb | 134
-rw-r--r--  spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb | 10
-rw-r--r--  spec/migrations/fill_file_store_ci_job_artifacts_spec.rb | 44
-rw-r--r--  spec/migrations/fill_file_store_lfs_objects_spec.rb | 36
-rw-r--r--  spec/migrations/fill_store_uploads_spec.rb | 48
-rw-r--r--  spec/migrations/remove_additional_application_settings_rows_spec.rb | 27
-rw-r--r--  spec/migrations/remove_deprecated_jenkins_service_records_spec.rb | 28
-rw-r--r--  spec/migrations/remove_orphaned_invited_members_spec.rb | 55
12 files changed, 504 insertions(+), 10 deletions(-)
diff --git a/spec/migrations/20200511145545_change_variable_interpolation_format_in_common_metrics_spec.rb b/spec/migrations/20200511145545_change_variable_interpolation_format_in_common_metrics_spec.rb
new file mode 100644
index 00000000000..f9e8a7ee6e9
--- /dev/null
+++ b/spec/migrations/20200511145545_change_variable_interpolation_format_in_common_metrics_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200511145545_change_variable_interpolation_format_in_common_metrics')
+
+describe ChangeVariableInterpolationFormatInCommonMetrics, :migration do
+ let(:prometheus_metrics) { table(:prometheus_metrics) }
+
+ let!(:common_metric) do
+ prometheus_metrics.create!(
+ identifier: 'system_metrics_kubernetes_container_memory_total',
+ query: 'avg(sum(container_memory_usage_bytes{container_name!="POD",' \
+ 'pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"})' \
+ ' by (job)) without (job) /1024/1024/1024',
+ project_id: nil,
+ title: 'Memory Usage (Total)',
+ y_label: 'Total Memory Used (GB)',
+ unit: 'GB',
+ legend: 'Total (GB)',
+ group: -5,
+ common: true
+ )
+ end
+
+ it 'updates query to use {{}}' do
+ expected_query = 'avg(sum(container_memory_usage_bytes{container_name!="POD",' \
+ 'pod_name=~"^{{ci_environment_slug}}-(.*)",namespace="{{kube_namespace}}"})' \
+ ' by (job)) without (job) /1024/1024/1024'
+
+ migrate!
+
+ expect(common_metric.reload.query).to eq(expected_query)
+ end
+end
diff --git a/spec/migrations/backfill_snippet_repositories_spec.rb b/spec/migrations/backfill_snippet_repositories_spec.rb
new file mode 100644
index 00000000000..e87bf7376dd
--- /dev/null
+++ b/spec/migrations/backfill_snippet_repositories_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200420094444_backfill_snippet_repositories.rb')
+
+describe BackfillSnippetRepositories do
+ let(:users) { table(:users) }
+ let(:snippets) { table(:snippets) }
+ let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test', state: 'active') }
+
+ def create_snippet(id)
+ params = {
+ id: id,
+ type: 'PersonalSnippet',
+ author_id: user.id,
+ file_name: 'foo',
+ content: 'bar'
+ }
+
+ snippets.create!(params)
+ end
+
+ it 'correctly schedules background migrations' do
+ create_snippet(1)
+ create_snippet(2)
+ create_snippet(3)
+
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(3.minutes, 1, 2)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(6.minutes, 3, 3)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_optimistic_locking_nulls_pt2_fixed_spec.rb b/spec/migrations/cleanup_optimistic_locking_nulls_pt2_fixed_spec.rb
new file mode 100644
index 00000000000..2e5e450afc7
--- /dev/null
+++ b/spec/migrations/cleanup_optimistic_locking_nulls_pt2_fixed_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200427064130_cleanup_optimistic_locking_nulls_pt2_fixed.rb')
+
+describe CleanupOptimisticLockingNullsPt2Fixed, :migration do
+ test_tables = %w(ci_stages ci_builds ci_pipelines).freeze
+ test_tables.each do |table|
+ let(table.to_sym) { table(table.to_sym) }
+ end
+ let(:tables) { test_tables.map { |t| method(t.to_sym).call } }
+
+ before do
+ # Create necessary rows
+ ci_stages.create!
+ ci_builds.create!
+ ci_pipelines.create!
+
+ # Nullify `lock_version` column for all rows
+ # Needs to be done with a SQL fragment, otherwise Rails will coerce it to 0
+ tables.each do |table|
+ table.update_all('lock_version = NULL')
+ end
+ end
+
+ it 'correctly migrates nullified lock_version column', :sidekiq_might_not_need_inline do
+ tables.each do |table|
+ expect(table.where(lock_version: nil).count).to eq(1)
+ end
+
+ tables.each do |table|
+ expect(table.where(lock_version: 0).count).to eq(0)
+ end
+
+ migrate!
+
+ tables.each do |table|
+ expect(table.where(lock_version: nil).count).to eq(0)
+ end
+
+ tables.each do |table|
+ expect(table.where(lock_version: 0).count).to eq(1)
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb b/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
index d32a374b914..6e541c903ff 100644
--- a/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
+++ b/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
@@ -4,11 +4,10 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200128210353_cleanup_optimistic_locking_nulls')
describe CleanupOptimisticLockingNulls do
- TABLES = %w(epics merge_requests issues).freeze
- TABLES.each do |table|
- let(table.to_sym) { table(table.to_sym) }
- end
- let(:tables) { TABLES.map { |t| method(t.to_sym).call } }
+ let(:epics) { table(:epics) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:issues) { table(:issues) }
+ let(:tables) { [epics, merge_requests, issues] }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb b/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb
new file mode 100644
index 00000000000..06b6d5e3b46
--- /dev/null
+++ b/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200511080113_add_projects_foreign_key_to_namespaces.rb')
+require Rails.root.join('db', 'post_migrate', '20200511083541_cleanup_projects_with_missing_namespace.rb')
+
+LOST_AND_FOUND_GROUP = 'lost-and-found'
+USER_TYPE_GHOST = 5
+ACCESS_LEVEL_OWNER = 50
+
+# In order to test the CleanupProjectsWithMissingNamespace migration, we need
+# to first create an orphaned project (one with an invalid namespace_id)
+# and then run the migration to check that the project was properly cleaned up
+#
+# The problem is that the CleanupProjectsWithMissingNamespace migration comes
+# after the FK has been added with a previous migration (AddProjectsForeignKeyToNamespaces)
+# That means that while testing the current class we can not insert projects with an
+# invalid namespace_id as the existing FK is correctly blocking us from doing so
+#
+# The approach that solves that problem is to:
+# - Set the schema of this test to the one prior to AddProjectsForeignKeyToNamespaces
+# - We could hardcode it to `20200508091106` (which currently is the previous
+# migration before adding the FK) but that would mean that this test depends
+# on migration 20200508091106 not being reverted or deleted
+# - So, we use SchemaVersionFinder that finds the previous migration and returns
+# its schema, which we then use in the describe
+#
+# That means that we lock the schema version to the one returned by
+# SchemaVersionFinder.previous_migration and only test the cleanup migration
+# *without* the migration that adds the Foreign Key ever running
+# That's acceptable as the cleanup script should not be affected in any way
+# by the migration that adds the Foreign Key
+class SchemaVersionFinder
+ def self.migrations_paths
+ ActiveRecord::Migrator.migrations_paths
+ end
+
+ def self.migration_context
+ ActiveRecord::MigrationContext.new(migrations_paths, ActiveRecord::SchemaMigration)
+ end
+
+ def self.migrations
+ migration_context.migrations
+ end
+
+ def self.previous_migration
+ migrations.each_cons(2) do |previous, migration|
+ break previous.version if migration.name == AddProjectsForeignKeyToNamespaces.name
+ end
+ end
+end
+
+describe CleanupProjectsWithMissingNamespace, :migration, schema: SchemaVersionFinder.previous_migration do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:users) { table(:users) }
+
+ before do
+ namespace = namespaces.create!(name: 'existing_namespace', path: 'existing_namespace')
+
+ projects.create!(
+ name: 'project_with_existing_namespace',
+ path: 'project_with_existing_namespace',
+ visibility_level: 20,
+ archived: false,
+ namespace_id: namespace.id
+ )
+
+ projects.create!(
+ name: 'project_with_non_existing_namespace',
+ path: 'project_with_non_existing_namespace',
+ visibility_level: 20,
+ archived: false,
+ namespace_id: non_existing_record_id
+ )
+ end
+
+ it 'creates the ghost user' do
+ expect(users.where(user_type: USER_TYPE_GHOST).count).to eq(0)
+
+ disable_migrations_output { migrate! }
+
+ expect(users.where(user_type: USER_TYPE_GHOST).count).to eq(1)
+ end
+
+ it 'creates the lost-and-found group, owned by the ghost user' do
+ expect(
+ Group.where(Group.arel_table[:name].matches("#{LOST_AND_FOUND_GROUP}%")).count
+ ).to eq(0)
+
+ disable_migrations_output { migrate! }
+
+ ghost_user = users.find_by(user_type: USER_TYPE_GHOST)
+ expect(
+ Group
+ .joins('INNER JOIN members ON namespaces.id = members.source_id')
+ .where('namespaces.type = ?', 'Group')
+ .where('members.type = ?', 'GroupMember')
+ .where('members.source_type = ?', 'Namespace')
+ .where('members.user_id = ?', ghost_user.id)
+ .where('members.requested_at IS NULL')
+ .where('members.access_level = ?', ACCESS_LEVEL_OWNER)
+ .where(Group.arel_table[:name].matches("#{LOST_AND_FOUND_GROUP}%"))
+ .count
+ ).to eq(1)
+ end
+
+ it 'moves the orphaned project to the lost-and-found group' do
+ orphaned_project = projects.find_by(name: 'project_with_non_existing_namespace')
+ expect(orphaned_project.visibility_level).to eq(20)
+ expect(orphaned_project.archived).to eq(false)
+ expect(orphaned_project.namespace_id).to eq(non_existing_record_id)
+
+ disable_migrations_output { migrate! }
+
+ lost_and_found_group = Group.find_by(Group.arel_table[:name].matches("#{LOST_AND_FOUND_GROUP}%"))
+ orphaned_project = projects.find_by(id: orphaned_project.id)
+
+ expect(orphaned_project.visibility_level).to eq(0)
+ expect(orphaned_project.namespace_id).to eq(lost_and_found_group.id)
+ expect(orphaned_project.name).to eq("project_with_non_existing_namespace_#{orphaned_project.id}")
+ expect(orphaned_project.path).to eq("project_with_non_existing_namespace_#{orphaned_project.id}")
+ expect(orphaned_project.archived).to eq(true)
+
+ valid_project = projects.find_by(name: 'project_with_existing_namespace')
+ existing_namespace = namespaces.find_by(name: 'existing_namespace')
+
+ expect(valid_project.visibility_level).to eq(20)
+ expect(valid_project.namespace_id).to eq(existing_namespace.id)
+ expect(valid_project.path).to eq('project_with_existing_namespace')
+ expect(valid_project.archived).to eq(false)
+ end
+end
diff --git a/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb b/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
index 87a72ed0cf5..fda810d1da9 100644
--- a/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
+++ b/spec/migrations/encrypt_plaintext_attributes_on_application_settings_spec.rb
@@ -8,7 +8,7 @@ describe EncryptPlaintextAttributesOnApplicationSettings do
let(:application_settings) { table(:application_settings) }
let(:plaintext) { 'secret-token' }
- PLAINTEXT_ATTRIBUTES = %w[
+ plaintext_attributes = %w[
akismet_api_key
elasticsearch_aws_secret_access_key
recaptcha_private_key
@@ -21,7 +21,7 @@ describe EncryptPlaintextAttributesOnApplicationSettings do
it 'encrypts token and saves it' do
application_setting = application_settings.create
application_setting.update_columns(
- PLAINTEXT_ATTRIBUTES.each_with_object({}) do |plaintext_attribute, attributes|
+ plaintext_attributes.each_with_object({}) do |plaintext_attribute, attributes|
attributes[plaintext_attribute] = plaintext
end
)
@@ -29,7 +29,7 @@ describe EncryptPlaintextAttributesOnApplicationSettings do
migration.up
application_setting.reload
- PLAINTEXT_ATTRIBUTES.each do |plaintext_attribute|
+ plaintext_attributes.each do |plaintext_attribute|
expect(application_setting[plaintext_attribute]).not_to be_nil
expect(application_setting["encrypted_#{plaintext_attribute}"]).not_to be_nil
expect(application_setting["encrypted_#{plaintext_attribute}_iv"]).not_to be_nil
@@ -40,7 +40,7 @@ describe EncryptPlaintextAttributesOnApplicationSettings do
describe '#down' do
it 'decrypts encrypted token and saves it' do
application_setting = application_settings.create(
- PLAINTEXT_ATTRIBUTES.each_with_object({}) do |plaintext_attribute, attributes|
+ plaintext_attributes.each_with_object({}) do |plaintext_attribute, attributes|
attributes[plaintext_attribute] = plaintext
end
)
@@ -48,7 +48,7 @@ describe EncryptPlaintextAttributesOnApplicationSettings do
migration.down
application_setting.reload
- PLAINTEXT_ATTRIBUTES.each do |plaintext_attribute|
+ plaintext_attributes.each do |plaintext_attribute|
expect(application_setting[plaintext_attribute]).to eq(plaintext)
expect(application_setting["encrypted_#{plaintext_attribute}"]).to be_nil
expect(application_setting["encrypted_#{plaintext_attribute}_iv"]).to be_nil
diff --git a/spec/migrations/fill_file_store_ci_job_artifacts_spec.rb b/spec/migrations/fill_file_store_ci_job_artifacts_spec.rb
new file mode 100644
index 00000000000..5435a438824
--- /dev/null
+++ b/spec/migrations/fill_file_store_ci_job_artifacts_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200513235532_fill_file_store_ci_job_artifacts.rb')
+
+describe FillFileStoreCiJobArtifacts do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:builds) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+
+ before do
+ namespaces.create!(id: 123, name: 'sample', path: 'sample')
+ projects.create!(id: 123, name: 'sample', path: 'sample', namespace_id: 123)
+ builds.create!(id: 1)
+ end
+
+ context 'when file_store is nil' do
+ it 'updates file_store to local' do
+ job_artifacts.create!(project_id: 123, job_id: 1, file_type: 1, file_store: nil)
+ job_artifact = job_artifacts.find_by(project_id: 123, job_id: 1)
+
+ expect { migrate! }.to change { job_artifact.reload.file_store }.from(nil).to(1)
+ end
+ end
+
+ context 'when file_store is set to local' do
+ it 'does not update file_store' do
+ job_artifacts.create!(project_id: 123, job_id: 1, file_type: 1, file_store: 1)
+ job_artifact = job_artifacts.find_by(project_id: 123, job_id: 1)
+
+ expect { migrate! }.not_to change { job_artifact.reload.file_store }
+ end
+ end
+
+ context 'when file_store is set to object storage' do
+ it 'does not update file_store' do
+ job_artifacts.create!(project_id: 123, job_id: 1, file_type: 1, file_store: 2)
+ job_artifact = job_artifacts.find_by(project_id: 123, job_id: 1)
+
+ expect { migrate! }.not_to change { job_artifact.reload.file_store }
+ end
+ end
+end
diff --git a/spec/migrations/fill_file_store_lfs_objects_spec.rb b/spec/migrations/fill_file_store_lfs_objects_spec.rb
new file mode 100644
index 00000000000..e574eacca35
--- /dev/null
+++ b/spec/migrations/fill_file_store_lfs_objects_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200513234502_fill_file_store_lfs_objects.rb')
+
+describe FillFileStoreLfsObjects do
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:oid) { 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75' }
+
+ context 'when file_store is nil' do
+ it 'updates file_store to local' do
+ lfs_objects.create(oid: oid, size: 1062, file_store: nil)
+ lfs_object = lfs_objects.find_by(oid: oid)
+
+ expect { migrate! }.to change { lfs_object.reload.file_store }.from(nil).to(1)
+ end
+ end
+
+ context 'when file_store is set to local' do
+ it 'does not update file_store' do
+ lfs_objects.create(oid: oid, size: 1062, file_store: 1)
+ lfs_object = lfs_objects.find_by(oid: oid)
+
+ expect { migrate! }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+
+ context 'when file_store is set to object storage' do
+ it 'does not update file_store' do
+ lfs_objects.create(oid: oid, size: 1062, file_store: 2)
+ lfs_object = lfs_objects.find_by(oid: oid)
+
+ expect { migrate! }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+end
diff --git a/spec/migrations/fill_store_uploads_spec.rb b/spec/migrations/fill_store_uploads_spec.rb
new file mode 100644
index 00000000000..6a2a3c4ea8e
--- /dev/null
+++ b/spec/migrations/fill_store_uploads_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200513235347_fill_store_uploads.rb')
+
+describe FillStoreUploads do
+ let(:uploads) { table(:uploads) }
+ let(:path) { 'uploads/-/system/avatar.jpg' }
+
+ context 'when store is nil' do
+ it 'updates store to local' do
+ uploads.create(size: 100.kilobytes,
+ uploader: 'AvatarUploader',
+ path: path,
+ store: nil)
+
+ upload = uploads.find_by(path: path)
+
+ expect { migrate! }.to change { upload.reload.store }.from(nil).to(1)
+ end
+ end
+
+ context 'when store is set to local' do
+ it 'does not update store' do
+ uploads.create(size: 100.kilobytes,
+ uploader: 'AvatarUploader',
+ path: path,
+ store: 1)
+
+ upload = uploads.find_by(path: path)
+
+ expect { migrate! }.not_to change { upload.reload.store }
+ end
+ end
+
+ context 'when store is set to object storage' do
+ it 'does not update store' do
+ uploads.create(size: 100.kilobytes,
+ uploader: 'AvatarUploader',
+ path: path,
+ store: 2)
+
+ upload = uploads.find_by(path: path)
+
+ expect { migrate! }.not_to change { upload.reload.store }
+ end
+ end
+end
diff --git a/spec/migrations/remove_additional_application_settings_rows_spec.rb b/spec/migrations/remove_additional_application_settings_rows_spec.rb
new file mode 100644
index 00000000000..379fa385b8e
--- /dev/null
+++ b/spec/migrations/remove_additional_application_settings_rows_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200420162730_remove_additional_application_settings_rows.rb')
+
+describe RemoveAdditionalApplicationSettingsRows do
+ let(:application_settings) { table(:application_settings) }
+
+ it 'removes additional rows from application settings' do
+ 3.times { application_settings.create! }
+ latest_settings = application_settings.create!
+
+ disable_migrations_output { migrate! }
+
+ expect(application_settings.count).to eq(1)
+ expect(application_settings.first).to eq(latest_settings)
+ end
+
+ it 'leaves only row in application_settings' do
+ latest_settings = application_settings.create!
+
+ disable_migrations_output { migrate! }
+
+ expect(application_settings.first).to eq(latest_settings)
+ end
+end
diff --git a/spec/migrations/remove_deprecated_jenkins_service_records_spec.rb b/spec/migrations/remove_deprecated_jenkins_service_records_spec.rb
new file mode 100644
index 00000000000..9c9abd36203
--- /dev/null
+++ b/spec/migrations/remove_deprecated_jenkins_service_records_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200511130129_remove_deprecated_jenkins_service_records.rb')
+require Rails.root.join('db', 'post_migrate', '20200511130130_ensure_deprecated_jenkins_service_records_removal.rb')
+
+shared_examples 'remove DeprecatedJenkinsService records' do
+ let(:services) { table(:services) }
+
+ before do
+ services.create!(type: 'JenkinsDeprecatedService')
+ services.create!(type: 'JenkinsService')
+ end
+
+ it 'deletes services when template and attached to a project' do
+ expect { migrate! }
+ .to change { services.where(type: 'JenkinsDeprecatedService').count }.from(1).to(0)
+ .and not_change { services.where(type: 'JenkinsService').count }
+ end
+end
+
+describe RemoveDeprecatedJenkinsServiceRecords, :migration do
+ it_behaves_like 'remove DeprecatedJenkinsService records'
+end
+
+describe EnsureDeprecatedJenkinsServiceRecordsRemoval, :migration do
+ it_behaves_like 'remove DeprecatedJenkinsService records'
+end
diff --git a/spec/migrations/remove_orphaned_invited_members_spec.rb b/spec/migrations/remove_orphaned_invited_members_spec.rb
new file mode 100644
index 00000000000..0ed4c15428a
--- /dev/null
+++ b/spec/migrations/remove_orphaned_invited_members_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200424050250_remove_orphaned_invited_members.rb')
+
+describe RemoveOrphanedInvitedMembers do
+ let(:members_table) { table(:members) }
+ let(:users_table) { table(:users) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+
+ let!(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 1) }
+ let!(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 1) }
+ let!(:group) { namespaces_table.create!(type: 'Group', name: 'group', path: 'group') }
+ let!(:project) { projects_table.create!(name: 'project', path: 'project', namespace_id: group.id) }
+
+ let!(:member1) { create_member(user_id: user1.id, source_type: 'Project', source_id: project.id, access_level: 10) }
+ let!(:member2) { create_member(user_id: user2.id, source_type: 'Group', source_id: group.id, access_level: 20) }
+
+ let!(:invited_member1) do
+ create_member(user_id: nil, source_type: 'Project', source_id: project.id,
+ invite_token: SecureRandom.hex, invite_accepted_at: Time.now,
+ access_level: 20)
+ end
+ let!(:invited_member2) do
+ create_member(user_id: nil, source_type: 'Group', source_id: group.id,
+ invite_token: SecureRandom.hex, invite_accepted_at: Time.now,
+ access_level: 20)
+ end
+
+ let!(:orphaned_member1) do
+ create_member(user_id: nil, source_type: 'Project', source_id: project.id,
+ invite_accepted_at: Time.now, access_level: 30)
+ end
+ let!(:orphaned_member2) do
+ create_member(user_id: nil, source_type: 'Group', source_id: group.id,
+ invite_accepted_at: Time.now, access_level: 20)
+ end
+
+ it 'removes orphaned invited members but keeps current members' do
+ expect { migrate! }.to change { members_table.count }.from(6).to(4)
+
+ expect(members_table.all.pluck(:id)).to contain_exactly(member1.id, member2.id, invited_member1.id, invited_member2.id)
+ end
+
+ def create_member(options)
+ members_table.create!(
+ {
+ notification_level: 0,
+ ldap: false,
+ override: false
+ }.merge(options)
+ )
+ end
+end