gitlab.com/gitlab-org/gitlab-foss.git
author    Marin Jankovski <maxlazio@gmail.com>  2019-07-03 12:55:56 +0300
committer Marin Jankovski <maxlazio@gmail.com>  2019-07-03 12:55:56 +0300
commit    c20c9e2940b0f94547246d05b7b526f0b1571027 (patch)
tree      c548960a37ab7447ff542e0844e838f973c118fb  /spec/lib/gitlab
parent    49d689fb3c7781c861f995aaafef4b224581020b (diff)
parent    2ca9bda400c0ed647c3ef342dcc0aa56c558cebe (diff)

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce
Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--  spec/lib/gitlab/auth/ip_rate_limiter_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/auth_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb | 125
-rw-r--r--  spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb | 433
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb | 92
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb | 97
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/diff/lines_unfolder_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/graphql/copy_field_description_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 1
-rw-r--r--  spec/lib/gitlab/import_export/project.json | 2
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r--  spec/lib/gitlab/issuable_sorter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/finder_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/metrics/system_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/reference_extractor_spec.rb | 6
32 files changed, 391 insertions(+), 1010 deletions(-)
diff --git a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
new file mode 100644
index 00000000000..8d6bf45ab30
--- /dev/null
+++ b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_caching do
+ let(:ip) { '10.2.2.3' }
+ let(:whitelist) { ['127.0.0.1'] }
+ let(:options) do
+ {
+ enabled: true,
+ ip_whitelist: whitelist,
+ bantime: 1.minute,
+ findtime: 1.minute,
+ maxretry: 2
+ }
+ end
+
+ subject { described_class.new(ip) }
+
+ before do
+ stub_rack_attack_setting(options)
+ end
+
+ after do
+ subject.reset!
+ end
+
+ describe '#register_fail!' do
+ it 'bans after 3 consecutive failures' do
+ expect(subject.banned?).to be_falsey
+
+ 3.times { subject.register_fail! }
+
+ expect(subject.banned?).to be_truthy
+ end
+
+ shared_examples 'whitelisted IPs' do
+ it 'does not ban after max retry limit' do
+ expect(subject.banned?).to be_falsey
+
+ 3.times { subject.register_fail! }
+
+ expect(subject.banned?).to be_falsey
+ end
+ end
+
+ context 'with a whitelisted netmask' do
+ before do
+ options[:ip_whitelist] = ['127.0.0.1', '10.2.2.0/24', 'bad']
+ stub_rack_attack_setting(options)
+ end
+
+ it_behaves_like 'whitelisted IPs'
+ end
+
+ context 'with a whitelisted IP' do
+ before do
+ options[:ip_whitelist] = ['10.2.2.3']
+ stub_rack_attack_setting(options)
+ end
+
+ it_behaves_like 'whitelisted IPs'
+ end
+ end
+end
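
A minimal usage sketch of the rate limiter exercised by the new spec above. The register_fail!, banned?, and reset! calls and the whitelist behaviour follow the examples; the surrounding login helpers (request, valid_credentials?, render) are placeholders, not part of this changeset.

# Sketch only: wiring the limiter into a login path, assuming the API shown in the spec.
limiter = Gitlab::Auth::IpRateLimiter.new(request.ip)

if limiter.banned?
  # The IP exceeded `maxretry` failures within `findtime`; reject outright.
  render status: :forbidden
elsif valid_credentials?(params[:login], params[:password])
  limiter.reset! # clear any accumulated failures on success
else
  limiter.register_fail! # counts towards the ban unless the IP is whitelisted
  render status: :unauthorized
end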
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 3b5ca7c950c..d9c73cff01e 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -309,6 +309,15 @@ describe Gitlab::Auth do
.to eq(auth_success)
end
+ it 'succeeds when custom login and token are valid' do
+ deploy_token = create(:deploy_token, username: 'deployer', read_registry: false, projects: [project])
+ auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
+
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: 'deployer')
+ expect(gl_auth.find_for_git_client('deployer', deploy_token.token, project: project, ip: 'ip'))
+ .to eq(auth_success)
+ end
+
it 'fails when login is not valid' do
expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: 'random_login')
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
diff --git a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
deleted file mode 100644
index 5076996474f..00000000000
--- a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
+++ /dev/null
@@ -1,125 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
-
- let(:base1) { projects.create }
- let(:base1_fork1) { projects.create }
- let(:base1_fork2) { projects.create }
-
- let(:base2) { projects.create }
- let(:base2_fork1) { projects.create }
- let(:base2_fork2) { projects.create }
-
- let(:fork_of_fork) { projects.create }
- let(:fork_of_fork2) { projects.create }
- let(:second_level_fork) { projects.create }
- let(:third_level_fork) { projects.create }
-
- let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
- let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
-
- let!(:forked_project_links) { table(:forked_project_links) }
- let!(:fork_networks) { table(:fork_networks) }
- let!(:fork_network_members) { table(:fork_network_members) }
-
- before do
- # The fork-network relation created for the forked project
- fork_networks.create(id: 1, root_project_id: base1.id)
- fork_network_members.create(project_id: base1.id, fork_network_id: 1)
- fork_networks.create(id: 2, root_project_id: base2.id)
- fork_network_members.create(project_id: base2.id, fork_network_id: 2)
-
- # Normal fork links
- forked_project_links.create(id: 1, forked_from_project_id: base1.id, forked_to_project_id: base1_fork1.id)
- forked_project_links.create(id: 2, forked_from_project_id: base1.id, forked_to_project_id: base1_fork2.id)
- forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id)
- forked_project_links.create(id: 4, forked_from_project_id: base2.id, forked_to_project_id: base2_fork2.id)
-
- # Fork links
- forked_project_links.create(id: 5, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork.id)
- forked_project_links.create(id: 6, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork2.id)
-
- # Forks 3 levels down
- forked_project_links.create(id: 7, forked_from_project_id: fork_of_fork.id, forked_to_project_id: second_level_fork.id)
- forked_project_links.create(id: 8, forked_from_project_id: second_level_fork.id, forked_to_project_id: third_level_fork.id)
-
- migration.perform(1, 8)
- end
-
- it 'creates memberships for the direct forks' do
- base1_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1_fork1.id)
- base1_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1_fork2.id)
- base2_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2_fork1.id)
- base2_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2_fork2.id)
-
- expect(base1_fork1_membership.forked_from_project_id).to eq(base1.id)
- expect(base1_fork2_membership.forked_from_project_id).to eq(base1.id)
- expect(base2_fork1_membership.forked_from_project_id).to eq(base2.id)
- expect(base2_fork2_membership.forked_from_project_id).to eq(base2.id)
- end
-
- it 'adds the fork network members for forks of forks' do
- fork_of_fork_membership = fork_network_members.find_by(project_id: fork_of_fork.id,
- fork_network_id: fork_network1.id)
- fork_of_fork2_membership = fork_network_members.find_by(project_id: fork_of_fork2.id,
- fork_network_id: fork_network1.id)
- second_level_fork_membership = fork_network_members.find_by(project_id: second_level_fork.id,
- fork_network_id: fork_network1.id)
- third_level_fork_membership = fork_network_members.find_by(project_id: third_level_fork.id,
- fork_network_id: fork_network1.id)
-
- expect(fork_of_fork_membership.forked_from_project_id).to eq(base1_fork1.id)
- expect(fork_of_fork2_membership.forked_from_project_id).to eq(base1_fork1.id)
- expect(second_level_fork_membership.forked_from_project_id).to eq(fork_of_fork.id)
- expect(third_level_fork_membership.forked_from_project_id).to eq(second_level_fork.id)
- end
-
- it 'reschedules itself when there are missing members' do
- allow(migration).to receive(:missing_members?).and_return(true)
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [1, 3])
-
- migration.perform(1, 3)
- end
-
- it 'can be repeated without effect' do
- expect { fork_network_members.count }.not_to change { migration.perform(1, 7) }
- end
-
- it 'knows it is finished for this range' do
- expect(migration.missing_members?(1, 8)).to be_falsy
- end
-
- it 'does not miss members for forks of forks for which the root was deleted' do
- forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: projects.create.id)
- base1.destroy
-
- expect(migration.missing_members?(7, 10)).to be_falsy
- end
-
- context 'with more forks' do
- before do
- forked_project_links.create(id: 9, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 10, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
- end
-
- it 'only processes a single batch of links at a time' do
- expect(fork_network_members.count).to eq(10)
-
- migration.perform(8, 10)
-
- expect(fork_network_members.count).to eq(12)
- end
-
- it 'knows when not all memberships within a batch have been created' do
- expect(migration.missing_members?(8, 10)).to be_truthy
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb b/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
deleted file mode 100644
index 9bae7e53b71..00000000000
--- a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange, :migration, schema: 20170907170235 do
- let!(:redirect_routes) { table(:redirect_routes) }
- let!(:routes) { table(:routes) }
-
- before do
- routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
- routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
- routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
- routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
- routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')
-
- # Valid redirects
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
- redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')
-
- # Conflicting redirects
- redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
- end
-
- # No-op. See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
- it 'NO-OP: does not delete any redirect_routes' do
- expect(redirect_routes.count).to eq(8)
-
- described_class.new.perform(1, 5)
-
- expect(redirect_routes.count).to eq(8)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb b/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
deleted file mode 100644
index 188969951a6..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
+++ /dev/null
@@ -1,433 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event, :migration, schema: 20170608152748 do
- describe '#commit_title' do
- it 'returns nil when there are no commits' do
- expect(described_class.new.commit_title).to be_nil
- end
-
- it 'returns nil when there are commits without commit messages' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ id: '123' }])
-
- expect(event.commit_title).to be_nil
- end
-
- it 'returns the commit message when it is less than 70 characters long' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: 'Hello world' }])
-
- expect(event.commit_title).to eq('Hello world')
- end
-
- it 'returns the first line of a commit message if multiple lines are present' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: "Hello\n\nworld" }])
-
- expect(event.commit_title).to eq('Hello')
- end
-
- it 'truncates the commit to 70 characters when it is too long' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: 'a' * 100 }])
-
- expect(event.commit_title).to eq(('a' * 67) + '...')
- end
- end
-
- describe '#commit_from_sha' do
- it 'returns nil when pushing to a new ref' do
- event = described_class.new
-
- allow(event).to receive(:create?).and_return(true)
-
- expect(event.commit_from_sha).to be_nil
- end
-
- it 'returns the ID of the first commit when pushing to an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:data).and_return(before: '123')
-
- expect(event.commit_from_sha).to eq('123')
- end
- end
-
- describe '#commit_to_sha' do
- it 'returns nil when removing an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:remove?).and_return(true)
-
- expect(event.commit_to_sha).to be_nil
- end
-
- it 'returns the ID of the last commit when pushing to an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:remove?).and_return(false)
- allow(event).to receive(:data).and_return(after: '123')
-
- expect(event.commit_to_sha).to eq('123')
- end
- end
-
- describe '#data' do
- it 'returns the deserialized data' do
- event = described_class.new(data: { before: '123' })
-
- expect(event.data).to eq(before: '123')
- end
-
- it 'returns an empty hash when no data is present' do
- event = described_class.new
-
- expect(event.data).to eq({})
- end
- end
-
- describe '#commits' do
- it 'returns an Array of commits' do
- event = described_class.new(data: { commits: [{ id: '123' }] })
-
- expect(event.commits).to eq([{ id: '123' }])
- end
-
- it 'returns an empty array when no data is present' do
- event = described_class.new
-
- expect(event.commits).to eq([])
- end
- end
-
- describe '#commit_count' do
- it 'returns the number of commits' do
- event = described_class.new(data: { total_commits_count: 2 })
-
- expect(event.commit_count).to eq(2)
- end
-
- it 'returns 0 when no data is present' do
- event = described_class.new
-
- expect(event.commit_count).to eq(0)
- end
- end
-
- describe '#ref' do
- it 'returns the name of the ref' do
- event = described_class.new(data: { ref: 'refs/heads/master' })
-
- expect(event.ref).to eq('refs/heads/master')
- end
- end
-
- describe '#trimmed_ref_name' do
- it 'returns the trimmed ref name for a branch' do
- event = described_class.new(data: { ref: 'refs/heads/master' })
-
- expect(event.trimmed_ref_name).to eq('master')
- end
-
- it 'returns the trimmed ref name for a tag' do
- event = described_class.new(data: { ref: 'refs/tags/v1.2' })
-
- expect(event.trimmed_ref_name).to eq('v1.2')
- end
- end
-
- describe '#create?' do
- it 'returns true when creating a new ref' do
- event = described_class.new(data: { before: described_class::BLANK_REF })
-
- expect(event.create?).to eq(true)
- end
-
- it 'returns false when pushing to an existing ref' do
- event = described_class.new(data: { before: '123' })
-
- expect(event.create?).to eq(false)
- end
- end
-
- describe '#remove?' do
- it 'returns true when removing an existing ref' do
- event = described_class.new(data: { after: described_class::BLANK_REF })
-
- expect(event.remove?).to eq(true)
- end
-
- it 'returns false when pushing to an existing ref' do
- event = described_class.new(data: { after: '123' })
-
- expect(event.remove?).to eq(false)
- end
- end
-
- describe '#push_action' do
- let(:event) { described_class.new }
-
- it 'returns :created when creating a new ref' do
- allow(event).to receive(:create?).and_return(true)
-
- expect(event.push_action).to eq(:created)
- end
-
- it 'returns :removed when removing an existing ref' do
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:remove?).and_return(true)
-
- expect(event.push_action).to eq(:removed)
- end
-
- it 'returns :pushed when pushing to an existing ref' do
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:remove?).and_return(false)
-
- expect(event.push_action).to eq(:pushed)
- end
- end
-
- describe '#ref_type' do
- let(:event) { described_class.new }
-
- it 'returns :tag for a tag' do
- allow(event).to receive(:ref).and_return('refs/tags/1.2')
-
- expect(event.ref_type).to eq(:tag)
- end
-
- it 'returns :branch for a branch' do
- allow(event).to receive(:ref).and_return('refs/heads/1.2')
-
- expect(event.ref_type).to eq(:branch)
- end
- end
-end
-
-##
-# The background migration relies on a temporary table, hence we're migrating
-# to a specific version of the database where said table is still present.
-#
-describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads, :migration, schema: 20170825154015 do
- let(:user_class) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'users'
- end
- end
-
- let(:migration) { described_class.new }
- let(:user_class) { table(:users) }
- let(:author) { build(:user).becomes(user_class).tap(&:save!).becomes(User) }
- let(:namespace) { create(:namespace, owner: author) }
- let(:projects) { table(:projects) }
- let(:project) { projects.create(namespace_id: namespace.id, creator_id: author.id) }
-
- # We can not rely on FactoryBot as the state of Event may change in ways that
- # the background migration does not expect, hence we use the Event class of
- # the migration itself.
- def create_push_event(project, author, data = nil)
- klass = Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event
-
- klass.create!(
- action: klass::PUSHED,
- project_id: project.id,
- author_id: author.id,
- data: data
- )
- end
-
- describe '#perform' do
- it 'returns if data should not be migrated' do
- allow(migration).to receive(:migrate?).and_return(false)
-
- expect(migration).not_to receive(:find_events)
-
- migration.perform(1, 10)
- end
-
- it 'migrates the range of events if data is to be migrated' do
- event1 = create_push_event(project, author, { commits: [] })
- event2 = create_push_event(project, author, { commits: [] })
-
- allow(migration).to receive(:migrate?).and_return(true)
-
- expect(migration).to receive(:process_event).twice
-
- migration.perform(event1.id, event2.id)
- end
- end
-
- describe '#process_event' do
- it 'processes a regular event' do
- event = double(:event, push_event?: false)
-
- expect(migration).to receive(:replicate_event)
- expect(migration).not_to receive(:create_push_event_payload)
-
- migration.process_event(event)
- end
-
- it 'processes a push event' do
- event = double(:event, push_event?: true)
-
- expect(migration).to receive(:replicate_event)
- expect(migration).to receive(:create_push_event_payload)
-
- migration.process_event(event)
- end
-
- it 'handles an error gracefully' do
- event1 = create_push_event(project, author, { commits: [] })
-
- expect(migration).to receive(:replicate_event).and_call_original
- expect(migration).to receive(:create_push_event_payload).and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
-
- migration.process_event(event1)
-
- expect(described_class::EventForMigration.all.count).to eq(0)
- end
- end
-
- describe '#replicate_event' do
- it 'replicates the event to the "events_for_migration" table' do
- event = create_push_event(
- project,
- author,
- data: { commits: [] },
- title: 'bla'
- )
-
- attributes = event
- .attributes.with_indifferent_access.except(:title, :data)
-
- expect(described_class::EventForMigration)
- .to receive(:create!)
- .with(attributes)
-
- migration.replicate_event(event)
- end
- end
-
- describe '#create_push_event_payload' do
- let(:push_data) do
- {
- commits: [],
- ref: 'refs/heads/master',
- before: '156e0e9adc587a383a7eeb5b21ddecb9044768a8',
- after: '0' * 40,
- total_commits_count: 1
- }
- end
-
- let(:event) do
- create_push_event(project, author, push_data)
- end
-
- before do
- # The foreign key in push_event_payloads at this point points to the
- # "events_for_migration" table so we need to make sure a row exists in
- # said table.
- migration.replicate_event(event)
- end
-
- it 'creates a push event payload for an event' do
- payload = migration.create_push_event_payload(event)
-
- expect(PushEventPayload.count).to eq(1)
- expect(payload.valid?).to eq(true)
- end
-
- it 'does not create push event payloads for removed events' do
- allow(event).to receive(:id).and_return(-1)
-
- expect { migration.create_push_event_payload(event) }.to raise_error(ActiveRecord::InvalidForeignKey)
-
- expect(PushEventPayload.count).to eq(0)
- end
-
- it 'encodes and decodes the commit IDs from and to binary data' do
- payload = migration.create_push_event_payload(event)
- packed = migration.pack(push_data[:before])
-
- expect(payload.commit_from).to eq(packed)
- expect(payload.commit_to).to be_nil
- end
- end
-
- describe '#find_events' do
- it 'returns the events for the given ID range' do
- event1 = create_push_event(project, author, { commits: [] })
- event2 = create_push_event(project, author, { commits: [] })
- event3 = create_push_event(project, author, { commits: [] })
- events = migration.find_events(event1.id, event2.id)
-
- expect(events.length).to eq(2)
- expect(events.pluck(:id)).not_to include(event3.id)
- end
- end
-
- describe '#migrate?' do
- it 'returns true when data should be migrated' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::EventForMigration)
- .to receive(:table_exists?).and_return(true)
-
- expect(migration.migrate?).to eq(true)
- end
-
- it 'returns false if the "events" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
-
- it 'returns false if the "push_event_payloads" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
-
- it 'returns false when the "events_for_migration" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::EventForMigration)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
- end
-
- describe '#pack' do
- it 'packs a SHA1 into a 20 byte binary string' do
- packed = migration.pack('156e0e9adc587a383a7eeb5b21ddecb9044768a8')
-
- expect(packed.bytesize).to eq(20)
- end
-
- it 'returns nil if the input value is nil' do
- expect(migration.pack(nil)).to be_nil
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb b/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb
deleted file mode 100644
index 89b56906ed0..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:stages) { table(:ci_stages) }
- let(:jobs) { table(:ci_builds) }
-
- let(:statuses) do
- {
- created: 0,
- pending: 1,
- running: 2,
- success: 3,
- failed: 4,
- canceled: 5,
- skipped: 6,
- manual: 7
- }
- end
-
- before do
- projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
- pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
- stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
- stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil)
- end
-
- context 'when stage status is known' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:running]
- expect(stages.second.status).to eq statuses[:failed]
- end
- end
-
- context 'when stage status is not known' do
- it 'sets a skipped stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:skipped]
- expect(stages.second.status).to eq statuses[:skipped]
- end
- end
-
- context 'when stage status includes status of a retried job' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true)
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:canceled]
- expect(stages.second.status).to eq statuses[:success]
- end
- end
-
- context 'when some job in the stage is blocked / manual' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed')
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:manual]
- expect(stages.second.status).to eq statuses[:success]
- end
- end
-
- def create_job(project:, pipeline:, stage:, status:, **opts)
- stages = { test: 1, build: 2, deploy: 3 }
-
- jobs.create!(project_id: project, commit_id: pipeline,
- stage_idx: stages[stage.to_sym], stage: stage,
- status: status, **opts)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb b/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
deleted file mode 100644
index ea8bdd48e72..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder, :delete do
- let(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:logger).and_return(Logger.new(nil))
- end
-
- describe '#perform' do
- it 'renames the path of system-uploads' do
- upload = create(:upload, model: create(:project), path: 'uploads/system/project/avatar.jpg')
-
- migration.perform('uploads/system/', 'uploads/-/system/')
-
- expect(upload.reload.path).to eq('uploads/-/system/project/avatar.jpg')
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb b/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb
deleted file mode 100644
index 593486fc56c..00000000000
--- a/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MovePersonalSnippetFiles do
- let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'move_snippet_files_test') }
- let(:old_uploads_dir) { File.join('uploads', 'system', 'personal_snippet') }
- let(:new_uploads_dir) { File.join('uploads', '-', 'system', 'personal_snippet') }
- let(:snippet) do
- snippet = create(:personal_snippet)
- create_upload_for_snippet(snippet)
- snippet.update!(description: markdown_linking_file(snippet))
- snippet
- end
-
- let(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:base_directory) { test_dir }
- end
-
- describe '#perform' do
- it 'moves the file on the disk' do
- expected_path = File.join(test_dir, new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
-
- migration.perform(old_uploads_dir, new_uploads_dir)
-
- expect(File.exist?(expected_path)).to be_truthy
- end
-
- it 'updates the markdown of the snippet' do
- expected_path = File.join(new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
- expected_markdown = "[an upload](#{expected_path})"
-
- migration.perform(old_uploads_dir, new_uploads_dir)
-
- expect(snippet.reload.description).to eq(expected_markdown)
- end
-
- it 'updates the markdown of notes' do
- expected_path = File.join(new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
- expected_markdown = "with [an upload](#{expected_path})"
-
- note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown_linking_file(snippet)}")
-
- migration.perform(old_uploads_dir, new_uploads_dir)
-
- expect(note.reload.note).to eq(expected_markdown)
- end
- end
-
- def create_upload_for_snippet(snippet)
- snippet_path = path_for_file_in_snippet(snippet)
- path = File.join(old_uploads_dir, snippet.id.to_s, snippet_path)
- absolute_path = File.join(test_dir, path)
-
- FileUtils.mkdir_p(File.dirname(absolute_path))
- FileUtils.touch(absolute_path)
-
- create(:upload, model: snippet, path: snippet_path, uploader: PersonalFileUploader)
- end
-
- def path_for_file_in_snippet(snippet)
- secret = "secret#{snippet.id}"
- filename = 'upload.txt'
-
- File.join(secret, filename)
- end
-
- def markdown_linking_file(snippet)
- path = File.join(old_uploads_dir, snippet.id.to_s, path_for_file_in_snippet(snippet))
- "[an upload](#{path})"
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb b/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
deleted file mode 100644
index dfbf1bb681a..00000000000
--- a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange, :migration, schema: 20170921101004 do
- let!(:identities) { table(:identities) }
-
- before do
- # LDAP identities
- (1..4).each do |i|
- identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
- end
-
- # Non-LDAP identity
- identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)
-
- # Another LDAP identity
- identities.create!(id: 6, provider: 'ldapmain', extern_uid: " uid = foo 6, ou = People, dc = example, dc = com ", user_id: 6)
- end
-
- it 'normalizes the LDAP identities in the range' do
- described_class.new.perform(1, 3)
- expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
- expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
- expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
- expect(identities.find(4).extern_uid).to eq(" uid = foo 4, ou = People, dc = example, dc = com ")
- expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
- expect(identities.find(6).extern_uid).to eq(" uid = foo 6, ou = People, dc = example, dc = com ")
-
- described_class.new.perform(4, 6)
- expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
- expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
- expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
- expect(identities.find(4).extern_uid).to eq("uid=foo 4,ou=people,dc=example,dc=com")
- expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
- expect(identities.find(6).extern_uid).to eq("uid=foo 6,ou=people,dc=example,dc=com")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
deleted file mode 100644
index 0e73c8c59c9..00000000000
--- a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
- let(:base1) { projects.create }
-
- let(:base2) { projects.create }
- let(:base2_fork1) { projects.create }
-
- let!(:forked_project_links) { table(:forked_project_links) }
- let!(:fork_networks) { table(:fork_networks) }
- let!(:fork_network_members) { table(:fork_network_members) }
-
- let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
- let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
-
- before do
- # A normal fork link
- forked_project_links.create(id: 1,
- forked_from_project_id: base1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 2,
- forked_from_project_id: base1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 3,
- forked_from_project_id: base2.id,
- forked_to_project_id: base2_fork1.id)
-
- # create a fork of a fork
- forked_project_links.create(id: 4,
- forked_from_project_id: base2_fork1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 5,
- forked_from_project_id: projects.create.id,
- forked_to_project_id: projects.create.id)
-
- # Stub out the calls to the other migrations
- allow(BackgroundMigrationWorker).to receive(:perform_in)
-
- migration.perform(1, 3)
- end
-
- it 'creates the fork network' do
- expect(fork_network1).not_to be_nil
- expect(fork_network2).not_to be_nil
- end
-
- it 'does not create a fork network for a fork-of-fork' do
- # perform the entire batch
- migration.perform(1, 5)
-
- expect(fork_networks.find_by(root_project_id: base2_fork1.id)).to be_nil
- end
-
- it 'creates memberships for the root of fork networks' do
- base1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1.id)
- base2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2.id)
-
- expect(base1_membership).not_to be_nil
- expect(base2_membership).not_to be_nil
- end
-
- it 'creates a fork network for the fork of which the source was deleted' do
- fork = projects.create
- forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
-
- migration.perform(5, 8)
-
- expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
- expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
- expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
- end
-
- it 'schedules a job for inserting memberships for forks-of-forks' do
- delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in).with(delay, "CreateForkNetworkMembershipsRange", [1, 3])
-
- migration.perform(1, 3)
- end
-
- it 'only processes a single batch of links at a time' do
- expect(fork_networks.count).to eq(2)
-
- migration.perform(3, 5)
-
- expect(fork_networks.count).to eq(3)
- end
-
- it 'can be repeated without effect' do
- expect { migration.perform(1, 3) }.not_to change { fork_network_members.count }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
deleted file mode 100644
index 0cb753c5853..00000000000
--- a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
- let(:projects_table) { table(:projects) }
- let(:merge_requests_table) { table(:merge_requests) }
- let(:merge_request_diffs_table) { table(:merge_request_diffs) }
-
- let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
-
- def create_mr!(name, diffs: 0)
- merge_request =
- merge_requests_table.create!(target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: name,
- title: name)
-
- diffs.times do
- merge_request_diffs_table.create!(merge_request_id: merge_request.id)
- end
-
- merge_request
- end
-
- def diffs_for(merge_request)
- merge_request_diffs_table.where(merge_request_id: merge_request.id)
- end
-
- describe '#perform' do
- it 'ignores MRs without diffs' do
- merge_request_without_diff = create_mr!('without_diff')
- mr_id = merge_request_without_diff.id
-
- expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
-
- expect { subject.perform(mr_id, mr_id) }
- .not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
- end
-
- it 'ignores MRs that have a diff ID already set' do
- merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
- diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
- mr_id = merge_request_with_multiple_diffs.id
-
- merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
-
- expect { subject.perform(mr_id, mr_id) }
- .not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
- end
-
- it 'migrates multiple MR diffs to the correct values' do
- merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
-
- subject.perform(merge_requests.first.id, merge_requests.last.id)
-
- merge_requests.each do |merge_request|
- expect(merge_request.reload.latest_merge_request_diff_id)
- .to eq(diffs_for(merge_request).maximum(:id))
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
index 51e16c99688..d88a2097ba2 100644
--- a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -17,15 +17,12 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'build has a deployment' do
- let!(:deployment) { create(:deployment, deployable: build) }
+ let!(:deployment) { create(:deployment, deployable: build, cluster: cluster) }
+ let(:cluster) { nil }
context 'and a cluster to deploy to' do
let(:cluster) { create(:cluster, :group) }
- before do
- allow(build.deployment).to receive(:deployment_platform_cluster).and_return(cluster)
- end
-
it { is_expected.to be_truthy }
context 'and the cluster is not managed' do
@@ -48,28 +45,21 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'and no cluster to deploy to' do
- before do
- expect(deployment.deployment_platform_cluster).to be_nil
- end
-
it { is_expected.to be_falsey }
end
end
end
describe '#complete!' do
- let!(:deployment) { create(:deployment, deployable: build) }
+ let!(:deployment) { create(:deployment, deployable: build, cluster: cluster) }
let(:service) { double(execute: true) }
+ let(:cluster) { nil }
subject { described_class.new(build).complete! }
context 'completion is required' do
let(:cluster) { create(:cluster, :group) }
- before do
- allow(build.deployment).to receive(:deployment_platform_cluster).and_return(cluster)
- end
-
it 'creates a kubernetes namespace' do
expect(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
@@ -83,10 +73,6 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'completion is not required' do
- before do
- expect(deployment.deployment_platform_cluster).to be_nil
- end
-
it 'does not create a namespace' do
expect(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).not_to receive(:new)
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
new file mode 100644
index 00000000000..4d8edfeac80
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cleanup::OrphanJobArtifactFilesBatch do
+ let(:batch_size) { 10 }
+ let(:dry_run) { true }
+
+ subject(:batch) { described_class.new(batch_size: batch_size, dry_run: dry_run) }
+
+ context 'no dry run' do
+ let(:dry_run) { false }
+
+ it 'deletes only orphan job artifacts from disk' do
+ job_artifact = create(:ci_job_artifact, :archive)
+ orphan_artifact = create(:ci_job_artifact, :archive)
+ batch << artifact_path(job_artifact)
+ batch << artifact_path(orphan_artifact)
+ orphan_artifact.delete
+
+ batch.clean!
+
+ expect(batch.artifact_files.count).to eq(2)
+ expect(batch.lost_and_found.count).to eq(1)
+ expect(batch.lost_and_found.first.artifact_id).to eq(orphan_artifact.id)
+ end
+
+ it 'does not mix up job ID and artifact ID' do
+ # take maximum ID of both tables to avoid any collision
+ max_id = [Ci::Build.maximum(:id), Ci::JobArtifact.maximum(:id)].compact.max.to_i
+ job_a = create(:ci_build, id: max_id + 1)
+ job_b = create(:ci_build, id: max_id + 2)
+ # reuse the build IDs for the job artifact IDs, but swap them
+ job_artifact_b = create(:ci_job_artifact, :archive, job: job_b, id: max_id + 1)
+ job_artifact_a = create(:ci_job_artifact, :archive, job: job_a, id: max_id + 2)
+
+ batch << artifact_path(job_artifact_a)
+ batch << artifact_path(job_artifact_b)
+
+ job_artifact_b.delete
+
+ batch.clean!
+
+ expect(File.exist?(job_artifact_a.file.path)).to be_truthy
+ expect(File.exist?(job_artifact_b.file.path)).to be_falsey
+ end
+ end
+
+ context 'with dry run' do
+ it 'does not remove files' do
+ job_artifact = create(:ci_job_artifact, :archive)
+ batch << job_artifact.file.path
+ job_artifact.delete
+
+ expect(batch).not_to receive(:remove_file!)
+
+ batch.clean!
+
+ expect(File.exist?(job_artifact.file.path)).to be_truthy
+ end
+ end
+
+ def artifact_path(job_artifact)
+ Pathname.new(job_artifact.file.path).parent.to_s
+ end
+end
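
A rough sketch of how the batch object exercised above might be driven. The <<, clean!, artifact_files, and lost_and_found calls mirror the spec; the candidate_dirs collection is a placeholder.

# Sketch only: feed candidate artifact directories into a dry-run batch and inspect orphans.
batch = Gitlab::Cleanup::OrphanJobArtifactFilesBatch.new(batch_size: 100, dry_run: true)

candidate_dirs.each { |dir| batch << dir } # placeholder list of artifact directories on disk

batch.clean! # dry run: orphans are detected but nothing is deleted

puts "scanned #{batch.artifact_files.count} directories"
batch.lost_and_found.each do |orphan|
  puts "orphan artifact ##{orphan.artifact_id}"
end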
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
new file mode 100644
index 00000000000..974cc2c4660
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cleanup::OrphanJobArtifactFiles do
+ let(:null_logger) { Logger.new('/dev/null') }
+ subject(:cleanup) { described_class.new(logger: null_logger) }
+
+ before do
+ allow(null_logger).to receive(:info)
+ end
+
+ it 'passes on dry_run' do
+ expect(Gitlab::Cleanup::OrphanJobArtifactFilesBatch)
+ .to receive(:new)
+ .with(dry_run: false, batch_size: anything, logger: anything)
+ .at_least(:once)
+ .and_call_original
+
+ described_class.new(dry_run: false).run!
+ end
+
+ it 'errors when invalid niceness is given' do
+ cleanup = described_class.new(logger: null_logger, niceness: 'FooBar')
+
+ expect(null_logger).to receive(:error).with(/FooBar/)
+
+ cleanup.run!
+ end
+
+ it 'finds artifacts on disk' do
+ artifact = create(:ci_job_artifact, :archive)
+
+ expect(cleanup).to receive(:find_artifacts).and_yield(artifact.file.path)
+ cleanup.run!
+ end
+
+ it 'stops when limit is reached' do
+ cleanup = described_class.new(limit: 1)
+
+ mock_artifacts_found(cleanup, 'tmp/foo/bar/1', 'tmp/foo/bar/2')
+
+ cleanup.run!
+
+ expect(cleanup.total_found).to eq(1)
+ end
+
+ it 'cleans even if batch is not full' do
+ mock_artifacts_found(cleanup, 'tmp/foo/bar/1')
+
+ expect(cleanup).to receive(:clean_batch!).and_call_original
+ cleanup.run!
+ end
+
+ it 'cleans in batches' do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ mock_artifacts_found(cleanup, 'tmp/foo/bar/1', 'tmp/foo/bar/2', 'tmp/foo/bar/3')
+
+ expect(cleanup).to receive(:clean_batch!).twice.and_call_original
+ cleanup.run!
+ end
+
+ def mock_artifacts_found(cleanup, *files)
+ mock = allow(cleanup).to receive(:find_artifacts)
+
+ files.each { |file| mock.and_yield(file) }
+ end
+end
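
A sketch of running the cleanup directly, using only the options the spec above exercises (dry_run, limit, logger); anything beyond those is an assumption.

require 'logger'

# Sketch only: a dry run that reports up to 1000 orphaned artifact directories.
cleanup = Gitlab::Cleanup::OrphanJobArtifactFiles.new(
  dry_run: true,             # report, do not delete
  limit: 1000,               # stop once this many orphans are found
  logger: Logger.new(STDOUT)
)

cleanup.run!
puts "orphans found: #{cleanup.total_found}"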
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 3cf3d032bf4..7409572288c 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -583,6 +583,24 @@ describe Gitlab::Database::MigrationHelpers do
model.add_column_with_default(:projects, :foo, :integer, default: 10, limit: 8)
end
end
+
+ it 'adds a column with an array default value for a jsonb type' do
+ create(:project)
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:transaction).and_yield
+ expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '[{"foo":"json"}]').and_call_original
+
+ model.add_column_with_default(:projects, :foo, :jsonb, default: [{ foo: "json" }])
+ end
+
+ it 'adds a column with an object default value for a jsonb type' do
+ create(:project)
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:transaction).and_yield
+ expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '{"foo":"json"}').and_call_original
+
+ model.add_column_with_default(:projects, :foo, :jsonb, default: { foo: "json" })
+ end
end
context 'inside a transaction' do
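
The new examples above cover add_column_with_default serializing Hash and Array defaults to JSON for jsonb columns. A migration sketch under that assumption follows; the migration name, column, and Rails version are placeholders.

# Sketch only: using the helper with a jsonb default, as the new examples describe.
class AddSettingsToProjects < ActiveRecord::Migration[5.2]
  include Gitlab::Database::MigrationHelpers

  # add_column_with_default updates rows in batches, so it must run outside a transaction.
  disable_ddl_transaction!

  def up
    # Hash (or Array) defaults are serialized to JSON before the batched update.
    add_column_with_default(:projects, :settings, :jsonb, default: { "enabled" => true })
  end

  def down
    remove_column(:projects, :settings)
  end
end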
diff --git a/spec/lib/gitlab/diff/lines_unfolder_spec.rb b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
index 8a470e12d04..3134ff3d817 100644
--- a/spec/lib/gitlab/diff/lines_unfolder_spec.rb
+++ b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
@@ -842,4 +842,37 @@ describe Gitlab::Diff::LinesUnfolder do
end
end
end
+
+ context 'positioned on an image' do
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ base_sha: '1c59dfa64afbea8c721bb09a06a9d326c952ea19',
+ start_sha: '1c59dfa64afbea8c721bb09a06a9d326c952ea19',
+ head_sha: '1487062132228de836236c522fe52fed4980a46c',
+ old_path: 'image.jpg',
+ new_path: 'image.jpg',
+ position_type: 'image'
+ )
+ end
+
+ before do
+ allow(old_blob).to receive(:binary?).and_return(binary?)
+ end
+
+ context 'diff file is not text' do
+ let(:binary?) { true }
+
+ it 'returns nil' do
+ expect(subject.unfolded_diff_lines).to be_nil
+ end
+ end
+
+ context 'diff file is text' do
+ let(:binary?) { false }
+
+ it 'returns nil' do
+ expect(subject.unfolded_diff_lines).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
index 20842f55014..50138d272c4 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
@@ -67,7 +67,7 @@ describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
describe '#authorize!' do
- it 'does not raise an error' do
+ it 'raises an error' do
expect { loading_resource.authorize!(project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
diff --git a/spec/lib/gitlab/graphql/copy_field_description_spec.rb b/spec/lib/gitlab/graphql/copy_field_description_spec.rb
new file mode 100644
index 00000000000..e7462c5b954
--- /dev/null
+++ b/spec/lib/gitlab/graphql/copy_field_description_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::CopyFieldDescription do
+ subject { Class.new.include(described_class) }
+
+ describe '.copy_field_description' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name "TestType"
+
+ field :field_name, GraphQL::STRING_TYPE, null: true, description: 'Foo'
+ end
+ end
+
+ it 'returns the correct description' do
+ expect(subject.copy_field_description(type, :field_name)).to eq('Foo')
+ end
+ end
+end
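
A sketch of reusing a field description via the module tested above; MirrorType, SourceType, and the reused field are placeholders standing in for the spec's anonymous TestType.

# Sketch only: copying a description from another GraphQL type's field.
module Types
  class MirrorType < BaseObject
    include Gitlab::Graphql::CopyFieldDescription

    graphql_name 'MirrorType'

    # Reuses the description string defined on SourceType's :field_name field.
    field :field_name, GraphQL::STRING_TYPE, null: true,
          description: copy_field_description(Types::SourceType, :field_name)
  end
end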
diff --git a/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb b/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
new file mode 100644
index 00000000000..91e90315b3e
--- /dev/null
+++ b/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::FindArgumentInParent do
+ describe '#find' do
+ def build_node(parent = nil, args: {})
+ props = { irep_node: double(arguments: args) }
+ props[:parent] = parent if parent # The root node shouldn't respond to parent
+
+ double(props)
+ end
+
+ let(:parent) do
+ build_node(
+ build_node(
+ build_node(
+ build_node,
+ args: { myArg: 1 }
+ )
+ )
+ )
+ end
+ let(:arg_name) { :my_arg }
+
+ it 'searches parents and returns the argument' do
+ expect(described_class.find(parent, :my_arg)).to eq(1)
+ end
+
+ it 'can find argument when passed in as both Ruby and GraphQL-formatted symbols and strings' do
+ [:my_arg, :myArg, 'my_arg', 'myArg'].each do |arg|
+ expect(described_class.find(parent, arg)).to eq(1)
+ end
+ end
+
+ it 'returns nil if no arguments found in parents' do
+ expect(described_class.find(parent, :bar)).to eq(nil)
+ end
+
+ it 'can limit the depth it searches to' do
+ expect(described_class.find(parent, :my_arg, limit_depth: 1)).to eq(nil)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb
new file mode 100644
index 00000000000..927476cc655
--- /dev/null
+++ b/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe Gitlab::Graphql::Loaders::PipelineForShaLoader do
+ include GraphqlHelpers
+
+ describe '#find_last' do
+ it 'batch-resolves latest pipeline' do
+ project = create(:project, :repository)
+ pipeline1 = create(:ci_pipeline, project: project, ref: project.default_branch, sha: project.commit.sha)
+ pipeline2 = create(:ci_pipeline, project: project, ref: project.default_branch, sha: project.commit.sha)
+ pipeline3 = create(:ci_pipeline, project: project, ref: 'improve/awesome', sha: project.commit('improve/awesome').sha)
+
+ result = batch(max_queries: 1) do
+ [pipeline1.sha, pipeline3.sha].map { |sha| described_class.new(project, sha).find_last }
+ end
+
+ expect(result).to contain_exactly(pipeline2, pipeline3)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 7a250603b6b..7baa52ffb4f 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -397,6 +397,7 @@ project:
- incident_management_setting
- merge_trains
- designs
+- project_aliases
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json
index 6512fe80a3b..8be074f4b9b 100644
--- a/spec/lib/gitlab/import_export/project.json
+++ b/spec/lib/gitlab/import_export/project.json
@@ -6760,7 +6760,7 @@
},
{
"id": 95,
- "title": "JIRA",
+ "title": "Jira",
"project_id": 5,
"created_at": "2016-06-14T15:01:51.255Z",
"updated_at": "2016-06-14T15:01:51.255Z",
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index a406c25b1d8..28b187c3676 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -123,6 +123,7 @@ Release:
- project_id
- created_at
- updated_at
+- released_at
Releases::Link:
- id
- release_id
@@ -429,6 +430,7 @@ Service:
- confidential_issues_events
- confidential_note_events
- deployment_events
+- description
ProjectHook:
- id
- url
diff --git a/spec/lib/gitlab/issuable_sorter_spec.rb b/spec/lib/gitlab/issuable_sorter_spec.rb
index 642a6cb6caa..5bd76bc6081 100644
--- a/spec/lib/gitlab/issuable_sorter_spec.rb
+++ b/spec/lib/gitlab/issuable_sorter_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::IssuableSorter do
expect(described_class.sort(project1, unsorted)).to eq(sorted)
end
- context 'for JIRA issues' do
+ context 'for Jira issues' do
let(:sorted) do
[ExternalIssue.new('JIRA-1', project1),
ExternalIssue.new('JIRA-2', project1),
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index a0c664da185..9163019514b 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -132,6 +132,7 @@ describe Gitlab::LegacyGithubImport::Importer do
body: 'Release v1.0.0',
draft: false,
created_at: created_at,
+ published_at: created_at,
updated_at: updated_at,
url: "#{api_root}/repos/octocat/Hello-World/releases/1"
)
@@ -144,6 +145,7 @@ describe Gitlab::LegacyGithubImport::Importer do
body: nil,
draft: false,
created_at: created_at,
+ published_at: created_at,
updated_at: updated_at,
url: "#{api_root}/repos/octocat/Hello-World/releases/2"
)
diff --git a/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
index c57b96fb00d..534cf219520 100644
--- a/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::LegacyGithubImport::ReleaseFormatter do
let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:octocat) { double(id: 123456, login: 'octocat') }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
+ let(:published_at) { DateTime.strptime('2011-01-26T20:00:00Z') }
let(:base_data) do
{
@@ -11,7 +12,7 @@ describe Gitlab::LegacyGithubImport::ReleaseFormatter do
name: 'First release',
draft: false,
created_at: created_at,
- published_at: created_at,
+ published_at: published_at,
body: 'Release v1.0.0'
}
end
@@ -28,6 +29,7 @@ describe Gitlab::LegacyGithubImport::ReleaseFormatter do
name: 'First release',
description: 'Release v1.0.0',
created_at: created_at,
+ released_at: published_at,
updated_at: created_at
}
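
Together with the published_at values added to the importer doubles above, this spec now expects the GitHub release's publish timestamp to populate the GitLab release's released_at instead of reusing created_at. A minimal sketch of that mapping, assuming the hash shape shown in the spec (not necessarily the exact formatter implementation):

    class ReleaseFormatterSketch
      def initialize(project, raw_release)
        @project = project
        @raw = raw_release
      end

      # Attributes consumed when building the GitLab release record.
      def attributes
        {
          project: @project,
          name: @raw.name,
          description: @raw.body,
          created_at: @raw.created_at,
          released_at: @raw.published_at, # previously mirrored created_at
          updated_at: @raw.updated_at
        }
      end
    end
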
diff --git a/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb b/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb
index eecd257b38d..79a78df44ae 100644
--- a/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb
@@ -6,13 +6,19 @@ describe Gitlab::Metrics::Dashboard::DynamicDashboardService, :use_clean_rails_m
include MetricsDashboardHelpers
set(:project) { build(:project) }
+ set(:user) { create(:user) }
set(:environment) { create(:environment, project: project) }
+ before do
+ project.add_maintainer(user)
+ end
+
describe '#get_dashboard' do
- let(:service_params) { [project, nil, { environment: environment, dashboard_path: nil }] }
+ let(:service_params) { [project, user, { environment: environment, dashboard_path: nil }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
it_behaves_like 'valid embedded dashboard service response'
+ it_behaves_like 'raises error for users with insufficient permissions'
it 'caches the unprocessed dashboard for subsequent calls' do
expect(YAML).to receive(:safe_load).once.and_call_original
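
The dashboard service specs now run as a real project maintainer instead of passing nil for the user, and pull in a shared example covering the permission check. That shared example lives in the spec support files and is not part of this diff; a hedged sketch of what it plausibly does, with illustrative names only:

    RSpec.shared_examples 'raises error for users with insufficient permissions' do
      context 'when the user is not a member of the project' do
        let(:user) { create(:user) } # overrides the maintainer defined by the including spec

        it 'refuses to serve the dashboard' do
          expect { service_call }.to raise_error(StandardError)
        end
      end
    end
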
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index b9a5ee9c2b3..d8ed54c0248 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -6,12 +6,17 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
include MetricsDashboardHelpers
set(:project) { build(:project) }
+ set(:user) { create(:user) }
set(:environment) { create(:environment, project: project) }
let(:system_dashboard_path) { Gitlab::Metrics::Dashboard::SystemDashboardService::SYSTEM_DASHBOARD_PATH}
+ before do
+ project.add_maintainer(user)
+ end
+
describe '.find' do
let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:service_call) { described_class.find(project, nil, environment, dashboard_path: dashboard_path) }
+ let(:service_call) { described_class.find(project, user, environment, dashboard_path: dashboard_path) }
it_behaves_like 'misconfigured dashboard service response', :not_found
@@ -41,13 +46,13 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
end
context 'when no dashboard is specified' do
- let(:service_call) { described_class.find(project, nil, environment) }
+ let(:service_call) { described_class.find(project, user, environment) }
it_behaves_like 'valid dashboard service response'
end
context 'when the dashboard is expected to be embedded' do
- let(:service_call) { described_class.find(project, nil, environment, dashboard_path: nil, embedded: true) }
+ let(:service_call) { described_class.find(project, user, environment, dashboard_path: nil, embedded: true) }
it_behaves_like 'valid embedded dashboard service response'
end
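
For reference, the call shape the finder spec exercises after this change (arguments taken directly from the spec above; only the comment is an interpretation):

    # The user argument now matters: callers without sufficient permissions get
    # an error response, per the shared example referenced by the service specs.
    Gitlab::Metrics::Dashboard::Finder.find(
      project,
      user,
      environment,
      dashboard_path: '.gitlab/dashboards/test.yml'
    )
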
diff --git a/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb b/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb
index 57d82421b5d..468e8ec9ef2 100644
--- a/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb
@@ -5,8 +5,8 @@ require 'rails_helper'
describe Gitlab::Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { build(:user) }
- set(:project) { build(:project) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
set(:environment) { create(:environment, project: project) }
before do
@@ -22,6 +22,8 @@ describe Gitlab::Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_m
it_behaves_like 'misconfigured dashboard service response', :not_found
end
+ it_behaves_like 'raises error for users with insufficient permissions'
+
context 'when the dashboard exists' do
let(:project) { project_with_dashboard(dashboard_path) }
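
The switch from build to create in the set(:user) / set(:project) blocks matters because the permission setup needs persisted records; an in-memory object cannot be referenced by a membership row. Illustrative FactoryBot behaviour, not code from this diff:

    user = build(:user)   # instantiated in memory only; user.id is nil
    user = create(:user)  # persisted, so project.add_maintainer(user) can insert a members row
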
diff --git a/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb b/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb
index 2af745bd4d7..13f22dd01c5 100644
--- a/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb
@@ -5,15 +5,21 @@ require 'spec_helper'
describe Gitlab::Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
set(:environment) { create(:environment, project: project) }
+ before do
+ project.add_maintainer(user)
+ end
+
describe 'get_dashboard' do
let(:dashboard_path) { described_class::SYSTEM_DASHBOARD_PATH }
- let(:service_params) { [project, nil, { environment: environment, dashboard_path: dashboard_path }] }
+ let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
it_behaves_like 'valid dashboard service response'
+ it_behaves_like 'raises error for users with insufficient permissions'
it 'caches the unprocessed dashboard for subsequent calls' do
expect(YAML).to receive(:safe_load).once.and_call_original
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
index b0603d96eb2..da87df15746 100644
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ b/spec/lib/gitlab/metrics/system_spec.rb
@@ -52,13 +52,13 @@ describe Gitlab::Metrics::System do
end
describe '.cpu_time' do
- it 'returns a Fixnum' do
+ it 'returns a Float' do
expect(described_class.cpu_time).to be_an(Float)
end
end
describe '.real_time' do
- it 'returns a Fixnum' do
+ it 'returns a Float' do
expect(described_class.real_time).to be_an(Float)
end
end
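
The renamed examples only bring the descriptions in line with what the assertions already check: monotonic and CPU clock readings are floating-point seconds, and Fixnum was folded into Integer in Ruby 2.4 anyway. Background illustration in plain Ruby (not GitLab code):

    Process.clock_gettime(Process::CLOCK_MONOTONIC)           # => 12345.678901 (Float, seconds)
    Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID)  # => 0.4321 (Float, CPU seconds)
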
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index d982053d92e..7513dbeeb6f 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -197,14 +197,14 @@ describe Gitlab::ReferenceExtractor do
let(:issue) { create(:issue, project: project) }
context 'when GitLab issues are enabled' do
- it 'returns both JIRA and internal issues' do
+ it 'returns both Jira and internal issues' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #{issue.to_reference}")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project),
issue]
end
- it 'returns only JIRA issues if the internal one does not exists' do
+ it 'returns only Jira issues if the internal one does not exist' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #999")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project)]
@@ -217,7 +217,7 @@ describe Gitlab::ReferenceExtractor do
project.save!
end
- it 'returns only JIRA issues' do
+ it 'returns only Jira issues' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #{issue.to_reference}")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project)]