gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-09-14 15:12:34 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-09-14 15:12:34 +0300
commit    16daf112d6cfe2c87d8837382a00d88aa8c0ff5c (patch)
tree      017d43acafea2928550fe7c8fdbb7cf36335ba9f /spec/lib/gitlab
parent    3cb798d80b6b5235b5f5febaaacef410e75c2963 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--  spec/lib/gitlab/audit/type/definition_spec.rb | 219
-rw-r--r--  spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/git/commit_stats_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/git/compare_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/git/diff_collection_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/client_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb | 91
-rw-r--r--  spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb | 222
-rw-r--r--  spec/lib/gitlab/github_import/representation/protected_branch_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/sidekiq_queue_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/utils/execution_tracker_spec.rb | 24
14 files changed, 660 insertions(+), 25 deletions(-)
diff --git a/spec/lib/gitlab/audit/type/definition_spec.rb b/spec/lib/gitlab/audit/type/definition_spec.rb
new file mode 100644
index 00000000000..9f4282a4ec0
--- /dev/null
+++ b/spec/lib/gitlab/audit/type/definition_spec.rb
@@ -0,0 +1,219 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::Type::Definition do
+ let(:attributes) do
+ { name: 'group_deploy_token_destroyed',
+ description: 'Group deploy token is deleted',
+ introduced_by_issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/1',
+ introduced_by_mr: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1',
+ group: 'govern::compliance',
+ milestone: '15.4',
+ saved_to_database: true,
+ streamed: true }
+ end
+
+ let(:path) { File.join('types', 'group_deploy_token_destroyed.yml') }
+ let(:definition) { described_class.new(path, attributes) }
+ let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+
+ describe '#key' do
+ subject { definition.key }
+
+ it 'returns a symbol from name' do
+ is_expected.to eq(:group_deploy_token_destroyed)
+ end
+ end
+
+ describe '#validate!', :aggregate_failures do
+ using RSpec::Parameterized::TableSyntax
+
+ # rubocop:disable Layout/LineLength
+ where(:param, :value, :result) do
+ :path | 'audit_event/types/invalid.yml' | /Audit event type 'group_deploy_token_destroyed' has an invalid path/
+ :name | nil | %r{property '/name' is not of type: string}
+ :description | nil | %r{property '/description' is not of type: string}
+ :introduced_by_issue | nil | %r{property '/introduced_by_issue' is not of type: string}
+ :introduced_by_mr | nil | %r{property '/introduced_by_mr' is not of type: string}
+ :group | nil | %r{property '/group' is not of type: string}
+ :milestone | nil | %r{property '/milestone' is not of type: string}
+ end
+ # rubocop:enable Layout/LineLength
+
+ with_them do
+ let(:params) { attributes.merge(path: path) }
+
+ before do
+ params[param] = value
+ end
+
+ it do
+ expect do
+ described_class.new(
+ params[:path], params.except(:path)
+ ).validate!
+ end.to raise_error(result)
+ end
+ end
+
+ context 'when both saved_to_database and streamed are false' do
+ let(:params) { attributes.merge({ path: path, saved_to_database: false, streamed: false }) }
+
+ it 'raises an exception' do
+ expect do
+ described_class.new(
+ params[:path], params.except(:path)
+ ).validate!
+ end.to raise_error(/root is invalid: error_type=not/)
+ end
+ end
+ end
+
+ describe '.paths' do
+ it 'returns at least one path' do
+ expect(described_class.paths).not_to be_empty
+ end
+ end
+
+ describe '.get' do
+ before do
+ allow(described_class).to receive(:definitions) do
+ { definition.key => definition }
+ end
+ end
+
+ context 'when audit event type is not defined' do
+ let(:undefined_audit_event_type) { 'undefined_audit_event_type' }
+
+ it 'returns nil' do
+ expect(described_class.get(undefined_audit_event_type)).to be nil
+ end
+ end
+
+ context 'when audit event type is defined' do
+ let(:audit_event_type) { 'group_deploy_token_destroyed' }
+
+ it 'returns an instance of Gitlab::Audit::Type::Definition' do
+ expect(described_class.get(audit_event_type)).to be_an_instance_of(described_class)
+ end
+
+ it 'returns the properties as defined for that audit event type', :aggregate_failures do
+ audit_event_type_definition = described_class.get(audit_event_type)
+
+ expect(audit_event_type_definition.name).to eq "group_deploy_token_destroyed"
+ expect(audit_event_type_definition.description).to eq "Group deploy token is deleted"
+ expect(audit_event_type_definition.group).to eq "govern::compliance"
+ expect(audit_event_type_definition.milestone).to eq "15.4"
+ expect(audit_event_type_definition.saved_to_database).to be true
+ expect(audit_event_type_definition.streamed).to be true
+ end
+ end
+ end
+
+ describe '.load_from_file' do
+ it 'properly loads a definition from file' do
+ expect_file_read(path, content: yaml_content)
+
+ expect(described_class.send(:load_from_file, path).attributes)
+ .to eq(definition.attributes)
+ end
+
+ context 'for missing file' do
+ let(:path) { 'missing/audit_events/type/file.yml' }
+
+ it 'raises exception' do
+ expect do
+ described_class.send(:load_from_file, path)
+ end.to raise_error(/Invalid definition for/)
+ end
+ end
+
+ context 'for invalid definition' do
+ it 'raises exception' do
+ expect_file_read(path, content: '{}')
+
+ expect do
+ described_class.send(:load_from_file, path)
+ end.to raise_error(%r{property '/name' is not of type: string})
+ end
+ end
+ end
+
+ describe '.load_all!' do
+ let(:store1) { Dir.mktmpdir('path1') }
+ let(:store2) { Dir.mktmpdir('path2') }
+ let(:definitions) { {} }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(store1, '**', '*.yml'),
+ File.join(store2, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.send(:load_all!) }
+
+ after do
+ FileUtils.rm_rf(store1)
+ FileUtils.rm_rf(store2)
+ end
+
+ it "when there are no audit event types a list of definitions is empty" do
+ is_expected.to be_empty
+ end
+
+ it "when there's a single audit event type it properly loads them" do
+ write_audit_event_type(store1, path, yaml_content)
+
+ is_expected.to be_one
+ end
+
+ it "when the same audit event type is stored multiple times raises exception" do
+ write_audit_event_type(store1, path, yaml_content)
+ write_audit_event_type(store2, path, yaml_content)
+
+ expect { subject }
+ .to raise_error(/Audit event type 'group_deploy_token_destroyed' is already defined/)
+ end
+
+ it "when one of the YAMLs is invalid it does raise exception" do
+ write_audit_event_type(store1, path, '{}')
+
+ expect { subject }.to raise_error(/Invalid definition for .* '' must match the filename/)
+ end
+ end
+
+ describe '.definitions' do
+ let(:store1) { Dir.mktmpdir('path1') }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(store1, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.definitions }
+
+ after do
+ FileUtils.rm_rf(store1)
+ end
+
+ it "loads the definitions for all the audit event types" do
+ write_audit_event_type(store1, path, yaml_content)
+
+ is_expected.to be_one
+ end
+ end
+
+ def write_audit_event_type(store, path, content)
+ path = File.join(store, path)
+ dir = File.dirname(path)
+ FileUtils.mkdir_p(dir)
+ File.write(path, content)
+ end
+end
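
Editorial note: the yaml_content fixture in the spec above is simply attributes.deep_stringify_keys.to_yaml, read back by .load_from_file. As a point of reference, a minimal stand-alone sketch of that round trip (plain Ruby with stdlib YAML only; the string keys below stand in for deep_stringify_keys, and nothing here is GitLab's own loader):

    require 'yaml'

    # Mirrors the `attributes` hash from the spec; the YAML definition file for an
    # audit event type carries the same fields, keyed by strings.
    attributes = {
      'name' => 'group_deploy_token_destroyed',
      'description' => 'Group deploy token is deleted',
      'introduced_by_issue' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/1',
      'introduced_by_mr' => 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1',
      'group' => 'govern::compliance',
      'milestone' => '15.4',
      'saved_to_database' => true,
      'streamed' => true
    }

    yaml_content = attributes.to_yaml        # what expect_file_read feeds to the loader in the spec
    parsed = YAML.safe_load(yaml_content)    # what a loader would see after reading the file
    raise 'round-trip mismatch' unless parsed == attributes
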
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
index b5137f9db6b..e1135f4d546 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
include RepoHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:commit) { project.commit("5d6ed1503801ca9dc28e95eeb85a7cf863527aee") }
let(:diffs) { commit.raw_diffs.to_a }
let(:diff) { diffs.first }
diff --git a/spec/lib/gitlab/git/commit_stats_spec.rb b/spec/lib/gitlab/git/commit_stats_spec.rb
index 29d3909efec..81d9dda4b8f 100644
--- a/spec/lib/gitlab/git/commit_stats_spec.rb
+++ b/spec/lib/gitlab/git/commit_stats_spec.rb
@@ -2,17 +2,19 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::CommitStats, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Gitlab::Git::Commit.find(repository, SeedRepo::Commit::ID) }
+RSpec.describe Gitlab::Git::CommitStats do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
+
+ let(:commit) { Gitlab::Git::Commit.find(repository, TestEnv::BRANCH_SHA['feature']) }
def verify_stats!
stats = described_class.new(repository, commit)
expect(stats).to have_attributes(
- additions: eq(11),
- deletions: eq(6),
- total: eq(17)
+ additions: eq(5),
+ deletions: eq(0),
+ total: eq(5)
)
end
@@ -21,7 +23,7 @@ RSpec.describe Gitlab::Git::CommitStats, :seed_helper do
verify_stats!
- expect(Rails.cache.fetch("commit_stats:group/project:#{commit.id}")).to eq([11, 6])
+ expect(Rails.cache.fetch("commit_stats:#{repository.gl_project_path}:#{commit.id}")).to eq([5, 0])
expect(repository.gitaly_commit_client).not_to receive(:commit_stats)
diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb
index 51043355ede..e8c683cf8aa 100644
--- a/spec/lib/gitlab/git/compare_spec.rb
+++ b/spec/lib/gitlab/git/compare_spec.rb
@@ -2,8 +2,9 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Compare, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+RSpec.describe Gitlab::Git::Compare do
+ let_it_be(:repository) { create(:project, :repository).repository.raw }
+
let(:compare) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: false) }
let(:compare_straight) { Gitlab::Git::Compare.new(repository, SeedRepo::BigCommit::ID, SeedRepo::Commit::ID, straight: true) }
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 0e3e92e03cf..7fa5bd8a92b 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
+RSpec.describe Gitlab::Git::DiffCollection do
before do
stub_const('MutatingConstantIterator', Class.new)
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 03d1c125e36..747611a59e6 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'json'
require 'tempfile'
-RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
+RSpec.describe Gitlab::Git::RuggedImpl::UseRugged do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:feature_flag_name) { wrapper.rugged_feature_keys.first }
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 2bd3910ad87..4eb0f7ac711 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -98,6 +98,30 @@ RSpec.describe Gitlab::GithubImport::Client do
end
end
+ describe '#branches' do
+ it 'returns the branches' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:branches, 'foo/bar')
+
+ client.branches('foo/bar')
+ end
+ end
+
+ describe '#branch_protection' do
+ it 'returns the protection details for the given branch' do
+ client = described_class.new('foo')
+
+ expect(client.octokit)
+ .to receive(:branch_protection).with('org/repo', 'bar')
+ expect(client).to receive(:with_rate_limit).and_yield
+
+ client.branch_protection('org/repo', 'bar')
+ end
+ end
+
describe '#each_page' do
let(:client) { described_class.new('foo') }
let(:object1) { double(:object1) }
diff --git a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
new file mode 100644
index 00000000000..6dc6db739f4
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
+ subject(:importer) { described_class.new(github_protected_branch, project, client) }
+
+ let(:allow_force_pushes_on_github) { true }
+ let(:github_protected_branch) do
+ Gitlab::GithubImport::Representation::ProtectedBranch.new(
+ id: 'protection',
+ allow_force_pushes: allow_force_pushes_on_github
+ )
+ end
+
+ let(:project) { create(:project, :repository) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+
+ describe '#execute' do
+ let(:create_service) { instance_double('ProtectedBranches::CreateService') }
+
+ shared_examples 'create branch protection by the strictest ruleset' do
+ let(:expected_ruleset) do
+ {
+ name: 'protection',
+ push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ allow_force_push: expected_allow_force_push
+ }
+ end
+
+ it 'calls service with the correct arguments' do
+ expect(ProtectedBranches::CreateService).to receive(:new).with(
+ project,
+ project.creator,
+ expected_ruleset
+ ).and_return(create_service)
+
+ expect(create_service).to receive(:execute).with(skip_authorization: true)
+ importer.execute
+ end
+
+ it 'creates protected branch and access levels for given github rule' do
+ expect { importer.execute }.to change(ProtectedBranch, :count).by(1)
+ .and change(ProtectedBranch::PushAccessLevel, :count).by(1)
+ .and change(ProtectedBranch::MergeAccessLevel, :count).by(1)
+ end
+ end
+
+ context 'when branch is protected on GitLab' do
+ before do
+ create(
+ :protected_branch,
+ project: project,
+ name: 'protect*',
+ allow_force_push: allow_force_pushes_on_gitlab
+ )
+ end
+
+ context 'when branch protection rule on Gitlab is stricter than on Github' do
+ let(:allow_force_pushes_on_github) { true }
+ let(:allow_force_pushes_on_gitlab) { false }
+ let(:expected_allow_force_push) { false }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when branch protection rule on Github is stricter than on Gitlab' do
+ let(:allow_force_pushes_on_github) { false }
+ let(:allow_force_pushes_on_gitlab) { true }
+ let(:expected_allow_force_push) { false }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when branch protection rules on Github and Gitlab are the same' do
+ let(:allow_force_pushes_on_github) { true }
+ let(:allow_force_pushes_on_gitlab) { true }
+ let(:expected_allow_force_push) { true }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
+ context 'when branch is not protected on GitLab' do
+ let(:expected_allow_force_push) { true }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+end
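
Editorial note: the four contexts above pin down a simple truth table for allow_force_push: force pushes survive the import only when both the GitHub rule and any pre-existing GitLab rule allow them, and a branch with no GitLab rule imposes no restriction of its own. A minimal stand-alone sketch of that "strictest ruleset wins" behaviour (a hypothetical helper written for illustration, not the importer's actual code):

    # Force pushes stay allowed only when both sides allow them; an unprotected
    # GitLab branch places no restriction of its own.
    def strictest_allow_force_push(allow_on_github:, allow_on_gitlab: true)
      allow_on_github && allow_on_gitlab
    end

    strictest_allow_force_push(allow_on_github: true,  allow_on_gitlab: false) # => false
    strictest_allow_force_push(allow_on_github: false, allow_on_gitlab: true)  # => false
    strictest_allow_force_push(allow_on_github: true,  allow_on_gitlab: true)  # => true
    strictest_allow_force_push(allow_on_github: true)                          # => true (no GitLab rule)
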
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
new file mode 100644
index 00000000000..9266b1b0585
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -0,0 +1,222 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
+ subject(:importer) { described_class.new(project, client, parallel: parallel) }
+
+ let(:project) { instance_double('Project', id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:parallel) { true }
+
+ let(:branches) do
+ branch = Struct.new(:name, :protection, keyword_init: true)
+ protection = Struct.new(:enabled, keyword_init: true)
+
+ [
+ branch.new(name: 'main', protection: protection.new(enabled: false)),
+ branch.new(name: 'staging', protection: protection.new(enabled: true))
+ ]
+ end
+
+ let(:github_protection_rule) do
+ response = Struct.new(:name, :url, :required_signatures, :enforce_admins, :required_linear_history,
+ :allow_force_pushes, :allow_deletion, :block_creations, :required_conversation_resolution,
+ keyword_init: true
+ )
+ required_signatures = Struct.new(:url, :enabled, keyword_init: true)
+ enforce_admins = Struct.new(:url, :enabled, keyword_init: true)
+ allow_option = Struct.new(:enabled, keyword_init: true)
+ response.new(
+ name: 'main',
+ url: 'https://example.com/branches/main/protection',
+ required_signatures: required_signatures.new(
+ url: 'https://example.com/branches/main/protection/required_signatures',
+ enabled: false
+ ),
+ enforce_admins: enforce_admins.new(
+ url: 'https://example.com/branches/main/protection/enforce_admins',
+ enabled: false
+ ),
+ required_linear_history: allow_option.new(
+ enabled: false
+ ),
+ allow_force_pushes: allow_option.new(
+ enabled: false
+ ),
+ allow_deletion: allow_option.new(
+ enabled: false
+ ),
+ block_creations: allow_option.new(
+ enabled: true
+ ),
+ required_conversation_resolution: allow_option.new(
+ enabled: false
+ )
+ )
+ end
+
+ describe '#parallel?' do
+ context 'when running in parallel mode' do
+ it { expect(importer).to be_parallel }
+ end
+
+ context 'when running in sequential mode' do
+ let(:parallel) { false }
+
+ it { expect(importer).not_to be_parallel }
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports protected branches in parallel' do
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ let(:parallel) { false }
+
+ it 'imports protected branches in sequence' do
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import', :clean_gitlab_redis_cache do
+ let(:parallel) { false }
+
+ before do
+ allow(client).to receive(:branches).and_return(branches)
+ allow(client)
+ .to receive(:branch_protection)
+ .with(project.import_source, 'staging')
+ .and_return(github_protection_rule)
+ .once
+ end
+
+ it 'imports each protected branch in sequence' do
+ protected_branch_importer = instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter')
+
+ expect(Gitlab::GithubImport::Importer::ProtectedBranchImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::ProtectedBranch),
+ project,
+ client
+ )
+ .and_return(protected_branch_importer)
+
+ expect(protected_branch_importer).to receive(:execute)
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).with(project, :protected_branch, :fetched)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import', :clean_gitlab_redis_cache do
+ before do
+ allow(client).to receive(:branches).and_return(branches)
+ allow(client)
+ .to receive(:branch_protection)
+ .with(project.import_source, 'staging')
+ .and_return(github_protection_rule)
+ .once
+ end
+
+ it 'imports each protected branch in parallel' do
+ expect(Gitlab::GithubImport::ImportProtectedBranchWorker)
+ .to receive(:bulk_perform_in)
+ .with(
+ 1.second,
+ [[project.id, an_instance_of(Hash), an_instance_of(String)]],
+ batch_delay: 1.minute,
+ batch_size: 1000
+ )
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment).with(project, :protected_branch, :fetched)
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:branch_struct) { Struct.new(:protection, :name, :url, keyword_init: true) }
+ let(:protection_struct) { Struct.new(:enabled, keyword_init: true) }
+ let(:protected_branch) { branch_struct.new(name: 'main', protection: protection_struct.new(enabled: true)) }
+ let(:unprotected_branch) { branch_struct.new(name: 'staging', protection: protection_struct.new(enabled: false)) }
+
+ let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
+
+ before do
+ allow(client).to receive(:branches).with(project.import_source)
+ .and_return([protected_branch, unprotected_branch])
+ allow(client).to receive(:branch_protection)
+ .with(project.import_source, protected_branch.name).once
+ .and_return(github_protection_rule)
+ allow(Gitlab::GithubImport::ObjectCounter).to receive(:increment)
+ .with(project, :protected_branch, :fetched)
+ end
+
+ it 'imports each protected branch page by page' do
+ subject.each_object_to_import do |object|
+ expect(object).to eq github_protection_rule
+ end
+ expect(Gitlab::GithubImport::ObjectCounter).to have_received(:increment).once
+ end
+
+ context 'when protected branch is already processed' do
+ it "doesn't process this branch" do
+ subject.mark_as_imported(protected_branch)
+
+ subject.each_object_to_import {}
+ expect(Gitlab::GithubImport::ObjectCounter).not_to have_received(:increment)
+ end
+ end
+ end
+
+ describe '#importer_class' do
+ it { expect(importer.importer_class).to eq Gitlab::GithubImport::Importer::ProtectedBranchImporter }
+ end
+
+ describe '#representation_class' do
+ it { expect(importer.representation_class).to eq Gitlab::GithubImport::Representation::ProtectedBranch }
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(importer.sidekiq_worker_class).to eq Gitlab::GithubImport::ImportProtectedBranchWorker }
+ end
+
+ describe '#object_type' do
+ it { expect(importer.object_type).to eq :protected_branch }
+ end
+
+ describe '#collection_method' do
+ it { expect(importer.collection_method).to eq :protected_branches }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the ID of the given protected branch' do
+ expect(importer.id_for_already_imported_cache(github_protection_rule)).to eq('main')
+ end
+ end
+
+ describe '#collection_options' do
+ it 'returns an empty Hash' do
+ # For large projects (e.g. kubernetes/kubernetes) GitHub's API may produce
+ # HTTP 500 errors when using explicit sorting options, regardless of what
+ # order you sort in. Not using any sorting options at all allows us to
+ # work around this.
+ expect(importer.collection_options).to eq({})
+ end
+ end
+end
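
Editorial note: the #each_object_to_import examples above boil down to a filter: only branches whose protection flag is enabled, and which have not already been imported, trigger a branch_protection fetch. A self-contained simulation of just that filtering, built from the same kind of Struct fixtures the spec uses (the fetch_protection lambda is an illustrative stand-in for client.branch_protection, not the importer's internals; the already-imported cache and ObjectCounter bookkeeping are left out):

    # Fixtures shaped like the spec's own: a branch exposes #name and a #protection
    # struct with an #enabled flag.
    Branch     = Struct.new(:name, :protection, keyword_init: true)
    Protection = Struct.new(:enabled, keyword_init: true)

    branches = [
      Branch.new(name: 'main',    protection: Protection.new(enabled: true)),
      Branch.new(name: 'staging', protection: Protection.new(enabled: false))
    ]

    # Stand-in for client.branch_protection(repo, branch_name).
    fetch_protection = ->(name) { { name: name, allow_force_pushes: false } }

    rules = branches
      .select { |branch| branch.protection&.enabled }          # unprotected branches are skipped
      .map    { |branch| fetch_protection.call(branch.name) }  # one API call per protected branch

    p rules # => only the rule fetched for 'main'
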
diff --git a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
new file mode 100644
index 00000000000..e762dc469c1
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
+ shared_examples 'a ProtectedBranch rule' do
+ it 'returns an instance of ProtectedBranch' do
+ expect(protected_branch).to be_an_instance_of(described_class)
+ end
+
+ context 'with ProtectedBranch' do
+ it 'includes the protected branch ID (name)' do
+ expect(protected_branch.id).to eq 'main'
+ end
+
+ it 'includes the protected branch allow_force_pushes' do
+ expect(protected_branch.allow_force_pushes).to eq true
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ response = Struct.new(:url, :allow_force_pushes, keyword_init: true)
+ allow_force_pushes = Struct.new(:enabled, keyword_init: true)
+ response.new(
+ url: 'https://example.com/branches/main/protection',
+ allow_force_pushes: allow_force_pushes.new(
+ enabled: true
+ )
+ )
+ end
+
+ it_behaves_like 'a ProtectedBranch rule' do
+ let(:protected_branch) { described_class.from_api_response(response) }
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a ProtectedBranch rule' do
+ let(:hash) do
+ {
+ 'id' => 'main',
+ 'allow_force_pushes' => true
+ }
+ end
+
+ let(:protected_branch) { described_class.from_json_hash(hash) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
index 09548d21106..cc730e203f6 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
@@ -41,10 +41,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_r
describe '#call' do
it 'removes the stored job from redis before execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
- job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
+ job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'default')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
- .to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
+ .to receive(:new).with(a_hash_including(bare_job), 'default')
.and_return(job_definition).twice # once in client middleware
expect(job_definition).to receive(:delete!).ordered.and_call_original
@@ -60,10 +60,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_r
it 'removes the stored job from redis after execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
- job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
+ job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'default')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
- .to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
+ .to receive(:new).with(a_hash_including(bare_job), 'default')
.and_return(job_definition).twice # once in client middleware
expect(TestDeduplicationWorker).to receive(:work).ordered.and_call_original
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index d4391d3023a..a576cf3e2ab 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 3, migrated: 0)
expect(set_after.length).to eq(3)
- expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects',
+ expect(set_after.map(&:first)).to all(include('queue' => 'default',
'class' => 'AuthorizedProjectsWorker'))
end
end
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
if item['class'] == 'AuthorizedProjectsWorker'
expect(item).to include('queue' => 'new_queue', 'args' => [i])
else
- expect(item).to include('queue' => 'post_receive', 'args' => [i])
+ expect(item).to include('queue' => 'default', 'args' => [i])
end
expect(score).to be_within(schedule_jitter).of(i.succ.hours.from_now.to_i)
@@ -116,7 +116,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 0)
expect(set_after.length).to eq(3)
- expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects'))
+ expect(set_after.map(&:first)).to all(include('queue' => 'default'))
end
end
@@ -138,7 +138,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 1)
expect(set_after.group_by { |job| job.first['queue'] }.transform_values(&:count))
- .to eq('authorized_projects' => 6, 'new_queue' => 1)
+ .to eq('default' => 6, 'new_queue' => 1)
end
it 'iterates through the entire set of jobs' do
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
index 5e91282612e..93632848788 100644
--- a/spec/lib/gitlab/sidekiq_queue_spec.rb
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -4,15 +4,15 @@ require 'spec_helper'
RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
around do |example|
- Sidekiq::Queue.new('default').clear
+ Sidekiq::Queue.new('foobar').clear
Sidekiq::Testing.disable!(&example)
- Sidekiq::Queue.new('default').clear
+ Sidekiq::Queue.new('foobar').clear
end
def add_job(args, user:, klass: 'AuthorizedProjectsWorker')
Sidekiq::Client.push(
'class' => klass,
- 'queue' => 'default',
+ 'queue' => 'foobar',
'args' => args,
'meta.user' => user.username
)
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
describe '#drop_jobs!' do
shared_examples 'queue processing' do
- let(:sidekiq_queue) { described_class.new('default') }
+ let(:sidekiq_queue) { described_class.new('foobar') }
let_it_be(:sidekiq_queue_user) { create(:user) }
before do
@@ -80,7 +80,7 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
it 'raises NoMetadataError' do
add_job([1], user: create(:user))
- expect { described_class.new('default').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
+ expect { described_class.new('foobar').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
end
end
diff --git a/spec/lib/gitlab/utils/execution_tracker_spec.rb b/spec/lib/gitlab/utils/execution_tracker_spec.rb
new file mode 100644
index 00000000000..6c42863658c
--- /dev/null
+++ b/spec/lib/gitlab/utils/execution_tracker_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Utils::ExecutionTracker do
+ subject(:tracker) { described_class.new }
+
+ describe '#over_limit?' do
+ it 'is true when max runtime is exceeded' do
+ monotonic_time_before = 1 # this will be the start time
+ monotonic_time_after = described_class::MAX_RUNTIME.to_i + 1 # this will be returned when over_limit? is called
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
+
+ tracker
+
+ expect(tracker).to be_over_limit
+ end
+
+ it 'is false when max runtime is not exceeded' do
+ expect(tracker).not_to be_over_limit
+ end
+ end
+end
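
Editorial note: the spec above fixes only two behaviours: the tracker captures a start time from the monotonic clock when it is instantiated, and over_limit? becomes true once the elapsed time reaches MAX_RUNTIME. A stand-alone sketch consistent with those expectations (an assumption about the shape of the implementation; the placeholder MAX_RUNTIME value below is not taken from GitLab):

    # Sketch of a monotonic-clock execution tracker; not GitLab's implementation.
    class SketchExecutionTracker
      MAX_RUNTIME = 30 # seconds; placeholder value for illustration only

      def initialize
        @start_time = monotonic_time # captured once, when the tracker is created
      end

      def over_limit?
        monotonic_time - @start_time >= MAX_RUNTIME
      end

      private

      def monotonic_time
        Process.clock_gettime(Process::CLOCK_MONOTONIC)
      end
    end

    tracker = SketchExecutionTracker.new
    tracker.over_limit? # => false immediately after creation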