
gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/application_setting/term_spec.rb                      15
-rw-r--r--  spec/models/application_setting_spec.rb                           15
-rw-r--r--  spec/models/blob_viewer/readme_spec.rb                              2
-rw-r--r--  spec/models/ci/build_spec.rb                                        5
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb                          396
-rw-r--r--  spec/models/ci/pipeline_spec.rb                                     2
-rw-r--r--  spec/models/ci/runner_spec.rb                                     240
-rw-r--r--  spec/models/concerns/sha_attribute_spec.rb                         67
-rw-r--r--  spec/models/group_spec.rb                                          89
-rw-r--r--  spec/models/merge_request_spec.rb                                  18
-rw-r--r--  spec/models/namespace_spec.rb                                      15
-rw-r--r--  spec/models/project_import_state_spec.rb                           13
-rw-r--r--  spec/models/project_services/microsoft_teams_service_spec.rb        2
-rw-r--r--  spec/models/project_spec.rb                                       213
-rw-r--r--  spec/models/project_wiki_spec.rb                                    6
-rw-r--r--  spec/models/remote_mirror_spec.rb                                 267
-rw-r--r--  spec/models/repository_spec.rb                                     37
-rw-r--r--  spec/models/term_agreement_spec.rb                                  8
-rw-r--r--  spec/models/wiki_page_spec.rb                                       2
19 files changed, 1351 insertions, 61 deletions
diff --git a/spec/models/application_setting/term_spec.rb b/spec/models/application_setting/term_spec.rb
new file mode 100644
index 00000000000..1eddf3c56ff
--- /dev/null
+++ b/spec/models/application_setting/term_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe ApplicationSetting::Term do
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:terms) }
+ end
+
+ describe '.latest' do
+ it 'finds the latest terms' do
+ terms = create(:term)
+
+ expect(described_class.latest).to eq(terms)
+ end
+ end
+end
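
The new spec above pins down two behaviours: a presence validation on terms and a .latest finder that returns the newest record. A minimal model sketch consistent with those expectations (the table layout and the ordering column are assumptions, not taken from the diff) could look like:

# Hypothetical sketch of the model exercised by term_spec.rb; only the
# behaviour (presence validation, `.latest` returning the newest terms)
# is taken from the spec above.
class ApplicationSetting::Term < ActiveRecord::Base
  validates :terms, presence: true

  # Newest terms document wins; ordering by primary key is an assumption.
  def self.latest
    order(:id).last
  end
end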
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index ae2d34750a7..10d6109cae7 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -301,6 +301,21 @@ describe ApplicationSetting do
expect(subject).to be_invalid
end
end
+
+ describe 'enforcing terms' do
+ it 'requires the terms to be present when enforcing users to accept' do
+ subject.enforce_terms = true
+
+ expect(subject).to be_invalid
+ end
+
+ it 'is valid when terms are created' do
+ create(:term)
+ subject.enforce_terms = true
+
+ expect(subject).to be_valid
+ end
+ end
end
describe '.current' do
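
The 'enforcing terms' examples above imply a conditional validation on ApplicationSetting: with enforce_terms set, the record is only valid when at least one terms record exists. A sketch of that validation (the hook name and error wording are assumptions) might be:

# Sketch only: validation implied by the 'enforcing terms' examples.
validate :terms_exist, if: :enforce_terms?

def terms_exist
  return if ApplicationSetting::Term.exists?

  errors.add(:base, 'Terms must be set when they are enforced') # message is an assumption
end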
diff --git a/spec/models/blob_viewer/readme_spec.rb b/spec/models/blob_viewer/readme_spec.rb
index b9946c0315a..8d11d58cfca 100644
--- a/spec/models/blob_viewer/readme_spec.rb
+++ b/spec/models/blob_viewer/readme_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe BlobViewer::Readme do
include FakeBlobHelpers
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, :wiki_repo) }
let(:blob) { fake_blob(path: 'README.md') }
subject { described_class.new(blob) }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 3158e006720..dc810489011 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1518,7 +1518,10 @@ describe Ci::Build do
{ key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true },
{ key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
{ key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true },
- { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true }
+ { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true },
+ { key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true },
+ { key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true },
+ { key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true }
]
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
new file mode 100644
index 00000000000..cbcf1e55979
--- /dev/null
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -0,0 +1,396 @@
+require 'spec_helper'
+
+describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
+ set(:build) { create(:ci_build, :running) }
+ let(:chunk_index) { 0 }
+ let(:data_store) { :redis }
+ let(:raw_data) { nil }
+
+ let(:build_trace_chunk) do
+ described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
+ end
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ end
+
+ context 'FastDestroyAll' do
+ let(:parent) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: parent) }
+ let(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline, project: parent) }
+ let(:subjects) { build.trace_chunks }
+
+ it_behaves_like 'fast destroyable'
+
+ def external_data_counter
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size
+ end
+ end
+ end
+
+ describe 'CHUNK_SIZE' do
+ it 'chunk size cannot be changed without special care' do
+ expect(described_class::CHUNK_SIZE).to eq(128.kilobytes)
+ end
+ end
+
+ describe '#data' do
+ subject { build_trace_chunk.data }
+
+ context 'when data_store is redis' do
+ let(:data_store) { :redis }
+
+ before do
+ build_trace_chunk.send(:redis_set_data, 'Sample data in redis')
+ end
+
+ it { is_expected.to eq('Sample data in redis') }
+ end
+
+ context 'when data_store is database' do
+ let(:data_store) { :db }
+ let(:raw_data) { 'Sample data in db' }
+
+ it { is_expected.to eq('Sample data in db') }
+ end
+
+ context 'when data_store is others' do
+ before do
+ build_trace_chunk.send(:write_attribute, :data_store, -1)
+ end
+
+ it { expect { subject }.to raise_error('Unsupported data store') }
+ end
+ end
+
+ describe '#set_data' do
+ subject { build_trace_chunk.send(:set_data, value) }
+
+ let(:value) { 'Sample data' }
+
+ context 'when value bytesize is bigger than CHUNK_SIZE' do
+ let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) }
+
+ it { expect { subject }.to raise_error('too much data') }
+ end
+
+ context 'when data_store is redis' do
+ let(:data_store) { :redis }
+
+ it do
+ expect(build_trace_chunk.send(:redis_data)).to be_nil
+
+ subject
+
+ expect(build_trace_chunk.send(:redis_data)).to eq(value)
+ end
+
+ context 'when chunk size is fulfilled' do
+ let(:value) { 'a' * described_class::CHUNK_SIZE }
+
+ it 'schedules stashing data' do
+ expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
+
+ subject
+ end
+ end
+ end
+
+ context 'when data_store is database' do
+ let(:data_store) { :db }
+
+ it 'sets data' do
+ expect(build_trace_chunk.raw_data).to be_nil
+
+ subject
+
+ expect(build_trace_chunk.raw_data).to eq(value)
+ expect(build_trace_chunk.persisted?).to be_truthy
+ end
+
+ context 'when raw_data is not changed' do
+ it 'does not execute UPDATE' do
+ expect(build_trace_chunk.raw_data).to be_nil
+ build_trace_chunk.save!
+
+ # First set
+ expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0
+ expect(build_trace_chunk.raw_data).to eq(value)
+ expect(build_trace_chunk.persisted?).to be_truthy
+
+ # Second set
+ build_trace_chunk.reload
+ expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0)
+ end
+ end
+
+ context 'when chunk size is fulfilled' do
+ it 'does not schedule stashing data' do
+ expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+
+ context 'when data_store is others' do
+ before do
+ build_trace_chunk.send(:write_attribute, :data_store, -1)
+ end
+
+ it { expect { subject }.to raise_error('Unsupported data store') }
+ end
+ end
+
+ describe '#truncate' do
+ subject { build_trace_chunk.truncate(offset) }
+
+ shared_examples_for 'truncates' do
+ context 'when offset is negative' do
+ let(:offset) { -1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
+ end
+
+ context 'when offset is bigger than data size' do
+ let(:offset) { data.bytesize + 1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
+ end
+
+ context 'when offset is 10' do
+ let(:offset) { 10 }
+
+ it 'truncates' do
+ subject
+
+ expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
+ end
+ end
+ end
+
+ context 'when data_store is redis' do
+ let(:data_store) { :redis }
+ let(:data) { 'Sample data in redis' }
+
+ before do
+ build_trace_chunk.send(:redis_set_data, data)
+ end
+
+ it_behaves_like 'truncates'
+ end
+
+ context 'when data_store is database' do
+ let(:data_store) { :db }
+ let(:raw_data) { 'Sample data in db' }
+ let(:data) { raw_data }
+
+ it_behaves_like 'truncates'
+ end
+ end
+
+ describe '#append' do
+ subject { build_trace_chunk.append(new_data, offset) }
+
+ let(:new_data) { 'Sample new data' }
+ let(:offset) { 0 }
+ let(:total_data) { data + new_data }
+
+ shared_examples_for 'appends' do
+ context 'when offset is negative' do
+ let(:offset) { -1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
+ end
+
+ context 'when offset is bigger than data size' do
+ let(:offset) { data.bytesize + 1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
+ end
+
+ context 'when new data size is bigger than CHUNK_SIZE' do
+ let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
+
+ it { expect { subject }.to raise_error('Chunk size overflow') }
+ end
+
+ context 'when offset is EOF' do
+ let(:offset) { data.bytesize }
+
+ it 'appends' do
+ subject
+
+ expect(build_trace_chunk.data).to eq(total_data)
+ end
+ end
+
+ context 'when offset is 10' do
+ let(:offset) { 10 }
+
+ it 'appends' do
+ subject
+
+ expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
+ end
+ end
+ end
+
+ context 'when data_store is redis' do
+ let(:data_store) { :redis }
+ let(:data) { 'Sample data in redis' }
+
+ before do
+ build_trace_chunk.send(:redis_set_data, data)
+ end
+
+ it_behaves_like 'appends'
+ end
+
+ context 'when data_store is database' do
+ let(:data_store) { :db }
+ let(:raw_data) { 'Sample data in db' }
+ let(:data) { raw_data }
+
+ it_behaves_like 'appends'
+ end
+ end
+
+ describe '#size' do
+ subject { build_trace_chunk.size }
+
+ context 'when data_store is redis' do
+ let(:data_store) { :redis }
+
+ context 'when data exists' do
+ let(:data) { 'Sample data in redis' }
+
+ before do
+ build_trace_chunk.send(:redis_set_data, data)
+ end
+
+ it { is_expected.to eq(data.bytesize) }
+ end
+
+ context 'when data does not exist' do
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ context 'when data_store is database' do
+ let(:data_store) { :db }
+
+ context 'when data exists' do
+ let(:raw_data) { 'Sample data in db' }
+ let(:data) { raw_data }
+
+ it { is_expected.to eq(data.bytesize) }
+ end
+
+ context 'when data does not exist' do
+ it { is_expected.to eq(0) }
+ end
+ end
+ end
+
+ describe '#use_database!' do
+ subject { build_trace_chunk.use_database! }
+
+ context 'when data_store is redis' do
+ let(:data_store) { :redis }
+
+ context 'when data exists' do
+ let(:data) { 'Sample data in redis' }
+
+ before do
+ build_trace_chunk.send(:redis_set_data, data)
+ end
+
+ it 'stashes the data' do
+ expect(build_trace_chunk.data_store).to eq('redis')
+ expect(build_trace_chunk.send(:redis_data)).to eq(data)
+ expect(build_trace_chunk.raw_data).to be_nil
+
+ subject
+
+ expect(build_trace_chunk.data_store).to eq('db')
+ expect(build_trace_chunk.send(:redis_data)).to be_nil
+ expect(build_trace_chunk.raw_data).to eq(data)
+ end
+ end
+
+ context 'when data does not exist' do
+ it 'does not call UPDATE' do
+ expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+ end
+ end
+ end
+
+ context 'when data_store is database' do
+ let(:data_store) { :db }
+
+ it 'does not call UPDATE' do
+ expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+ end
+ end
+ end
+
+ describe 'ExclusiveLock' do
+ before do
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil }
+ stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1)
+ end
+
+ it 'raises an error' do
+ expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
+ end
+ end
+
+ describe 'deletes data in redis after a parent record destroyed' do
+ let(:project) { create(:project) }
+
+ before do
+ pipeline = create(:ci_pipeline, project: project)
+ create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
+ create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
+ create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
+ end
+
+ shared_examples_for 'deletes all build_trace_chunk and data in redis' do
+ it do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3)
+ end
+
+ expect(described_class.count).to eq(3)
+
+ subject
+
+ expect(described_class.count).to eq(0)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0)
+ end
+ end
+ end
+
+ context 'when traces are archived' do
+ let(:subject) do
+ project.builds.each do |build|
+ build.success!
+ end
+ end
+
+ it_behaves_like 'deletes all build_trace_chunk and data in redis'
+ end
+
+ context 'when project is destroyed' do
+ let(:subject) do
+ project.destroy!
+ end
+
+ it_behaves_like 'deletes all build_trace_chunk and data in redis'
+ end
+ end
+end
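
Taken together, the examples above describe the contract of a trace chunk: data lives either in Redis or in the raw_data database column, a chunk never exceeds CHUNK_SIZE (128 KB), #append and #truncate validate offsets, a fully written chunk is flushed to the database via Ci::BuildTraceChunkFlushWorker, and writes are serialized with an exclusive lease. A rough sketch of the append path, with helper names as assumptions and error strings taken from the spec, could be:

# Sketch of the append path implied by the examples above.
def append(new_data, offset)
  raise 'Offset is out of range' if offset < 0 || offset > size
  raise 'Chunk size overflow' if offset + new_data.bytesize > CHUNK_SIZE

  in_write_lock do # exclusive lease; raises 'Failed to obtain write lock' on timeout
    set_data(data.byteslice(0, offset) + new_data)
  end
end

def set_data(value)
  raise 'too much data' if value.bytesize > CHUNK_SIZE

  # store in Redis or in raw_data depending on data_store ...
  schedule_to_db if full? # Ci::BuildTraceChunkFlushWorker; helper name is an assumption
end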
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index dd94515b0a4..ddd66a6be87 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -173,7 +173,7 @@ describe Ci::Pipeline, :mailer do
it 'includes all predefined variables in a valid order' do
keys = subject.map { |variable| variable[:key] }
- expect(keys).to eq %w[CI_PIPELINE_ID CI_CONFIG_PATH CI_PIPELINE_SOURCE]
+ expect(keys).to eq %w[CI_PIPELINE_ID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION]
end
end
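
Both CI hunks above add three commit-derived variables (CI_COMMIT_MESSAGE, CI_COMMIT_TITLE, CI_COMMIT_DESCRIPTION) to the predefined variable list and pin their order. A sketch of the pipeline side, assuming the entries are appended to whatever builds the existing CI_PIPELINE_* variables, might be:

# Sketch only: the three new entries appended to the pipeline's
# predefined variables; the surrounding builder method is an assumption.
[
  { key: 'CI_COMMIT_MESSAGE', value: git_commit_message, public: true },
  { key: 'CI_COMMIT_TITLE', value: git_commit_title, public: true },
  { key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description, public: true }
]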
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index ab170e6351c..cc4d4e5e4ae 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -19,6 +19,63 @@ describe Ci::Runner do
end
end
end
+
+ context 'either_projects_or_group' do
+ let(:group) { create(:group) }
+
+ it 'disallows assigning to a group if already assigned to a group' do
+ runner = create(:ci_runner, groups: [group])
+
+ runner.groups << build(:group)
+
+ expect(runner).not_to be_valid
+ expect(runner.errors.full_messages).to eq ['Runner can only be assigned to one group']
+ end
+
+ it 'disallows assigning to a group if already assigned to a project' do
+ project = create(:project)
+ runner = create(:ci_runner, projects: [project])
+
+ runner.groups << build(:group)
+
+ expect(runner).not_to be_valid
+ expect(runner.errors.full_messages).to eq ['Runner can only be assigned either to projects or to a group']
+ end
+
+ it 'disallows assigning to a project if already assigned to a group' do
+ runner = create(:ci_runner, groups: [group])
+
+ runner.projects << build(:project)
+
+ expect(runner).not_to be_valid
+ expect(runner.errors.full_messages).to eq ['Runner can only be assigned either to projects or to a group']
+ end
+
+ it 'allows assigning to a group if not assigned to a group nor a project' do
+ runner = create(:ci_runner)
+
+ runner.groups << build(:group)
+
+ expect(runner).to be_valid
+ end
+
+ it 'allows assigning to a project if not assigned to a group nor a project' do
+ runner = create(:ci_runner)
+
+ runner.projects << build(:project)
+
+ expect(runner).to be_valid
+ end
+
+ it 'allows assigning to a project if already assigned to a project' do
+ project = create(:project)
+ runner = create(:ci_runner, projects: [project])
+
+ runner.projects << build(:project)
+
+ expect(runner).to be_valid
+ end
+ end
end
describe '#access_level' do
@@ -49,6 +106,80 @@ describe Ci::Runner do
end
end
+ describe '.shared' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project) }
+
+ it 'returns the shared group runner' do
+ runner = create(:ci_runner, :shared, groups: [group])
+
+ expect(described_class.shared).to eq [runner]
+ end
+
+ it 'returns the shared project runner' do
+ runner = create(:ci_runner, :shared, projects: [project])
+
+ expect(described_class.shared).to eq [runner]
+ end
+ end
+
+ describe '.belonging_to_project' do
+ it 'returns the specific project runner' do
+ # own
+ specific_project = create(:project)
+ specific_runner = create(:ci_runner, :specific, projects: [specific_project])
+
+ # other
+ other_project = create(:project)
+ create(:ci_runner, :specific, projects: [other_project])
+
+ expect(described_class.belonging_to_project(specific_project.id)).to eq [specific_runner]
+ end
+ end
+
+ describe '.belonging_to_parent_group_of_project' do
+ let(:project) { create(:project, group: group) }
+ let(:group) { create(:group) }
+ let(:runner) { create(:ci_runner, :specific, groups: [group]) }
+ let!(:unrelated_group) { create(:group) }
+ let!(:unrelated_project) { create(:project, group: unrelated_group) }
+ let!(:unrelated_runner) { create(:ci_runner, :specific, groups: [unrelated_group]) }
+
+ it 'returns the specific group runner' do
+ expect(described_class.belonging_to_parent_group_of_project(project.id)).to contain_exactly(runner)
+ end
+
+ context 'with a parent group with a runner', :nested_groups do
+ let(:runner) { create(:ci_runner, :specific, groups: [parent_group]) }
+ let(:project) { create(:project, group: group) }
+ let(:group) { create(:group, parent: parent_group) }
+ let(:parent_group) { create(:group) }
+
+ it 'returns the group runner from the parent group' do
+ expect(described_class.belonging_to_parent_group_of_project(project.id)).to contain_exactly(runner)
+ end
+ end
+ end
+
+ describe '.owned_or_shared' do
+ it 'returns a globally shared, a project specific and a group specific runner' do
+ # group specific
+ group = create(:group)
+ project = create(:project, group: group)
+ group_runner = create(:ci_runner, :specific, groups: [group])
+
+ # project specific
+ project_runner = create(:ci_runner, :specific, projects: [project])
+
+ # globally shared
+ shared_runner = create(:ci_runner, :shared)
+
+ expect(described_class.owned_or_shared(project.id)).to contain_exactly(
+ group_runner, project_runner, shared_runner
+ )
+ end
+ end
+
describe '#display_name' do
it 'returns the description if it has a value' do
runner = FactoryBot.build(:ci_runner, description: 'Linux/Ruby-1.9.3-p448')
@@ -163,7 +294,9 @@ describe Ci::Runner do
describe '#can_pick?' do
let(:pipeline) { create(:ci_pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
- let(:runner) { create(:ci_runner) }
+ let(:runner) { create(:ci_runner, tag_list: tag_list, run_untagged: run_untagged) }
+ let(:tag_list) { [] }
+ let(:run_untagged) { true }
subject { runner.can_pick?(build) }
@@ -171,6 +304,13 @@ describe Ci::Runner do
build.project.runners << runner
end
+ context 'a different runner' do
+ it 'cannot handle builds' do
+ other_runner = create(:ci_runner)
+ expect(other_runner.can_pick?(build)).to be_falsey
+ end
+ end
+
context 'when runner does not have tags' do
it 'can handle builds without tags' do
expect(runner.can_pick?(build)).to be_truthy
@@ -184,9 +324,7 @@ describe Ci::Runner do
end
context 'when runner has tags' do
- before do
- runner.tag_list = %w(bb cc)
- end
+ let(:tag_list) { %w(bb cc) }
shared_examples 'tagged build picker' do
it 'can handle build with matching tags' do
@@ -211,9 +349,7 @@ describe Ci::Runner do
end
context 'when runner cannot pick untagged jobs' do
- before do
- runner.run_untagged = false
- end
+ let(:run_untagged) { false }
it 'cannot handle builds without tags' do
expect(runner.can_pick?(build)).to be_falsey
@@ -224,8 +360,9 @@ describe Ci::Runner do
end
context 'when runner is shared' do
+ let(:runner) { create(:ci_runner, :shared) }
+
before do
- runner.is_shared = true
build.project.runners = []
end
@@ -234,9 +371,7 @@ describe Ci::Runner do
end
context 'when runner is locked' do
- before do
- runner.locked = true
- end
+ let(:runner) { create(:ci_runner, :shared, locked: true) }
it 'can handle builds' do
expect(runner.can_pick?(build)).to be_truthy
@@ -260,6 +395,17 @@ describe Ci::Runner do
expect(runner.can_pick?(build)).to be_falsey
end
end
+
+ context 'when runner is assigned to a group' do
+ before do
+ build.project.runners = []
+ runner.groups << create(:group, projects: [build.project])
+ end
+
+ it 'can handle builds' do
+ expect(runner.can_pick?(build)).to be_truthy
+ end
+ end
end
context 'when access_level of runner is not_protected' do
@@ -583,4 +729,76 @@ describe Ci::Runner do
expect(described_class.search(runner.description.upcase)).to eq([runner])
end
end
+
+ describe '#assigned_to_group?' do
+ subject { runner.assigned_to_group? }
+
+ context 'when project runner' do
+ let(:runner) { create(:ci_runner, description: 'Project runner', projects: [project]) }
+ let(:project) { create(:project) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when shared runner' do
+ let(:runner) { create(:ci_runner, :shared, description: 'Shared runner') }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when group runner' do
+ let(:group) { create(:group) }
+ let(:runner) { create(:ci_runner, description: 'Group runner', groups: [group]) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#assigned_to_project?' do
+ subject { runner.assigned_to_project? }
+
+ context 'when group runner' do
+ let(:runner) { create(:ci_runner, description: 'Group runner', groups: [group]) }
+ let(:group) { create(:group) }
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when shared runner' do
+ let(:runner) { create(:ci_runner, :shared, description: 'Shared runner') }
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when project runner' do
+ let(:runner) { create(:ci_runner, description: 'Project runner', projects: [project]) }
+ let(:project) { create(:project) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#pick_build!' do
+ context 'runner can pick the build' do
+ it 'calls #tick_runner_queue' do
+ ci_build = build(:ci_build)
+ runner = build(:ci_runner)
+ allow(runner).to receive(:can_pick?).with(ci_build).and_return(true)
+
+ expect(runner).to receive(:tick_runner_queue)
+
+ runner.pick_build!(ci_build)
+ end
+ end
+
+ context 'runner cannot pick the build' do
+ it 'does not call #tick_runner_queue' do
+ ci_build = build(:ci_build)
+ runner = build(:ci_runner)
+ allow(runner).to receive(:can_pick?).with(ci_build).and_return(false)
+
+ expect(runner).not_to receive(:tick_runner_queue)
+
+ runner.pick_build!(ci_build)
+ end
+ end
+ end
end
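
The validation examples at the top of this file constrain a runner to be attached to at most one group and never to both projects and a group, while the new predicates report how it is attached. A sketch consistent with those expectations (association names are assumptions, error wording is copied from the spec) could be:

# Sketch of the validation and predicates exercised above.
validate :either_projects_or_group

def assigned_to_group?
  groups.any? # association name is an assumption
end

def assigned_to_project?
  projects.any? # association name is an assumption
end

def either_projects_or_group
  errors.add(:runner, 'can only be assigned to one group') if groups.size > 1

  if assigned_to_group? && assigned_to_project?
    errors.add(:runner, 'can only be assigned either to projects or to a group')
  end
end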
diff --git a/spec/models/concerns/sha_attribute_spec.rb b/spec/models/concerns/sha_attribute_spec.rb
index 21893e0cbaa..592feddf1dc 100644
--- a/spec/models/concerns/sha_attribute_spec.rb
+++ b/spec/models/concerns/sha_attribute_spec.rb
@@ -13,33 +13,74 @@ describe ShaAttribute do
end
describe '#sha_attribute' do
- context 'when the table exists' do
+ context 'when in non-production' do
before do
- allow(model).to receive(:table_exists?).and_return(true)
+ allow(Rails.env).to receive(:production?).and_return(false)
end
- it 'defines a SHA attribute for a binary column' do
- expect(model).to receive(:attribute)
- .with(:sha1, an_instance_of(Gitlab::Database::ShaAttribute))
+ context 'when the table exists' do
+ before do
+ allow(model).to receive(:table_exists?).and_return(true)
+ end
- model.sha_attribute(:sha1)
+ it 'defines a SHA attribute for a binary column' do
+ expect(model).to receive(:attribute)
+ .with(:sha1, an_instance_of(Gitlab::Database::ShaAttribute))
+
+ model.sha_attribute(:sha1)
+ end
+
+ it 'raises ArgumentError when the column type is not :binary' do
+ expect { model.sha_attribute(:name) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the table does not exist' do
+ it 'allows the attribute to be added' do
+ allow(model).to receive(:table_exists?).and_return(false)
+
+ expect(model).not_to receive(:columns)
+ expect(model).to receive(:attribute)
+
+ model.sha_attribute(:name)
+ end
end
- it 'raises ArgumentError when the column type is not :binary' do
- expect { model.sha_attribute(:name) }.to raise_error(ArgumentError)
+ context 'when the column does not exist' do
+ it 'raises ArgumentError' do
+ allow(model).to receive(:table_exists?).and_return(true)
+
+ expect(model).to receive(:columns)
+ expect(model).not_to receive(:attribute)
+
+ expect { model.sha_attribute(:no_name) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when other exceptions are raised' do
+ it 'logs and re-raises the error' do
+ allow(model).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError.new('does not exist'))
+
+ expect(model).not_to receive(:columns)
+ expect(model).not_to receive(:attribute)
+ expect(Gitlab::AppLogger).to receive(:error)
+
+ expect { model.sha_attribute(:name) }.to raise_error(ActiveRecord::NoDatabaseError)
+ end
end
end
- context 'when the table does not exist' do
+ context 'when in production' do
before do
- allow(model).to receive(:table_exists?).and_return(false)
+ allow(Rails.env).to receive(:production?).and_return(true)
end
- it 'does nothing' do
+ it 'defines a SHA attribute' do
+ expect(model).not_to receive(:table_exists?)
expect(model).not_to receive(:columns)
- expect(model).not_to receive(:attribute)
+ expect(model).to receive(:attribute).with(:sha1, an_instance_of(Gitlab::Database::ShaAttribute))
- model.sha_attribute(:name)
+ model.sha_attribute(:sha1)
end
end
end
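
The rewritten examples above say that outside production the column is checked (a missing table is tolerated, a missing or non-binary column raises ArgumentError, anything else is logged and re-raised), while in production the SHA attribute is defined without touching the schema. A sketch of that flow, with helper names as assumptions:

# Sketch of the behaviour described by the examples above.
def self.sha_attribute(name)
  return define_sha_attribute(name) if Rails.env.production?

  validate_binary_column!(name)
  define_sha_attribute(name)
end

def self.validate_binary_column!(name)
  return unless table_exists?

  column = columns.find { |c| c.name == name.to_s }
  raise ArgumentError, "#{name} is missing or not a :binary column" if column.nil? || column.type != :binary
rescue ArgumentError
  raise
rescue => error
  Gitlab::AppLogger.error(error.message)
  raise
end

def self.define_sha_attribute(name)
  attribute(name, Gitlab::Database::ShaAttribute.new)
end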
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index d620943693c..0907d28d33b 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -424,6 +424,95 @@ describe Group do
end
end
+ describe '#direct_and_indirect_members', :nested_groups do
+ let!(:group) { create(:group, :nested) }
+ let!(:sub_group) { create(:group, parent: group) }
+ let!(:master) { group.parent.add_user(create(:user), GroupMember::MASTER) }
+ let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
+ let!(:other_developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
+
+ it "returns the parent's members" do
+ expect(group.direct_and_indirect_members).to include(developer)
+ expect(group.direct_and_indirect_members).to include(master)
+ end
+
+ it 'returns descendant members' do
+ expect(group.direct_and_indirect_members).to include(other_developer)
+ end
+ end
+
+ describe '#users_with_descendants', :nested_groups do
+ let(:user_a) { create(:user) }
+ let(:user_b) { create(:user) }
+
+ let(:group) { create(:group) }
+ let(:nested_group) { create(:group, parent: group) }
+ let(:deep_nested_group) { create(:group, parent: nested_group) }
+
+ it 'returns member users on every nest level without duplication' do
+ group.add_developer(user_a)
+ nested_group.add_developer(user_b)
+ deep_nested_group.add_developer(user_a)
+
+ expect(group.users_with_descendants).to contain_exactly(user_a, user_b)
+ expect(nested_group.users_with_descendants).to contain_exactly(user_a, user_b)
+ expect(deep_nested_group.users_with_descendants).to contain_exactly(user_a)
+ end
+ end
+
+ describe '#direct_and_indirect_users', :nested_groups do
+ let(:user_a) { create(:user) }
+ let(:user_b) { create(:user) }
+ let(:user_c) { create(:user) }
+ let(:user_d) { create(:user) }
+
+ let(:group) { create(:group) }
+ let(:nested_group) { create(:group, parent: group) }
+ let(:deep_nested_group) { create(:group, parent: nested_group) }
+ let(:project) { create(:project, namespace: group) }
+
+ before do
+ group.add_developer(user_a)
+ group.add_developer(user_c)
+ nested_group.add_developer(user_b)
+ deep_nested_group.add_developer(user_a)
+ project.add_developer(user_d)
+ end
+
+ it 'returns member users on every nest level without duplication' do
+ expect(group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c, user_d)
+ expect(nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
+ expect(deep_nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
+ end
+
+ it 'does not return members of projects belonging to ancestor groups' do
+ expect(nested_group.direct_and_indirect_users).not_to include(user_d)
+ end
+ end
+
+ describe '#project_users_with_descendants', :nested_groups do
+ let(:user_a) { create(:user) }
+ let(:user_b) { create(:user) }
+ let(:user_c) { create(:user) }
+
+ let(:group) { create(:group) }
+ let(:nested_group) { create(:group, parent: group) }
+ let(:deep_nested_group) { create(:group, parent: nested_group) }
+ let(:project_a) { create(:project, namespace: group) }
+ let(:project_b) { create(:project, namespace: nested_group) }
+ let(:project_c) { create(:project, namespace: deep_nested_group) }
+
+ it 'returns members of all projects in group and subgroups' do
+ project_a.add_developer(user_a)
+ project_b.add_developer(user_b)
+ project_c.add_developer(user_c)
+
+ expect(group.project_users_with_descendants).to contain_exactly(user_a, user_b, user_c)
+ expect(nested_group.project_users_with_descendants).to contain_exactly(user_b, user_c)
+ expect(deep_nested_group.project_users_with_descendants).to contain_exactly(user_c)
+ end
+ end
+
describe '#user_ids_for_project_authorizations' do
it 'returns the user IDs for which to refresh authorizations' do
master = create(:user)
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index becb146422e..04379e7d2c3 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1069,6 +1069,22 @@ describe MergeRequest do
end
end
+ describe '#short_merge_commit_sha' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ it 'returns short id when there is a merge_commit_sha' do
+ merge_request.merge_commit_sha = 'f7ce827c314c9340b075657fd61c789fb01cf74d'
+
+ expect(merge_request.short_merge_commit_sha).to eq('f7ce827c')
+ end
+
+ it 'returns nil when there is no merge_commit_sha' do
+ merge_request.merge_commit_sha = nil
+
+ expect(merge_request.short_merge_commit_sha).to be_nil
+ end
+ end
+
describe '#can_be_reverted?' do
context 'when there is no merge_commit for the MR' do
before do
@@ -1213,7 +1229,7 @@ describe MergeRequest do
it 'enqueues MergeWorker job and updates merge_jid' do
merge_request = create(:merge_request)
user_id = double(:user_id)
- params = double(:params)
+ params = {}
merge_jid = 'hash-123'
expect(MergeWorker).to receive(:perform_async).with(merge_request.id, user_id, params) do
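
The #short_merge_commit_sha examples above expect an 8-character abbreviation and nil passthrough; a one-line sketch matching them (the abbreviation length is read off the expected value) would be:

# Sketch: abbreviate the merge commit SHA, or return nil when absent.
def short_merge_commit_sha
  merge_commit_sha && merge_commit_sha[0, 8]
end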
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 506057dce87..6f702d8d95e 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -399,6 +399,21 @@ describe Namespace do
end
end
+ describe '#self_and_hierarchy', :nested_groups do
+ let!(:group) { create(:group, path: 'git_lab') }
+ let!(:nested_group) { create(:group, parent: group) }
+ let!(:deep_nested_group) { create(:group, parent: nested_group) }
+ let!(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
+ let!(:another_group) { create(:group, path: 'gitllab') }
+ let!(:another_group_nested) { create(:group, path: 'foo', parent: another_group) }
+
+ it 'returns the correct tree' do
+ expect(group.self_and_hierarchy).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
+ expect(nested_group.self_and_hierarchy).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
+ expect(very_deep_nested_group.self_and_hierarchy).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
+ end
+ end
+
describe '#ancestors', :nested_groups do
let(:group) { create(:group) }
let(:nested_group) { create(:group, parent: group) }
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
new file mode 100644
index 00000000000..f7033b28c76
--- /dev/null
+++ b/spec/models/project_import_state_spec.rb
@@ -0,0 +1,13 @@
+require 'rails_helper'
+
+describe ProjectImportState, type: :model do
+ subject { create(:import_state) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ end
+end
diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb
index 733086e258f..8d9ee96227f 100644
--- a/spec/models/project_services/microsoft_teams_service_spec.rb
+++ b/spec/models/project_services/microsoft_teams_service_spec.rb
@@ -30,7 +30,7 @@ describe MicrosoftTeamsService do
describe "#execute" do
let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ set(:project) { create(:project, :repository, :wiki_repo) }
before do
allow(chat_service).to receive_messages(
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 87731f05b7d..41622fbbb6f 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -63,7 +63,6 @@ describe Project do
it { is_expected.to have_many(:build_trace_section_names)}
it { is_expected.to have_many(:runner_projects) }
it { is_expected.to have_many(:runners) }
- it { is_expected.to have_many(:active_runners) }
it { is_expected.to have_many(:variables) }
it { is_expected.to have_many(:triggers) }
it { is_expected.to have_many(:pages_domains) }
@@ -1147,45 +1146,106 @@ describe Project do
end
end
- describe '#any_runners' do
- let(:project) { create(:project, shared_runners_enabled: shared_runners_enabled) }
- let(:specific_runner) { create(:ci_runner) }
- let(:shared_runner) { create(:ci_runner, :shared) }
+ describe '#any_runners?' do
+ context 'shared runners' do
+ let(:project) { create :project, shared_runners_enabled: shared_runners_enabled }
+ let(:specific_runner) { create :ci_runner }
+ let(:shared_runner) { create :ci_runner, :shared }
- context 'for shared runners disabled' do
- let(:shared_runners_enabled) { false }
+ context 'for shared runners disabled' do
+ let(:shared_runners_enabled) { false }
- it 'has no runners available' do
- expect(project.any_runners?).to be_falsey
- end
+ it 'has no runners available' do
+ expect(project.any_runners?).to be_falsey
+ end
- it 'has a specific runner' do
- project.runners << specific_runner
- expect(project.any_runners?).to be_truthy
- end
+ it 'has a specific runner' do
+ project.runners << specific_runner
+
+ expect(project.any_runners?).to be_truthy
+ end
+
+ it 'has a shared runner, but it is prohibited from being used' do
+ shared_runner
+
+ expect(project.any_runners?).to be_falsey
+ end
+
+ it 'checks the presence of specific runner' do
+ project.runners << specific_runner
+
+ expect(project.any_runners? { |runner| runner == specific_runner }).to be_truthy
+ end
- it 'has a shared runner, but they are prohibited to use' do
- shared_runner
- expect(project.any_runners?).to be_falsey
+ it 'returns false if match cannot be found' do
+ project.runners << specific_runner
+
+ expect(project.any_runners? { false }).to be_falsey
+ end
end
- it 'checks the presence of specific runner' do
- project.runners << specific_runner
- expect(project.any_runners? { |runner| runner == specific_runner }).to be_truthy
+ context 'for shared runners enabled' do
+ let(:shared_runners_enabled) { true }
+
+ it 'has a shared runner' do
+ shared_runner
+
+ expect(project.any_runners?).to be_truthy
+ end
+
+ it 'checks the presence of shared runner' do
+ shared_runner
+
+ expect(project.any_runners? { |runner| runner == shared_runner }).to be_truthy
+ end
+
+ it 'returns false if match cannot be found' do
+ shared_runner
+
+ expect(project.any_runners? { false }).to be_falsey
+ end
end
end
- context 'for shared runners enabled' do
- let(:shared_runners_enabled) { true }
+ context 'group runners' do
+ let(:project) { create :project, group_runners_enabled: group_runners_enabled }
+ let(:group) { create :group, projects: [project] }
+ let(:group_runner) { create :ci_runner, groups: [group] }
- it 'has a shared runner' do
- shared_runner
- expect(project.any_runners?).to be_truthy
+ context 'for group runners disabled' do
+ let(:group_runners_enabled) { false }
+
+ it 'has no runners available' do
+ expect(project.any_runners?).to be_falsey
+ end
+
+ it 'has a group runner, but it is prohibited from being used' do
+ group_runner
+
+ expect(project.any_runners?).to be_falsey
+ end
end
- it 'checks the presence of shared runner' do
- shared_runner
- expect(project.any_runners? { |runner| runner == shared_runner }).to be_truthy
+ context 'for group runners enabled' do
+ let(:group_runners_enabled) { true }
+
+ it 'has a group runner' do
+ group_runner
+
+ expect(project.any_runners?).to be_truthy
+ end
+
+ it 'checks the presence of group runner' do
+ group_runner
+
+ expect(project.any_runners? { |runner| runner == group_runner }).to be_truthy
+ end
+
+ it 'returns false if match cannot be found' do
+ group_runner
+
+ expect(project.any_runners? { false }).to be_falsey
+ end
end
end
end
@@ -1643,7 +1703,8 @@ describe Project do
it 'resets project import_error' do
error_message = 'Some error'
- mirror = create(:project_empty_repo, :import_started, import_error: error_message)
+ mirror = create(:project_empty_repo, :import_started)
+ mirror.import_state.update_attributes(last_error: error_message)
expect { mirror.import_finish }.to change { mirror.import_error }.from(error_message).to(nil)
end
@@ -1791,6 +1852,85 @@ describe Project do
it { expect(project.gitea_import?).to be true }
end
+ describe '#has_remote_mirror?' do
+ let(:project) { create(:project, :remote_mirror, :import_started) }
+ subject { project.has_remote_mirror? }
+
+ before do
+ allow_any_instance_of(RemoteMirror).to receive(:refresh_remote)
+ end
+
+ it 'returns true when a remote mirror is enabled' do
+ is_expected.to be_truthy
+ end
+
+ it 'returns false when remote mirror is disabled' do
+ project.remote_mirrors.first.update_attributes(enabled: false)
+
+ is_expected.to be_falsy
+ end
+ end
+
+ describe '#update_remote_mirrors' do
+ let(:project) { create(:project, :remote_mirror, :import_started) }
+ delegate :update_remote_mirrors, to: :project
+
+ before do
+ allow_any_instance_of(RemoteMirror).to receive(:refresh_remote)
+ end
+
+ it 'syncs enabled remote mirror' do
+ expect_any_instance_of(RemoteMirror).to receive(:sync)
+
+ update_remote_mirrors
+ end
+
+ # TODO: study if remote_mirror_available_overridden is still a necessary attribute considering that
+ # it is no longer under any license
+ it 'does nothing when remote mirror is disabled globally and not overridden' do
+ stub_application_setting(mirror_available: false)
+ project.remote_mirror_available_overridden = false
+
+ expect_any_instance_of(RemoteMirror).not_to receive(:sync)
+
+ update_remote_mirrors
+ end
+
+ it 'does not sync disabled remote mirrors' do
+ project.remote_mirrors.first.update_attributes(enabled: false)
+
+ expect_any_instance_of(RemoteMirror).not_to receive(:sync)
+
+ update_remote_mirrors
+ end
+ end
+
+ describe '#remote_mirror_available?' do
+ let(:project) { create(:project) }
+
+ context 'when remote mirror global setting is enabled' do
+ it 'returns true' do
+ expect(project.remote_mirror_available?).to be(true)
+ end
+ end
+
+ context 'when remote mirror global setting is disabled' do
+ before do
+ stub_application_setting(mirror_available: false)
+ end
+
+ it 'returns true when overridden' do
+ project.remote_mirror_available_overridden = true
+
+ expect(project.remote_mirror_available?).to be(true)
+ end
+
+ it 'returns false when not overridden' do
+ expect(project.remote_mirror_available?).to be(false)
+ end
+ end
+ end
+
describe '#ancestors_upto', :nested_groups do
let(:parent) { create(:group) }
let(:child) { create(:group, parent: parent) }
@@ -3287,7 +3427,8 @@ describe Project do
context 'with an import JID' do
it 'unsets the import JID' do
- project = create(:project, import_jid: '123')
+ project = create(:project)
+ create(:import_state, project: project, jid: '123')
expect(Gitlab::SidekiqStatus)
.to receive(:unset)
@@ -3549,6 +3690,18 @@ describe Project do
end
end
+ describe '#toggle_ci_cd_settings!' do
+ it 'toggles the value on #settings' do
+ project = create(:project, group_runners_enabled: false)
+
+ expect(project.group_runners_enabled).to be false
+
+ project.toggle_ci_cd_settings!(:group_runners_enabled)
+
+ expect(project.group_runners_enabled).to be true
+ end
+ end
+
describe '#gitlab_deploy_token' do
let(:project) { create(:project) }
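
Among the project changes above, two small methods are easy to reconstruct from their examples: #toggle_ci_cd_settings! flips a flag such as group_runners_enabled, and #remote_mirror_available? falls back to the global mirror_available setting unless overridden. A sketch, with the ci_cd_settings association and the settings accessor as assumptions:

# Sketches for two of the methods exercised above.
def toggle_ci_cd_settings!(settings_attribute)
  ci_cd_settings.toggle!(settings_attribute) # association name is an assumption
end

def remote_mirror_available?
  remote_mirror_available_overridden ||
    Gitlab::CurrentSettings.mirror_available # setting name taken from stub_application_setting
end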
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index cbe7d111fcd..d6c4031329d 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe ProjectWiki do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :wiki_repo) }
let(:repository) { project.repository }
let(:user) { project.owner }
let(:gitlab_shell) { Gitlab::Shell.new }
@@ -328,6 +328,8 @@ describe ProjectWiki do
end
describe '#create_repo!' do
+ let(:project) { create(:project) }
+
it 'creates a repository' do
expect(raw_repository.exists?).to eq(false)
expect(subject.repository).to receive(:after_create)
@@ -339,6 +341,8 @@ describe ProjectWiki do
end
describe '#ensure_repository' do
+ let(:project) { create(:project) }
+
it 'creates the repository if it does not exist' do
expect(raw_repository.exists?).to eq(false)
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
new file mode 100644
index 00000000000..a80800c6c92
--- /dev/null
+++ b/spec/models/remote_mirror_spec.rb
@@ -0,0 +1,267 @@
+require 'rails_helper'
+
+describe RemoteMirror do
+ describe 'URL validation' do
+ context 'with a valid URL' do
+ it 'should be valid' do
+ remote_mirror = build(:remote_mirror)
+ expect(remote_mirror).to be_valid
+ end
+ end
+
+ context 'with an invalid URL' do
+ it 'should not be valid' do
+ remote_mirror = build(:remote_mirror, url: 'ftp://invalid.invalid')
+ expect(remote_mirror).not_to be_valid
+ expect(remote_mirror.errors[:url].size).to eq(2)
+ end
+ end
+ end
+
+ describe 'encrypting credentials' do
+ context 'when setting URL for a first time' do
+ it 'stores the URL without credentials' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+
+ expect(mirror.read_attribute(:url)).to eq('http://test.com')
+ end
+
+ it 'stores the credentials on a separate field' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+
+ expect(mirror.credentials).to eq({ user: 'foo', password: 'bar' })
+ end
+
+ it 'handles credentials with large content' do
+ mirror = create_mirror(url: 'http://bxnhm8dote33ct932r3xavslj81wxmr7o8yux8do10oozckkif:9ne7fuvjn40qjt35dgt8v86q9m9g9essryxj76sumg2ccl2fg26c0krtz2gzfpyq4hf22h328uhq6npuiq6h53tpagtsj7vsrz75@test.com')
+
+ expect(mirror.credentials).to eq({
+ user: 'bxnhm8dote33ct932r3xavslj81wxmr7o8yux8do10oozckkif',
+ password: '9ne7fuvjn40qjt35dgt8v86q9m9g9essryxj76sumg2ccl2fg26c0krtz2gzfpyq4hf22h328uhq6npuiq6h53tpagtsj7vsrz75'
+ })
+ end
+ end
+
+ context 'when updating the URL' do
+ it 'allows a new URL without credentials' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+
+ mirror.update_attribute(:url, 'http://test.com')
+
+ expect(mirror.url).to eq('http://test.com')
+ expect(mirror.credentials).to eq({ user: nil, password: nil })
+ end
+
+ it 'allows a new URL with credentials' do
+ mirror = create_mirror(url: 'http://test.com')
+
+ mirror.update_attribute(:url, 'http://foo:bar@test.com')
+
+ expect(mirror.url).to eq('http://foo:bar@test.com')
+ expect(mirror.credentials).to eq({ user: 'foo', password: 'bar' })
+ end
+
+ it 'updates the remote config if credentials changed' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+ repo = mirror.project.repository
+
+ mirror.update_attribute(:url, 'http://foo:baz@test.com')
+
+ config = repo.raw_repository.rugged.config
+ expect(config["remote.#{mirror.remote_name}.url"]).to eq('http://foo:baz@test.com')
+ end
+
+ it 'removes previous remote' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+
+ expect(RepositoryRemoveRemoteWorker).to receive(:perform_async).with(mirror.project.id, mirror.remote_name).and_call_original
+
+ mirror.update_attributes(url: 'http://test.com')
+ end
+ end
+ end
+
+ describe '#remote_name' do
+ context 'when remote name is persisted in the database' do
+ it 'returns remote name with random value' do
+ allow(SecureRandom).to receive(:hex).and_return('secret')
+
+ remote_mirror = create(:remote_mirror)
+
+ expect(remote_mirror.remote_name).to eq("remote_mirror_secret")
+ end
+ end
+
+ context 'when remote name is not persisted in the database' do
+ it 'returns remote name with remote mirror id' do
+ remote_mirror = create(:remote_mirror)
+ remote_mirror.remote_name = nil
+
+ expect(remote_mirror.remote_name).to eq("remote_mirror_#{remote_mirror.id}")
+ end
+ end
+
+ context 'when remote is not persisted in the database' do
+ it 'returns nil' do
+ remote_mirror = build(:remote_mirror, remote_name: nil)
+
+ expect(remote_mirror.remote_name).to be_nil
+ end
+ end
+ end
+
+ describe '#safe_url' do
+ context 'when URL contains credentials' do
+ it 'masks the credentials' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+
+ expect(mirror.safe_url).to eq('http://*****:*****@test.com')
+ end
+ end
+
+ context 'when URL does not contain credentials' do
+ it 'shows the full URL' do
+ mirror = create_mirror(url: 'http://test.com')
+
+ expect(mirror.safe_url).to eq('http://test.com')
+ end
+ end
+ end
+
+ context 'when remote mirror gets destroyed' do
+ it 'removes remote' do
+ mirror = create_mirror(url: 'http://foo:bar@test.com')
+
+ expect(RepositoryRemoveRemoteWorker).to receive(:perform_async).with(mirror.project.id, mirror.remote_name).and_call_original
+
+ mirror.destroy!
+ end
+ end
+
+ context 'stuck mirrors' do
+ it 'includes mirrors stuck in started with no last_update_at set' do
+ mirror = create_mirror(url: 'http://cantbeblank',
+ update_status: 'started',
+ last_update_at: nil,
+ updated_at: 25.hours.ago)
+
+ expect(described_class.stuck.last).to eq(mirror)
+ end
+ end
+
+ context '#sync' do
+ let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ context 'with remote mirroring disabled' do
+ it 'returns nil' do
+ remote_mirror.update_attributes(enabled: false)
+
+ expect(remote_mirror.sync).to be_nil
+ end
+ end
+
+ context 'with remote mirroring enabled' do
+ context 'with only protected branches enabled' do
+ context 'when it did not update in the last minute' do
+ it 'schedules a RepositoryUpdateRemoteMirrorWorker to run now' do
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.now)
+
+ remote_mirror.sync
+ end
+ end
+
+ context 'when it did update in the last minute' do
+ it 'schedules a RepositoryUpdateRemoteMirrorWorker to run in the next minute' do
+ remote_mirror.last_update_started_at = Time.now - 30.seconds
+
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_in).with(RemoteMirror::PROTECTED_BACKOFF_DELAY, remote_mirror.id, Time.now)
+
+ remote_mirror.sync
+ end
+ end
+ end
+
+ context 'with only protected branches disabled' do
+ before do
+ remote_mirror.only_protected_branches = false
+ end
+
+ context 'when it did not update in the last 5 minutes' do
+ it 'schedules a RepositoryUpdateRemoteMirrorWorker to run now' do
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.now)
+
+ remote_mirror.sync
+ end
+ end
+
+ context 'when it did update within the last 5 minutes' do
+ it 'schedules a RepositoryUpdateRemoteMirrorWorker to run in the next 5 minutes' do
+ remote_mirror.last_update_started_at = Time.now - 30.seconds
+
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_in).with(RemoteMirror::UNPROTECTED_BACKOFF_DELAY, remote_mirror.id, Time.now)
+
+ remote_mirror.sync
+ end
+ end
+ end
+ end
+ end
+
+ context '#updated_since?' do
+ let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
+ let(:timestamp) { Time.now - 5.minutes }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ before do
+ remote_mirror.update_attributes(last_update_started_at: Time.now)
+ end
+
+ context 'when remote mirror does not have status failed' do
+ it 'returns true when last update started after the timestamp' do
+ expect(remote_mirror.updated_since?(timestamp)).to be true
+ end
+
+ it 'returns false when last update started before the timestamp' do
+ expect(remote_mirror.updated_since?(Time.now + 5.minutes)).to be false
+ end
+ end
+
+ context 'when remote mirror has status failed' do
+ it 'returns false when last update started after the timestamp' do
+ remote_mirror.update_attributes(update_status: 'failed')
+
+ expect(remote_mirror.updated_since?(timestamp)).to be false
+ end
+ end
+ end
+
+ context 'when the project is pending delete' do
+ it 'does not sync and leaves the mirror valid' do
+ mirror = create_mirror(url: 'http://cantbeblank',
+ update_status: 'finished',
+ enabled: true,
+ last_update_at: nil,
+ updated_at: 25.hours.ago)
+ project = mirror.project
+ project.pending_delete = true
+ project.save
+ mirror.reload
+
+ expect(mirror.sync).to be_nil
+ expect(mirror.valid?).to be_truthy
+ expect(mirror.update_status).to eq('finished')
+ end
+ end
+
+ def create_mirror(params)
+ project = FactoryBot.create(:project, :repository)
+ project.remote_mirrors.create!(params)
+ end
+end
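
The #sync examples above describe a simple scheduling policy: do nothing when the mirror is disabled (or its project is pending delete), run the update worker immediately when the last update is old enough, and otherwise back off by PROTECTED_BACKOFF_DELAY or UNPROTECTED_BACKOFF_DELAY depending on the only-protected-branches setting. A sketch of that policy, with guard helpers as assumptions:

# Sketch of the scheduling policy described by the #sync examples.
def sync
  return unless enabled?
  return if project.pending_delete?

  if recently_started? # within the backoff window; helper name is an assumption
    RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, id, Time.now)
  else
    RepositoryUpdateRemoteMirrorWorker.perform_async(id, Time.now)
  end
end

def backoff_delay
  only_protected_branches? ? PROTECTED_BACKOFF_DELAY : UNPROTECTED_BACKOFF_DELAY
end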
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 630b9e0519f..4b736b02b7d 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -758,6 +758,38 @@ describe Repository do
end
end
+ describe '#async_remove_remote' do
+ before do
+ masterrev = repository.find_branch('master').dereferenced_target
+ create_remote_branch('joe', 'remote_branch', masterrev)
+ end
+
+ context 'when worker is scheduled successfully' do
+ before do
+ masterrev = repository.find_branch('master').dereferenced_target
+ create_remote_branch('remote_name', 'remote_branch', masterrev)
+
+ allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return('1234')
+ end
+
+ it 'returns job_id' do
+ expect(repository.async_remove_remote('joe')).to eq('1234')
+ end
+ end
+
+ context 'when worker does not schedule successfully' do
+ before do
+ allow(RepositoryRemoveRemoteWorker).to receive(:perform_async).and_return(nil)
+ end
+
+ it 'returns nil' do
+ expect(Rails.logger).to receive(:info).with("Remove remote job failed to create for #{project.id} with remote name joe.")
+
+ expect(repository.async_remove_remote('joe')).to be_nil
+ end
+ end
+ end
+
describe '#fetch_ref' do
let(:broken_repository) { create(:project, :broken_storage).repository }
@@ -2338,6 +2370,11 @@ describe Repository do
end
end
+ def create_remote_branch(remote_name, branch_name, target)
+ rugged = repository.rugged
+ rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", target.id)
+ end
+
describe '#ancestor?' do
let(:commit) { repository.commit }
let(:ancestor) { commit.parents.first }
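
The #async_remove_remote examples above expect the Sidekiq job id back on success, and a logged info message plus nil when the worker could not be scheduled; a short sketch matching them (the log text is copied from the spec) is:

# Sketch of #async_remove_remote as described by the examples above.
def async_remove_remote(remote_name)
  job_id = RepositoryRemoveRemoteWorker.perform_async(project.id, remote_name)

  unless job_id
    Rails.logger.info("Remove remote job failed to create for #{project.id} with remote name #{remote_name}.")
  end

  job_id
end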
diff --git a/spec/models/term_agreement_spec.rb b/spec/models/term_agreement_spec.rb
new file mode 100644
index 00000000000..a59bf119692
--- /dev/null
+++ b/spec/models/term_agreement_spec.rb
@@ -0,0 +1,8 @@
+require 'spec_helper'
+
+describe TermAgreement do
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:term) }
+ it { is_expected.to validate_presence_of(:user) }
+ end
+end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 90b7e7715a8..1c765ceac2f 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -1,7 +1,7 @@
require "spec_helper"
describe WikiPage do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :wiki_repo) }
let(:user) { project.owner }
let(:wiki) { ProjectWiki.new(project, user) }