gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/analytics/cycle_analytics/project_stage_spec.rb  | 9
-rw-r--r--  spec/models/application_setting_spec.rb  | 39
-rw-r--r--  spec/models/award_emoji_spec.rb  | 9
-rw-r--r--  spec/models/bulk_imports/entity_spec.rb  | 51
-rw-r--r--  spec/models/bulk_imports/export_status_spec.rb  | 30
-rw-r--r--  spec/models/bulk_imports/failure_spec.rb  | 36
-rw-r--r--  spec/models/ci/bridge_spec.rb  | 6
-rw-r--r--  spec/models/ci/build_metadata_spec.rb  | 57
-rw-r--r--  spec/models/ci/build_spec.rb  | 152
-rw-r--r--  spec/models/ci/build_trace_chunks/redis_spec.rb  | 8
-rw-r--r--  spec/models/ci/build_trace_spec.rb  | 7
-rw-r--r--  spec/models/ci/daily_build_group_report_result_spec.rb  | 37
-rw-r--r--  spec/models/ci/job_token/project_scope_link_spec.rb  | 11
-rw-r--r--  spec/models/ci/job_token/scope_spec.rb  | 4
-rw-r--r--  spec/models/ci/pipeline_metadata_spec.rb  | 14
-rw-r--r--  spec/models/ci/pipeline_spec.rb  | 113
-rw-r--r--  spec/models/ci/processable_spec.rb  | 2
-rw-r--r--  spec/models/ci/resource_group_spec.rb  | 6
-rw-r--r--  spec/models/ci/runner_spec.rb  | 184
-rw-r--r--  spec/models/ci/secure_file_spec.rb  | 66
-rw-r--r--  spec/models/ci/unit_test_spec.rb  | 40
-rw-r--r--  spec/models/ci/variable_spec.rb  | 2
-rw-r--r--  spec/models/clusters/agents/implicit_authorization_spec.rb  | 2
-rw-r--r--  spec/models/clusters/applications/cert_manager_spec.rb  | 52
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb  | 67
-rw-r--r--  spec/models/commit_collection_spec.rb  | 5
-rw-r--r--  spec/models/compare_spec.rb  | 15
-rw-r--r--  spec/models/concerns/approvable_spec.rb  | 8
-rw-r--r--  spec/models/concerns/atomic_internal_id_spec.rb  | 13
-rw-r--r--  spec/models/concerns/cascading_namespace_setting_attribute_spec.rb  | 347
-rw-r--r--  spec/models/concerns/ci/partitionable_spec.rb  | 26
-rw-r--r--  spec/models/concerns/counter_attribute_spec.rb  | 4
-rw-r--r--  spec/models/concerns/id_in_ordered_spec.rb  | 7
-rw-r--r--  spec/models/concerns/issuable_spec.rb  | 18
-rw-r--r--  spec/models/concerns/mentionable_spec.rb  | 2
-rw-r--r--  spec/models/concerns/noteable_spec.rb  | 100
-rw-r--r--  spec/models/concerns/participable_spec.rb  | 3
-rw-r--r--  spec/models/concerns/prometheus_adapter_spec.rb  | 2
-rw-r--r--  spec/models/concerns/routable_spec.rb  | 6
-rw-r--r--  spec/models/concerns/token_authenticatable_spec.rb  | 100
-rw-r--r--  spec/models/container_repository_spec.rb  | 2
-rw-r--r--  spec/models/deployment_spec.rb  | 15
-rw-r--r--  spec/models/diff_note_spec.rb  | 28
-rw-r--r--  spec/models/diff_viewer/server_side_spec.rb  | 26
-rw-r--r--  spec/models/discussion_spec.rb  | 9
-rw-r--r--  spec/models/environment_spec.rb  | 59
-rw-r--r--  spec/models/factories_spec.rb  | 2
-rw-r--r--  spec/models/group_spec.rb  | 19
-rw-r--r--  spec/models/hooks/web_hook_spec.rb  | 120
-rw-r--r--  spec/models/incident_management/timeline_event_spec.rb  | 7
-rw-r--r--  spec/models/incident_management/timeline_event_tag_link_spec.rb  | 10
-rw-r--r--  spec/models/incident_management/timeline_event_tag_spec.rb  | 28
-rw-r--r--  spec/models/integration_spec.rb  | 11
-rw-r--r--  spec/models/integrations/chat_message/issue_message_spec.rb  | 17
-rw-r--r--  spec/models/integrations/chat_message/wiki_page_message_spec.rb  | 26
-rw-r--r--  spec/models/integrations/datadog_spec.rb  | 4
-rw-r--r--  spec/models/integrations/harbor_spec.rb  | 70
-rw-r--r--  spec/models/integrations/jira_spec.rb  | 9
-rw-r--r--  spec/models/integrations/microsoft_teams_spec.rb  | 6
-rw-r--r--  spec/models/integrations/prometheus_spec.rb  | 4
-rw-r--r--  spec/models/jira_connect/public_key_spec.rb  | 90
-rw-r--r--  spec/models/jira_connect_installation_spec.rb  | 98
-rw-r--r--  spec/models/label_note_spec.rb  | 14
-rw-r--r--  spec/models/member_spec.rb  | 24
-rw-r--r--  spec/models/members/member_role_spec.rb  | 34
-rw-r--r--  spec/models/merge_request/cleanup_schedule_spec.rb  | 13
-rw-r--r--  spec/models/merge_request_diff_file_spec.rb  | 20
-rw-r--r--  spec/models/merge_request_diff_spec.rb  | 119
-rw-r--r--  spec/models/merge_request_spec.rb  | 311
-rw-r--r--  spec/models/milestone_spec.rb  | 128
-rw-r--r--  spec/models/ml/candidate_spec.rb  | 10
-rw-r--r--  spec/models/ml/experiment_spec.rb  | 33
-rw-r--r--  spec/models/namespace/aggregation_schedule_spec.rb  | 26
-rw-r--r--  spec/models/namespace/package_setting_spec.rb  | 9
-rw-r--r--  spec/models/namespace_setting_spec.rb  | 4
-rw-r--r--  spec/models/namespace_spec.rb  | 21
-rw-r--r--  spec/models/note_spec.rb  | 10
-rw-r--r--  spec/models/notification_recipient_spec.rb  | 20
-rw-r--r--  spec/models/operations/feature_flags/strategy_spec.rb  | 26
-rw-r--r--  spec/models/packages/package_spec.rb  | 8
-rw-r--r--  spec/models/packages/rpm/repository_file_spec.rb  | 44
-rw-r--r--  spec/models/pages_domain_spec.rb  | 2
-rw-r--r--  spec/models/personal_access_token_spec.rb  | 25
-rw-r--r--  spec/models/pool_repository_spec.rb  | 13
-rw-r--r--  spec/models/preloaders/project_root_ancestor_preloader_spec.rb  | 48
-rw-r--r--  spec/models/project_authorization_spec.rb  | 186
-rw-r--r--  spec/models/project_group_link_spec.rb  | 6
-rw-r--r--  spec/models/project_setting_spec.rb  | 4
-rw-r--r--  spec/models/project_spec.rb  | 106
-rw-r--r--  spec/models/project_statistics_spec.rb  | 48
-rw-r--r--  spec/models/protected_branch_spec.rb  | 24
-rw-r--r--  spec/models/repository_spec.rb  | 191
-rw-r--r--  spec/models/resource_label_event_spec.rb  | 10
-rw-r--r--  spec/models/user_detail_spec.rb  | 135
-rw-r--r--  spec/models/user_preference_spec.rb  | 51
-rw-r--r--  spec/models/user_spec.rb  | 263
-rw-r--r--  spec/models/users/namespace_callout_spec.rb  | 39
-rw-r--r--  spec/models/users/phone_number_validation_spec.rb  | 81
-rw-r--r--  spec/models/wiki_directory_spec.rb  | 49
-rw-r--r--  spec/models/wiki_page_spec.rb  | 148
100 files changed, 3095 insertions, 1528 deletions
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index a67f9fec443..697b7aee022 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -48,10 +48,11 @@ RSpec.describe Analytics::CycleAnalytics::ProjectStage do
subject(:distinct_start_and_end_event_identifiers) { described_class.distinct_stages_within_hierarchy(top_level_group).to_a.pluck(:start_event_identifier, :end_event_identifier) }
it 'returns distinct stages by start and end events (using stage_event_hash_id)' do
- expect(distinct_start_and_end_event_identifiers).to match_array([
- %w[issue_created issue_deployed_to_production],
- %w[merge_request_created merge_request_merged]
- ])
+ expect(distinct_start_and_end_event_identifiers).to match_array(
+ [
+ %w[issue_created issue_deployed_to_production],
+ %w[merge_request_created merge_request_merged]
+ ])
end
end
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index b5f153e7add..77bb6b502b5 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -203,6 +203,17 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value([]).for(:valid_runner_registrars) }
it { is_expected.to allow_value(%w(project group)).for(:valid_runner_registrars) }
+ context 'when deactivate_dormant_users is enabled' do
+ before do
+ stub_application_setting(deactivate_dormant_users: true)
+ end
+
+ it { is_expected.not_to allow_value(nil).for(:deactivate_dormant_users_period) }
+ it { is_expected.to allow_value(90).for(:deactivate_dormant_users_period) }
+ it { is_expected.to allow_value(365).for(:deactivate_dormant_users_period) }
+ it { is_expected.not_to allow_value(89).for(:deactivate_dormant_users_period) }
+ end
+
context 'help_page_documentation_base_url validations' do
it { is_expected.to allow_value(nil).for(:help_page_documentation_base_url) }
it { is_expected.to allow_value('https://docs.gitlab.com').for(:help_page_documentation_base_url) }
@@ -257,11 +268,12 @@ RSpec.describe ApplicationSetting do
subject.grafana_url = ' ' + http
expect(subject.save).to be false
- expect(subject.errors[:grafana_url]).to eq([
- 'must be a valid relative or absolute URL. ' \
- 'Please check your Grafana URL setting in ' \
- 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
- ])
+ expect(subject.errors[:grafana_url]).to eq(
+ [
+ 'must be a valid relative or absolute URL. ' \
+ 'Please check your Grafana URL setting in ' \
+ 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
+ ])
end
end
@@ -270,11 +282,12 @@ RSpec.describe ApplicationSetting do
subject.grafana_url = javascript
expect(subject.save).to be false
- expect(subject.errors[:grafana_url]).to eq([
- 'is blocked: Only allowed schemes are http, https. Please check your ' \
- 'Grafana URL setting in ' \
- 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
- ])
+ expect(subject.errors[:grafana_url]).to eq(
+ [
+ 'is blocked: Only allowed schemes are http, https. Please check your ' \
+ 'Grafana URL setting in ' \
+ 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
+ ])
end
end
end
@@ -1453,4 +1466,10 @@ RSpec.describe ApplicationSetting do
expect(setting.personal_access_token_prefix).to eql('glpat-')
end
end
+
+ describe '.personal_access_tokens_disabled?' do
+ it 'is false' do
+ expect(setting.personal_access_tokens_disabled?).to eq(false)
+ end
+ end
end
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index 4da19267b1c..2593c9b3595 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -290,4 +290,13 @@ RSpec.describe AwardEmoji do
end
end
end
+
+ describe '#to_ability_name' do
+ let(:merge_request) { create(:merge_request) }
+ let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: merge_request) }
+
+ it 'returns correct ability name' do
+ expect(award_emoji.to_ability_name).to be('emoji')
+ end
+ end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 874009d552a..f4f2b174a7b 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -46,6 +46,8 @@ RSpec.describe BulkImports::Entity, type: :model do
end
it 'is invalid as a project_entity' do
+ stub_feature_flags(bulk_import_projects: true)
+
entity = build(:bulk_import_entity, :project_entity, group: build(:group), project: nil)
expect(entity).not_to be_valid
@@ -55,6 +57,8 @@ RSpec.describe BulkImports::Entity, type: :model do
context 'when associated with a project and no group' do
it 'is valid' do
+ stub_feature_flags(bulk_import_projects: true)
+
entity = build(:bulk_import_entity, :project_entity, group: nil, project: build(:project))
expect(entity).to be_valid
@@ -84,6 +88,8 @@ RSpec.describe BulkImports::Entity, type: :model do
context 'when the parent is a project import' do
it 'is invalid' do
+ stub_feature_flags(bulk_import_projects: true)
+
entity = build(:bulk_import_entity, parent: build(:bulk_import_entity, :project_entity))
expect(entity).not_to be_valid
@@ -124,6 +130,39 @@ RSpec.describe BulkImports::Entity, type: :model do
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
end
end
+
+ context 'when bulk_import_projects feature flag is disabled and source_type is a project_entity' do
+ it 'is invalid' do
+ stub_feature_flags(bulk_import_projects: false)
+
+ entity = build(:bulk_import_entity, :project_entity)
+
+ expect(entity).not_to be_valid
+ expect(entity.errors[:base]).to include('invalid entity source type')
+ end
+ end
+
+ context 'when bulk_import_projects feature flag is enabled and source_type is a project_entity' do
+ it 'is valid' do
+ stub_feature_flags(bulk_import_projects: true)
+
+ entity = build(:bulk_import_entity, :project_entity)
+
+ expect(entity).to be_valid
+ end
+ end
+
+ context 'when bulk_import_projects feature flag is enabled on root ancestor level and source_type is a project_entity' do
+ it 'is valid' do
+ top_level_namespace = create(:group)
+
+ stub_feature_flags(bulk_import_projects: top_level_namespace)
+
+ entity = build(:bulk_import_entity, :project_entity, destination_namespace: top_level_namespace.full_path)
+
+ expect(entity).to be_valid
+ end
+ end
end
describe '#encoded_source_full_path' do
@@ -209,7 +248,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns group export relations url' do
entity = build(:bulk_import_entity, :group_entity)
- expect(entity.export_relations_url_path).to eq("/groups/#{entity.encoded_source_full_path}/export_relations")
+ expect(entity.export_relations_url_path).to eq("/groups/#{entity.source_xid}/export_relations")
end
end
@@ -217,7 +256,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns project export relations url' do
entity = build(:bulk_import_entity, :project_entity)
- expect(entity.export_relations_url_path).to eq("/projects/#{entity.encoded_source_full_path}/export_relations")
+ expect(entity.export_relations_url_path).to eq("/projects/#{entity.source_xid}/export_relations")
end
end
end
@@ -227,7 +266,7 @@ RSpec.describe BulkImports::Entity, type: :model do
entity = build(:bulk_import_entity)
expect(entity.relation_download_url_path('test'))
- .to eq("/groups/#{entity.encoded_source_full_path}/export_relations/download?relation=test")
+ .to eq("/groups/#{entity.source_xid}/export_relations/download?relation=test")
end
end
@@ -263,15 +302,15 @@ RSpec.describe BulkImports::Entity, type: :model do
describe '#base_resource_url_path' do
it 'returns base entity url path' do
- entity = build(:bulk_import_entity)
+ entity = build(:bulk_import_entity, source_xid: nil)
- expect(entity.base_resource_url_path).to eq("/groups/#{entity.encoded_source_full_path}")
+ expect(entity.base_resource_path).to eq("/groups/#{entity.encoded_source_full_path}")
end
end
describe '#wiki_url_path' do
it 'returns entity wiki url path' do
- entity = build(:bulk_import_entity)
+ entity = build(:bulk_import_entity, source_xid: nil)
expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
end
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index 6ade82409dc..0921c3bdce2 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -157,12 +157,36 @@ RSpec.describe BulkImports::ExportStatus do
end
context 'when something goes wrong during export status fetch' do
- it 'returns exception class as error' do
+ let(:exception) { BulkImports::NetworkError.new('Error!') }
+
+ before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:get).and_raise(StandardError, 'Error!')
+ allow(client).to receive(:get).once.and_raise(exception)
end
+ end
+
+ it 'raises RetryPipelineError' do
+ allow(exception).to receive(:retriable?).with(tracker).and_return(true)
+
+ expect { subject.failed? }.to raise_error(BulkImports::RetryPipelineError)
+ end
- expect(subject.error).to eq('Error!')
+ context 'when error is not retriable' do
+ it 'returns exception class as error' do
+ expect(subject.error).to eq('Error!')
+ expect(subject.failed?).to eq(true)
+ end
+ end
+
+ context 'when error raised is not a network error' do
+ it 'returns exception class as error' do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).once.and_raise(StandardError, 'Standard Error!')
+ end
+
+ expect(subject.error).to eq('Standard Error!')
+ expect(subject.failed?).to eq(true)
+ end
end
end
end
diff --git a/spec/models/bulk_imports/failure_spec.rb b/spec/models/bulk_imports/failure_spec.rb
index cde62659a48..b3fd60ba348 100644
--- a/spec/models/bulk_imports/failure_spec.rb
+++ b/spec/models/bulk_imports/failure_spec.rb
@@ -3,15 +3,45 @@
require 'spec_helper'
RSpec.describe BulkImports::Failure, type: :model do
+ let(:failure) { create(:bulk_import_failure) }
+
describe 'associations' do
it { is_expected.to belong_to(:entity).required }
end
describe 'validations' do
- before do
- create(:bulk_import_failure)
+ it { is_expected.to validate_presence_of(:entity) }
+ end
+
+ describe '#relation' do
+ context 'when pipeline class is valid' do
+ it 'returns pipeline defined relation' do
+ failure.update!(pipeline_class: 'BulkImports::Common::Pipelines::WikiPipeline')
+
+ expect(failure.relation).to eq('wiki')
+ end
end
- it { is_expected.to validate_presence_of(:entity) }
+ context 'when pipeline class is invalid' do
+ it 'returns default relation' do
+ failure.update!(pipeline_class: 'foobar')
+
+ expect(failure.relation).to eq('foobar')
+ end
+
+ context 'when pipeline class is outside of BulkImports namespace' do
+ it 'returns default relation' do
+ failure.update!(pipeline_class: 'Gitlab::ImportExport::Importer')
+
+ expect(failure.relation).to eq('importer')
+ end
+ end
+
+ it 'returns demodulized, underscored, chomped string' do
+ failure.update!(pipeline_class: 'BulkImports::Pipelines::Test::TestRelationPipeline')
+
+ expect(failure.relation).to eq('test_relation')
+ end
+ end
end
end
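
The relation names asserted above follow from plain ActiveSupport string inflections. A minimal sketch consistent with those expectations, not the actual BulkImports::Failure implementation:

require 'active_support/core_ext/string/inflections'

# Sketch: derive a relation name from a pipeline class path by taking the last
# constant, dropping a trailing 'Pipeline', and underscoring the result.
def relation_for(pipeline_class)
  pipeline_class.demodulize.chomp('Pipeline').underscore
end

relation_for('BulkImports::Common::Pipelines::WikiPipeline')       # => "wiki"
relation_for('BulkImports::Pipelines::Test::TestRelationPipeline') # => "test_relation"
relation_for('Gitlab::ImportExport::Importer')                     # => "importer"
relation_for('foobar')                                             # => "foobar"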
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 40c2d62c465..44a6bec0130 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -86,9 +86,9 @@ RSpec.describe Ci::Bridge do
describe '#scoped_variables' do
it 'returns a hash representing variables' do
variables = %w[
- CI_JOB_NAME CI_JOB_STAGE CI_COMMIT_SHA CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA CI_COMMIT_REF_NAME CI_COMMIT_REF_SLUG
- CI_PROJECT_ID CI_PROJECT_NAME CI_PROJECT_PATH
+ CI_JOB_NAME CI_JOB_NAME_SLUG CI_JOB_STAGE CI_COMMIT_SHA
+ CI_COMMIT_SHORT_SHA CI_COMMIT_BEFORE_SHA CI_COMMIT_REF_NAME
+ CI_COMMIT_REF_SLUG CI_PROJECT_ID CI_PROJECT_NAME CI_PROJECT_PATH
CI_PROJECT_PATH_SLUG CI_PROJECT_NAMESPACE CI_PROJECT_ROOT_NAMESPACE
CI_PIPELINE_IID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE
CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION CI_COMMIT_REF_PROTECTED
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index e904463a5ca..16cff72db64 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -14,8 +14,8 @@ RSpec.describe Ci::BuildMetadata do
status: 'success')
end
- let(:build) { create(:ci_build, pipeline: pipeline) }
- let(:metadata) { build.metadata }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+ let(:metadata) { job.metadata }
it_behaves_like 'having unique enum values'
@@ -35,7 +35,7 @@ RSpec.describe Ci::BuildMetadata do
context 'when project timeout is set' do
context 'when runner is assigned to the job' do
before do
- build.update!(runner: runner)
+ job.update!(runner: runner)
end
context 'when runner timeout is not set' do
@@ -59,13 +59,13 @@ RSpec.describe Ci::BuildMetadata do
context 'when job timeout is set' do
context 'when job timeout is higher than project timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
it_behaves_like 'sets timeout', 'job_timeout_source', 3000
end
context 'when job timeout is lower than project timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1000 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1000 }) }
it_behaves_like 'sets timeout', 'job_timeout_source', 1000
end
@@ -73,18 +73,18 @@ RSpec.describe Ci::BuildMetadata do
context 'when both runner and job timeouts are set' do
before do
- build.update!(runner: runner)
+ job.update!(runner: runner)
end
context 'when job timeout is higher than runner timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
let(:runner) { create(:ci_runner, maximum_timeout: 2100) }
it_behaves_like 'sets timeout', 'runner_timeout_source', 2100
end
context 'when job timeout is lower than runner timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1900 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1900 }) }
let(:runner) { create(:ci_runner, maximum_timeout: 2100) }
it_behaves_like 'sets timeout', 'job_timeout_source', 1900
@@ -135,20 +135,51 @@ RSpec.describe Ci::BuildMetadata do
describe 'set_cancel_gracefully' do
it 'sets cancel_gracefully' do
- build.set_cancel_gracefully
+ job.set_cancel_gracefully
- expect(build.cancel_gracefully?).to be true
+ expect(job.cancel_gracefully?).to be true
end
it 'returns false' do
- expect(build.cancel_gracefully?).to be false
+ expect(job.cancel_gracefully?).to be false
end
end
context 'loose foreign key on ci_builds_metadata.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
- let!(:model) { create(:ci_build_metadata, project: parent) }
+ let!(:parent) { project }
+ let!(:model) { metadata }
+ end
+ end
+
+ describe 'partitioning' do
+ context 'with job' do
+ let(:status) { build(:commit_status, partition_id: 123) }
+ let(:metadata) { build(:ci_build_metadata, build: status) }
+
+ it 'copies the partition_id from job' do
+ expect { metadata.valid? }.to change(metadata, :partition_id).to(123)
+ end
+
+ context 'when it is already set' do
+ let(:metadata) { build(:ci_build_metadata, build: status, partition_id: 125) }
+
+ it 'does not change the partition_id value' do
+ expect { metadata.valid? }.not_to change(metadata, :partition_id)
+ end
+ end
+ end
+
+ context 'without job' do
+ subject(:metadata) do
+ build(:ci_build_metadata, build: nil)
+ end
+
+ it { is_expected.to validate_presence_of(:partition_id) }
+
+ it 'does not change the partition_id value' do
+ expect { metadata.valid? }.not_to change(metadata, :partition_id)
+ end
end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 7ee381b29ea..9713734e97a 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -160,6 +160,42 @@ RSpec.describe Ci::Build do
end
end
+ describe '.with_erasable_artifacts' do
+ subject { described_class.with_erasable_artifacts }
+
+ context 'when job does not have any artifacts' do
+ let!(:job) { create(:ci_build) }
+
+ it 'does not return the job' do
+ is_expected.not_to include(job)
+ end
+ end
+
+ ::Ci::JobArtifact.erasable_file_types.each do |type|
+ context "when job has a #{type} artifact" do
+ it 'returns the job' do
+ job = create(:ci_build)
+ create(
+ :ci_job_artifact,
+ file_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[type.to_sym],
+ file_type: type,
+ job: job
+ )
+
+ is_expected.to include(job)
+ end
+ end
+ end
+
+ context 'when job has a non-erasable artifact' do
+ let!(:job) { create(:ci_build, :trace_artifact) }
+
+ it 'does not return the job' do
+ is_expected.not_to include(job)
+ end
+ end
+ end
+
describe '.with_live_trace' do
subject { described_class.with_live_trace }
@@ -284,10 +320,10 @@ RSpec.describe Ci::Build do
let(:artifact_scope) { Ci::JobArtifact.where(file_type: 'archive') }
- let!(:build_1) { create(:ci_build, :artifacts) }
- let!(:build_2) { create(:ci_build, :codequality_reports) }
- let!(:build_3) { create(:ci_build, :test_reports) }
- let!(:build_4) { create(:ci_build, :artifacts) }
+ let!(:build_1) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let!(:build_2) { create(:ci_build, :codequality_reports, pipeline: pipeline) }
+ let!(:build_3) { create(:ci_build, :test_reports, pipeline: pipeline) }
+ let!(:build_4) { create(:ci_build, :artifacts, pipeline: pipeline) }
it 'returns artifacts matching the given scope' do
expect(builds).to contain_exactly(build_1, build_4)
@@ -596,15 +632,6 @@ RSpec.describe Ci::Build do
it { expect(subject).to be_falsey }
end
- context 'when prevent_outdated_deployment_jobs FF is disabled' do
- before do
- stub_feature_flags(prevent_outdated_deployment_jobs: false)
- expect(build.deployment).not_to receive(:rollback?)
- end
-
- it { expect(subject).to be_falsey }
- end
-
context 'when build can prevent rollback deployment' do
before do
expect(build.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
@@ -2668,6 +2695,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_JOB_JWT_V1', value: 'ci.job.jwt', public: false, masked: true },
{ key: 'CI_JOB_JWT_V2', value: 'ci.job.jwtv2', public: false, masked: true },
{ key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
+ { key: 'CI_JOB_NAME_SLUG', value: 'test', public: true, masked: false },
{ key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
{ key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false },
{ key: 'CI_BUILD_NAME', value: 'test', public: true, masked: false },
@@ -2780,6 +2808,14 @@ RSpec.describe Ci::Build do
end
end
+ context 'when the opt_in_jwt project setting is true' do
+ it 'does not include the JWT variables' do
+ project.ci_cd_settings.update!(opt_in_jwt: true)
+
+ expect(subject.pluck(:key)).not_to include('CI_JOB_JWT', 'CI_JOB_JWT_V1', 'CI_JOB_JWT_V2')
+ end
+ end
+
describe 'variables ordering' do
context 'when variables hierarchy is stubbed' do
let(:build_pre_var) { { key: 'build', value: 'value', public: true, masked: false } }
@@ -3069,8 +3105,24 @@ RSpec.describe Ci::Build do
end
context 'when build is for tag' do
+ let(:tag_name) { project.repository.tags.first.name }
+ let(:tag_message) { project.repository.tags.first.message }
+
+ let!(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: project.commit.id,
+ ref: tag_name,
+ status: 'success')
+ end
+
+ let!(:build) { create(:ci_build, pipeline: pipeline, ref: tag_name) }
+
let(:tag_variable) do
- { key: 'CI_COMMIT_TAG', value: 'master', public: true, masked: false }
+ { key: 'CI_COMMIT_TAG', value: tag_name, public: true, masked: false }
+ end
+
+ let(:tag_message_variable) do
+ { key: 'CI_COMMIT_TAG_MESSAGE', value: tag_message, public: true, masked: false }
end
before do
@@ -3081,7 +3133,7 @@ RSpec.describe Ci::Build do
it do
build.reload
- expect(subject).to include(tag_variable)
+ expect(subject).to include(tag_variable, tag_message_variable)
end
end
@@ -3474,6 +3526,49 @@ RSpec.describe Ci::Build do
it { is_expected.to include(key: job_variable.key, value: job_variable.value, public: false, masked: false) }
end
+
+ context 'when ID tokens are defined on the build' do
+ before do
+ rsa_key = OpenSSL::PKey::RSA.generate(3072).to_s
+ stub_application_setting(ci_jwt_signing_key: rsa_key)
+ build.metadata.update!(id_tokens: {
+ 'ID_TOKEN_1' => { id_token: { aud: 'developers' } },
+ 'ID_TOKEN_2' => { id_token: { aud: 'maintainers' } }
+ })
+ end
+
+ subject(:runner_vars) { build.variables.to_runner_variables }
+
+ it 'includes the ID token variables' do
+ expect(runner_vars).to include(
+ a_hash_including(key: 'ID_TOKEN_1', public: false, masked: true),
+ a_hash_including(key: 'ID_TOKEN_2', public: false, masked: true)
+ )
+
+ id_token_var_1 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_1' }
+ id_token_var_2 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_2' }
+ id_token_1 = JWT.decode(id_token_var_1[:value], nil, false).first
+ id_token_2 = JWT.decode(id_token_var_2[:value], nil, false).first
+ expect(id_token_1['aud']).to eq('developers')
+ expect(id_token_2['aud']).to eq('maintainers')
+ end
+
+ context 'when a NoSigningKeyError is raised' do
+ it 'does not include the ID token variables' do
+ allow(::Gitlab::Ci::JwtV2).to receive(:for_build).and_raise(::Gitlab::Ci::Jwt::NoSigningKeyError)
+
+ expect(runner_vars.map { |var| var[:key] }).not_to include('ID_TOKEN_1', 'ID_TOKEN_2')
+ end
+ end
+
+ context 'when a RSAError is raised' do
+ it 'does not include the ID token variables' do
+ allow(::Gitlab::Ci::JwtV2).to receive(:for_build).and_raise(::OpenSSL::PKey::RSAError)
+
+ expect(runner_vars.map { |var| var[:key] }).not_to include('ID_TOKEN_1', 'ID_TOKEN_2')
+ end
+ end
+ end
end
describe '#scoped_variables' do
@@ -5171,10 +5266,11 @@ RSpec.describe Ci::Build do
it { expect(matchers.size).to eq(2) }
it 'groups build ids' do
- expect(matchers.map(&:build_ids)).to match_array([
- [build_without_tags.id],
- match_array([build_with_tags.id, other_build_with_tags.id])
- ])
+ expect(matchers.map(&:build_ids)).to match_array(
+ [
+ [build_without_tags.id],
+ match_array([build_with_tags.id, other_build_with_tags.id])
+ ])
end
it { expect(matchers.map(&:tag_list)).to match_array([[], %w[tag1 tag2]]) }
@@ -5362,7 +5458,7 @@ RSpec.describe Ci::Build do
end
describe '#clone' do
- let_it_be(:user) { FactoryBot.build(:user) }
+ let_it_be(:user) { create(:user) }
context 'when given new job variables' do
context 'when the cloned build has an action' do
@@ -5371,10 +5467,11 @@ RSpec.describe Ci::Build do
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
create(:ci_job_variable, job: build, key: 'OLD_KEY', value: 'i will not live for long')
- new_build = build.clone(current_user: user, new_job_variables_attributes: [
- { key: 'TEST_KEY', value: 'new value' },
- { key: 'NEW_KEY', value: 'exciting new value' }
- ])
+ new_build = build.clone(current_user: user, new_job_variables_attributes:
+ [
+ { key: 'TEST_KEY', value: 'new value' },
+ { key: 'NEW_KEY', value: 'exciting new value' }
+ ])
new_build.save!
expect(new_build.job_variables.count).to be(2)
@@ -5388,9 +5485,10 @@ RSpec.describe Ci::Build do
build = create(:ci_build)
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
- new_build = build.clone(current_user: user, new_job_variables_attributes: [
- { key: 'TEST_KEY', value: 'new value' }
- ])
+ new_build = build.clone(
+ current_user: user,
+ new_job_variables_attributes: [{ key: 'TEST_KEY', value: 'new value' }]
+ )
new_build.save!
expect(new_build.job_variables.count).to be(1)
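
The ID token assertions above decode the generated runner variables without verifying their signatures. A self-contained sketch of that decoding pattern with the ruby-jwt gem; the key and the 'aud' claim here are placeholders, not GitLab's actual signing setup:

require 'jwt'
require 'openssl'

rsa_key = OpenSSL::PKey::RSA.generate(2048)       # placeholder signing key
token   = JWT.encode({ 'aud' => 'developers' }, rsa_key, 'RS256')

payload, _header = JWT.decode(token, nil, false)  # decode without verification
payload['aud']                                    # => "developers"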
diff --git a/spec/models/ci/build_trace_chunks/redis_spec.rb b/spec/models/ci/build_trace_chunks/redis_spec.rb
index c004887d609..0d8cda7b3d8 100644
--- a/spec/models/ci/build_trace_chunks/redis_spec.rb
+++ b/spec/models/ci/build_trace_chunks/redis_spec.rb
@@ -211,15 +211,15 @@ RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
it 'deletes multiple data' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_truthy
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_truthy
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:0")).to eq(true)
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:1")).to eq(true)
end
subject
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_falsy
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_falsy
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:0")).to eq(false)
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:1")).to eq(false)
end
end
end
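
The switch from exists to exists? above tracks redis-rb 4.2+, where exists returns an Integer count while exists? returns a Boolean. A short sketch of the difference (the key name is illustrative):

require 'redis'

redis = Redis.new
redis.set('gitlab:ci:trace:1:chunks:0', 'chunk data')
redis.exists('gitlab:ci:trace:1:chunks:0')   # => 1 (Integer count, redis-rb >= 4.2)
redis.exists?('gitlab:ci:trace:1:chunks:0')  # => true (Boolean)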
diff --git a/spec/models/ci/build_trace_spec.rb b/spec/models/ci/build_trace_spec.rb
index bd24e8be1ac..f2df4874b84 100644
--- a/spec/models/ci/build_trace_spec.rb
+++ b/spec/models/ci/build_trace_spec.rb
@@ -28,9 +28,10 @@ RSpec.describe Ci::BuildTrace do
it_behaves_like 'delegates methods'
it 'returns formatted trace' do
- expect(subject.lines).to eq([
- { offset: 0, content: [{ text: 'the-stream' }] }
- ])
+ expect(subject.lines).to eq(
+ [
+ { offset: 0, content: [{ text: 'the-stream' }] }
+ ])
end
context 'with invalid UTF-8 data' do
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index d0141a1469e..cd55817243f 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -41,24 +41,25 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
let!(:new_pipeline) { create(:ci_pipeline) }
it 'creates or updates matching report results' do
- described_class.upsert_reports([
- {
- project_id: rspec_coverage.project_id,
- ref_path: rspec_coverage.ref_path,
- last_pipeline_id: new_pipeline.id,
- date: rspec_coverage.date,
- group_name: 'rspec',
- data: { 'coverage' => 81.0 }
- },
- {
- project_id: rspec_coverage.project_id,
- ref_path: rspec_coverage.ref_path,
- last_pipeline_id: new_pipeline.id,
- date: rspec_coverage.date,
- group_name: 'karma',
- data: { 'coverage' => 87.0 }
- }
- ])
+ described_class.upsert_reports(
+ [
+ {
+ project_id: rspec_coverage.project_id,
+ ref_path: rspec_coverage.ref_path,
+ last_pipeline_id: new_pipeline.id,
+ date: rspec_coverage.date,
+ group_name: 'rspec',
+ data: { 'coverage' => 81.0 }
+ },
+ {
+ project_id: rspec_coverage.project_id,
+ ref_path: rspec_coverage.ref_path,
+ last_pipeline_id: new_pipeline.id,
+ date: rspec_coverage.date,
+ group_name: 'karma',
+ data: { 'coverage' => 87.0 }
+ }
+ ])
rspec_coverage.reload
diff --git a/spec/models/ci/job_token/project_scope_link_spec.rb b/spec/models/ci/job_token/project_scope_link_spec.rb
index c000a3e29f7..92ed86b55b2 100644
--- a/spec/models/ci/job_token/project_scope_link_spec.rb
+++ b/spec/models/ci/job_token/project_scope_link_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
it { is_expected.to belong_to(:target_project) }
it { is_expected.to belong_to(:added_by) }
+ let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
it_behaves_like 'cleanup by a loose foreign key' do
@@ -89,16 +90,22 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
end
end
+ describe 'enums' do
+ let(:directions) { { outbound: 0, inbound: 1 } }
+
+ it { is_expected.to define_enum_for(:direction).with_values(directions) }
+ end
+
context 'loose foreign key on ci_job_token_project_scope_links.source_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, namespace: group) }
let!(:model) { create(:ci_job_token_project_scope_link, source_project: parent) }
end
end
context 'loose foreign key on ci_job_token_project_scope_links.target_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, namespace: group) }
let!(:model) { create(:ci_job_token_project_scope_link, target_project: parent) }
end
end
diff --git a/spec/models/ci/job_token/scope_spec.rb b/spec/models/ci/job_token/scope_spec.rb
index 4b95adf8476..1e3f6d044d2 100644
--- a/spec/models/ci/job_token/scope_spec.rb
+++ b/spec/models/ci/job_token/scope_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::JobToken::Scope do
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let(:scope) { described_class.new(project) }
@@ -53,7 +53,7 @@ RSpec.describe Ci::JobToken::Scope do
context 'when project scope setting is disabled' do
before do
- project.ci_job_token_scope_enabled = false
+ project.ci_outbound_job_token_scope_enabled = false
end
it 'considers any project to be part of the scope' do
diff --git a/spec/models/ci/pipeline_metadata_spec.rb b/spec/models/ci/pipeline_metadata_spec.rb
new file mode 100644
index 00000000000..0704cbc8ec1
--- /dev/null
+++ b/spec/models/ci/pipeline_metadata_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineMetadata do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:pipeline) }
+
+ describe 'validations' do
+ it { is_expected.to validate_length_of(:title).is_at_least(1).is_at_most(255) }
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:pipeline) }
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index ec03030a4b8..b2316949497 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -43,12 +43,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to have_one(:triggered_by_pipeline) }
it { is_expected.to have_one(:source_job) }
it { is_expected.to have_one(:pipeline_config) }
+ it { is_expected.to have_one(:pipeline_metadata) }
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :git_author_full_text }
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
+ it { is_expected.to delegate_method(:title).to(:pipeline_metadata).allow_nil }
describe 'validations' do
it { is_expected.to validate_presence_of(:sha) }
@@ -2981,6 +2983,24 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+ it 'logs the event' do
+ allow(Gitlab::AppJsonLogger).to receive(:info)
+
+ pipeline.cancel_running
+
+ expect(Gitlab::AppJsonLogger)
+ .to have_received(:info)
+ .with(
+ a_hash_including(
+ event: 'pipeline_cancel_running',
+ pipeline_id: pipeline.id,
+ auto_canceled_by_pipeline_id: nil,
+ cascade_to_children: true,
+ execute_async: true
+ )
+ )
+ end
+
context 'when there is a running external job and a regular job' do
before do
create(:ci_build, :running, pipeline: pipeline)
@@ -3813,7 +3833,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#upstream_root' do
subject { pipeline.upstream_root }
- let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be_with_refind(:pipeline) { create(:ci_pipeline) }
context 'when pipeline is child of child pipeline' do
let!(:root_ancestor) { create(:ci_pipeline) }
@@ -4529,10 +4549,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'returns accessibility report with collected data' do
- expect(subject.urls.keys).to match_array([
- "https://pa11y.org/",
- "https://about.gitlab.com/"
- ])
+ expect(subject.urls.keys).to match_array(
+ [
+ "https://pa11y.org/",
+ "https://about.gitlab.com/"
+ ])
end
context 'when builds are retried' do
@@ -5316,19 +5337,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#authorized_cluster_agents' do
+ describe '#cluster_agent_authorizations' do
let(:pipeline) { create(:ci_empty_pipeline, :created) }
- let(:agent) { instance_double(Clusters::Agent) }
- let(:authorization) { instance_double(Clusters::Agents::GroupAuthorization, agent: agent) }
+ let(:authorization) { instance_double(Clusters::Agents::GroupAuthorization) }
let(:finder) { double(execute: [authorization]) }
- it 'retrieves agent records from the finder and caches the result' do
+ it 'retrieves authorization records from the finder and caches the result' do
expect(Clusters::AgentAuthorizationsFinder).to receive(:new).once
.with(pipeline.project)
.and_return(finder)
- expect(pipeline.authorized_cluster_agents).to contain_exactly(agent)
- expect(pipeline.authorized_cluster_agents).to contain_exactly(agent) # cached
+ expect(pipeline.cluster_agent_authorizations).to contain_exactly(authorization)
+ expect(pipeline.cluster_agent_authorizations).to contain_exactly(authorization) # cached
end
end
@@ -5486,7 +5506,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'partitioning' do
- let(:pipeline) { build(:ci_pipeline) }
+ let(:pipeline) { build(:ci_pipeline, partition_id: nil) }
before do
allow(described_class).to receive(:current_partition_value) { 123 }
@@ -5516,4 +5536,73 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
end
+
+ describe '#notes=' do
+ context 'when notes already exist' do
+ it 'does not create duplicate notes', :aggregate_failures do
+ time = Time.zone.now
+ pipeline = create(:ci_pipeline, user: user, project: project)
+ note = Note.new(
+ note: 'note',
+ noteable_type: 'Commit',
+ noteable_id: pipeline.id,
+ commit_id: pipeline.id,
+ author_id: user.id,
+ project_id: pipeline.project_id,
+ created_at: time
+ )
+ another_note = note.dup.tap { |note| note.note = 'another note' }
+
+ expect(project.notes.for_commit_id(pipeline.sha).count).to eq(0)
+
+ pipeline.notes = [note]
+
+ expect(project.notes.for_commit_id(pipeline.sha).count).to eq(1)
+
+ pipeline.notes = [note, note, another_note]
+
+ expect(project.notes.for_commit_id(pipeline.sha).count).to eq(2)
+ expect(project.notes.for_commit_id(pipeline.sha).pluck(:note)).to contain_exactly(note.note, another_note.note)
+ end
+ end
+ end
+
+ describe '#has_erasable_artifacts?' do
+ subject { pipeline.has_erasable_artifacts? }
+
+ context 'when pipeline is not complete' do
+ let(:pipeline) { create(:ci_pipeline, :running, :with_job) }
+
+ context 'and has erasable artifacts' do
+ before do
+ create(:ci_job_artifact, :archive, job: pipeline.builds.first)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when pipeline is complete' do
+ let(:pipeline) { create(:ci_pipeline, :success, :with_job) }
+
+ context 'and has no artifacts' do
+ it { is_expected.to be_falsey }
+ end
+
+ Ci::JobArtifact.erasable_file_types.each do |type|
+ context "and has an artifact of type #{type}" do
+ before do
+ create(
+ :ci_job_artifact,
+ file_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[type.to_sym],
+ file_type: type,
+ job: pipeline.builds.first
+ )
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 61e2864a518..a199111b1e3 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -177,7 +177,7 @@ RSpec.describe Ci::Processable do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
- [:tag_list, :needs_attributes, :job_variables_attributes] -
+ [:tag_list, :needs_attributes, :job_variables_attributes, :id_tokens] -
# ToDo: Move EE accessors to ee/
::Ci::Build.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build]
diff --git a/spec/models/ci/resource_group_spec.rb b/spec/models/ci/resource_group_spec.rb
index 76e74f3193c..e8eccc233db 100644
--- a/spec/models/ci/resource_group_spec.rb
+++ b/spec/models/ci/resource_group_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe Ci::ResourceGroup do
+ let_it_be(:group) { create(:group) }
+
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, group: group) }
let!(:model) { create(:ci_resource_group, project: parent) }
end
@@ -94,7 +96,7 @@ RSpec.describe Ci::ResourceGroup do
describe '#upcoming_processables' do
subject { resource_group.upcoming_processables }
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:pipeline_1) { create(:ci_pipeline, project: project) }
let_it_be(:pipeline_2) { create(:ci_pipeline, project: project) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 181351222c1..13eb7086586 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1186,7 +1186,7 @@ RSpec.describe Ci::Runner do
end
end
- context 'Project-related queries' do
+ describe 'Project-related queries' do
let_it_be(:project1) { create(:project) }
let_it_be(:project2) { create(:project) }
@@ -1206,14 +1206,14 @@ RSpec.describe Ci::Runner do
end
end
- describe "belongs_to_one_project?" do
+ describe '#belongs_to_one_project?' do
it "returns false if there are two projects runner is assigned to" do
runner = create(:ci_runner, :project, projects: [project1, project2])
expect(runner.belongs_to_one_project?).to be_falsey
end
- it "returns true if there is only one project runner is assigned to" do
+ it 'returns true if there is only one project runner is assigned to' do
runner = create(:ci_runner, :project, projects: [project1])
expect(runner.belongs_to_one_project?).to be_truthy
@@ -1537,47 +1537,155 @@ RSpec.describe Ci::Runner do
it { is_expected.to eq(contacted_at_stored) }
end
- describe '.belonging_to_group' do
- it 'returns the specific group runner' do
- group = create(:group)
- runner = create(:ci_runner, :group, groups: [group])
- unrelated_group = create(:group)
- create(:ci_runner, :group, groups: [unrelated_group])
+ describe 'Group-related queries' do
+ # Groups
+ let_it_be(:top_level_group) { create(:group) }
+ let_it_be(:child_group) { create(:group, parent: top_level_group) }
+ let_it_be(:child_group2) { create(:group, parent: top_level_group) }
+ let_it_be(:other_top_level_group) { create(:group) }
+
+ # Projects
+ let_it_be(:top_level_group_project) { create(:project, group: top_level_group) }
+ let_it_be(:child_group_project) { create(:project, group: child_group) }
+ let_it_be(:other_top_level_group_project) { create(:project, group: other_top_level_group) }
- expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner)
+ # Runners
+ let_it_be(:instance_runner) { create(:ci_runner, :instance) }
+ let_it_be(:top_level_group_runner) { create(:ci_runner, :group, groups: [top_level_group]) }
+ let_it_be(:child_group_runner) { create(:ci_runner, :group, groups: [child_group]) }
+ let_it_be(:child_group2_runner) { create(:ci_runner, :group, groups: [child_group2]) }
+ let_it_be(:other_top_level_group_runner) do
+ create(:ci_runner, :group, groups: [other_top_level_group])
end
- end
- describe '.belonging_to_group_and_ancestors' do
- let_it_be(:parent_group) { create(:group) }
- let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) }
- let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:top_level_group_project_runner) do
+ create(:ci_runner, :project, projects: [top_level_group_project])
+ end
- it 'returns the group runner from the parent group' do
- expect(described_class.belonging_to_group_and_ancestors(group.id)).to contain_exactly(parent_runner)
+ let_it_be(:child_group_project_runner) do
+ create(:ci_runner, :project, projects: [child_group_project])
end
- end
- describe '.belonging_to_group_or_project_descendants' do
- it 'returns the specific group runners' do
- group1 = create(:group)
- group2 = create(:group, parent: group1)
- group3 = create(:group)
-
- project1 = create(:project, namespace: group1)
- project2 = create(:project, namespace: group2)
- project3 = create(:project, namespace: group3)
-
- runner1 = create(:ci_runner, :group, groups: [group1])
- runner2 = create(:ci_runner, :group, groups: [group2])
- _runner3 = create(:ci_runner, :group, groups: [group3])
- runner4 = create(:ci_runner, :project, projects: [project1])
- runner5 = create(:ci_runner, :project, projects: [project2])
- _runner6 = create(:ci_runner, :project, projects: [project3])
-
- expect(described_class.belonging_to_group_or_project_descendants(group1.id)).to contain_exactly(
- runner1, runner2, runner4, runner5
- )
+ let_it_be(:other_top_level_group_project_runner) do
+ create(:ci_runner, :project, projects: [other_top_level_group_project])
+ end
+
+ let_it_be(:shared_top_level_group_project_runner) do
+ create(:ci_runner, :project, projects: [top_level_group_project, child_group_project])
+ end
+
+ describe '.belonging_to_group' do
+ subject(:relation) { described_class.belonging_to_group(scope.id) }
+
+ context 'with scope set to top_level_group' do
+ let(:scope) { top_level_group }
+
+ it 'returns the group runners from the top_level_group' do
+ is_expected.to contain_exactly(top_level_group_runner)
+ end
+ end
+
+ context 'with scope set to child_group' do
+ let(:scope) { child_group }
+
+ it 'returns the group runners from the child_group' do
+ is_expected.to contain_exactly(child_group_runner)
+ end
+ end
+ end
+
+ describe '.belonging_to_group_and_ancestors' do
+ subject(:relation) { described_class.belonging_to_group_and_ancestors(child_group.id) }
+
+ it 'returns the group runners from the group and parent group' do
+ is_expected.to contain_exactly(child_group_runner, top_level_group_runner)
+ end
+ end
+
+ describe '.belonging_to_group_or_project_descendants' do
+ subject(:relation) { described_class.belonging_to_group_or_project_descendants(scope.id) }
+
+ context 'with scope set to top_level_group' do
+ let(:scope) { top_level_group }
+
+ it 'returns the expected group and project runners without duplicates', :aggregate_failures do
+ expect(relation).to contain_exactly(
+ top_level_group_runner,
+ top_level_group_project_runner,
+ child_group_runner,
+ child_group_project_runner,
+ child_group2_runner,
+ shared_top_level_group_project_runner
+ )
+
+ # Ensure no duplicates are returned
+ expect(relation.distinct).to match_array(relation)
+ end
+ end
+
+ context 'with scope set to child_group' do
+ let(:scope) { child_group }
+
+ it 'returns the expected group and project runners without duplicates', :aggregate_failures do
+ expect(relation).to contain_exactly(
+ child_group_runner,
+ child_group_project_runner,
+ shared_top_level_group_project_runner
+ )
+
+ # Ensure no duplicates are returned
+ expect(relation.distinct).to match_array(relation)
+ end
+ end
+ end
+
+ describe '.usable_from_scope' do
+ subject(:relation) { described_class.usable_from_scope(scope) }
+
+ context 'with scope set to top_level_group' do
+ let(:scope) { top_level_group }
+
+ it 'returns all runners usable from top_level_group without duplicates' do
+ expect(relation).to contain_exactly(
+ instance_runner,
+ top_level_group_runner,
+ top_level_group_project_runner,
+ child_group_runner,
+ child_group_project_runner,
+ child_group2_runner,
+ shared_top_level_group_project_runner
+ )
+
+ # Ensure no duplicates are returned
+ expect(relation.distinct).to match_array(relation)
+ end
+ end
+
+ context 'with scope set to child_group' do
+ let(:scope) { child_group }
+
+ it 'returns all runners usable from child_group' do
+ expect(relation).to contain_exactly(
+ instance_runner,
+ top_level_group_runner,
+ child_group_runner,
+ child_group_project_runner,
+ shared_top_level_group_project_runner
+ )
+ end
+ end
+
+ context 'with scope set to other_top_level_group' do
+ let(:scope) { other_top_level_group }
+
+ it 'returns all runners usable from other_top_level_group' do
+ expect(relation).to contain_exactly(
+ instance_runner,
+ other_top_level_group_runner,
+ other_top_level_group_project_runner
+ )
+ end
+ end
end
end
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index e47efff5dfd..20f64d40865 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -81,4 +81,70 @@ RSpec.describe Ci::SecureFile do
expect(Base64.encode64(subject.file.read)).to eq(Base64.encode64(sample_file))
end
end
+
+ describe '#file_extension' do
+ it 'returns the extension for the file name' do
+ file = build(:ci_secure_file, name: 'file1.cer')
+ expect(file.file_extension).to eq('cer')
+ end
+
+ it 'returns only the last part of the extension for the file name' do
+ file = build(:ci_secure_file, name: 'file1.tar.gz')
+ expect(file.file_extension).to eq('gz')
+ end
+ end
+
+ describe '#metadata_parsable?' do
+ it 'returns true when the file extension has a supported parser' do
+ file = build(:ci_secure_file, name: 'file1.cer')
+ expect(file.metadata_parsable?).to be true
+ end
+
+ it 'returns false when the file extension does not have a supported parser' do
+ file = build(:ci_secure_file, name: 'file1.foo')
+ expect(file.metadata_parsable?).to be false
+ end
+ end
+
+ describe '#metadata_parser' do
+ it 'returns an instance of Gitlab::Ci::SecureFiles::Cer when a .cer file is supplied' do
+ file = build(:ci_secure_file, name: 'file1.cer')
+ expect(file.metadata_parser).to be_an_instance_of(Gitlab::Ci::SecureFiles::Cer)
+ end
+
+ it 'returns an instance of Gitlab::Ci::SecureFiles::P12 when a .p12 file is supplied' do
+ file = build(:ci_secure_file, name: 'file1.p12')
+ expect(file.metadata_parser).to be_an_instance_of(Gitlab::Ci::SecureFiles::P12)
+ end
+
+ it 'returns an instance of Gitlab::Ci::SecureFiles::MobileProvision when a .mobileprovision file is supplied' do
+ file = build(:ci_secure_file, name: 'file1.mobileprovision')
+ expect(file.metadata_parser).to be_an_instance_of(Gitlab::Ci::SecureFiles::MobileProvision)
+ end
+
+ it 'returns nil when the file type is not supported by any parsers' do
+ file = build(:ci_secure_file, name: 'file1.foo')
+ expect(file.metadata_parser).to be nil
+ end
+ end
+
+ describe '#update_metadata!' do
+ it 'assigns the expected metadata when a parsable file is supplied' do
+ file = create(:ci_secure_file, name: 'file1.cer',
+ file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.cer') ))
+ file.update_metadata!
+
+ expect(file.expires_at).to eq(DateTime.parse('2022-04-26 19:20:40'))
+ expect(file.metadata['id']).to eq('33669367788748363528491290218354043267')
+ expect(file.metadata['issuer']['CN']).to eq('Apple Worldwide Developer Relations Certification Authority')
+ expect(file.metadata['subject']['OU']).to eq('N7SYAN8PX8')
+ end
+
+ it 'logs an error when something goes wrong with the file parsing' do
+ corrupt_file = create(:ci_secure_file, name: 'file1.cer', file: CarrierWaveStringFile.new('11111111'))
+ message = 'Validation failed: Metadata must be a valid json schema - not enough data.'
+ expect(Gitlab::AppLogger).to receive(:error).with("Secure File Parser Failure (#{corrupt_file.id}): #{message}")
+ corrupt_file.update_metadata!
+ end
+ end
end
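
The file_extension behaviour asserted above (only the last extension segment is returned) can be reproduced with core Ruby. A minimal sketch, not the actual Ci::SecureFile implementation:

# Sketch: return only the final extension segment of a file name.
def file_extension(name)
  File.extname(name).delete_prefix('.')
end

file_extension('file1.cer')    # => "cer"
file_extension('file1.tar.gz') # => "gz"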
diff --git a/spec/models/ci/unit_test_spec.rb b/spec/models/ci/unit_test_spec.rb
index 556cf93c266..b3180492a36 100644
--- a/spec/models/ci/unit_test_spec.rb
+++ b/spec/models/ci/unit_test_spec.rb
@@ -43,18 +43,19 @@ RSpec.describe Ci::UnitTest do
result = described_class.find_or_create_by_batch(project, attrs)
- expect(result).to match_array([
- have_attributes(
- key_hash: existing_test.key_hash,
- suite_name: 'rspec',
- name: 'Math#sum adds numbers'
- ),
- have_attributes(
- key_hash: new_key,
- suite_name: 'jest',
- name: 'Component works'
- )
- ])
+ expect(result).to match_array(
+ [
+ have_attributes(
+ key_hash: existing_test.key_hash,
+ suite_name: 'rspec',
+ name: 'Math#sum adds numbers'
+ ),
+ have_attributes(
+ key_hash: new_key,
+ suite_name: 'jest',
+ name: 'Component works'
+ )
+ ])
expect(result).to all(be_persisted)
end
@@ -77,13 +78,14 @@ RSpec.describe Ci::UnitTest do
result = described_class.find_or_create_by_batch(project, attrs)
- expect(result).to match_array([
- have_attributes(
- key_hash: new_key,
- suite_name: 'abc...',
- name: 'abc...'
- )
- ])
+ expect(result).to match_array(
+ [
+ have_attributes(
+ key_hash: new_key,
+ suite_name: 'abc...',
+ name: 'abc...'
+ )
+ ])
expect(result).to all(be_persisted)
end
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index f0af229ff2c..5f2b5971508 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Ci::Variable do
context 'loose foreign key on ci_variables.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, namespace: create(:group)) }
let!(:model) { create(:ci_variable, project: parent) }
end
end
diff --git a/spec/models/clusters/agents/implicit_authorization_spec.rb b/spec/models/clusters/agents/implicit_authorization_spec.rb
index 2d6c3ddb426..1f4c5b1ac9e 100644
--- a/spec/models/clusters/agents/implicit_authorization_spec.rb
+++ b/spec/models/clusters/agents/implicit_authorization_spec.rb
@@ -10,5 +10,5 @@ RSpec.describe Clusters::Agents::ImplicitAuthorization do
it { expect(subject.agent).to eq(agent) }
it { expect(subject.agent_id).to eq(agent.id) }
it { expect(subject.config_project).to eq(agent.project) }
- it { expect(subject.config).to be_nil }
+ it { expect(subject.config).to eq({}) }
end
diff --git a/spec/models/clusters/applications/cert_manager_spec.rb b/spec/models/clusters/applications/cert_manager_spec.rb
index 3044260a000..05ab8c4108e 100644
--- a/spec/models/clusters/applications/cert_manager_spec.rb
+++ b/spec/models/clusters/applications/cert_manager_spec.rb
@@ -49,13 +49,15 @@ RSpec.describe Clusters::Applications::CertManager do
expect(subject.version).to eq('v0.10.1')
expect(subject).to be_rbac
expect(subject.files).to eq(cert_manager.files.merge(cluster_issuer_file))
- expect(subject.preinstall).to eq([
- 'kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.10/deploy/manifests/00-crds.yaml',
- 'kubectl label --overwrite namespace gitlab-managed-apps certmanager.k8s.io/disable-validation=true'
- ])
- expect(subject.postinstall).to eq([
- "for i in $(seq 1 90); do kubectl apply -f /data/helm/certmanager/config/cluster_issuer.yaml && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
- ])
+ expect(subject.preinstall).to eq(
+ [
+ 'kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.10/deploy/manifests/00-crds.yaml',
+ 'kubectl label --overwrite namespace gitlab-managed-apps certmanager.k8s.io/disable-validation=true'
+ ])
+ expect(subject.postinstall).to eq(
+ [
+ "for i in $(seq 1 90); do kubectl apply -f /data/helm/certmanager/config/cluster_issuer.yaml && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
+ ])
end
context 'for a specific user' do
@@ -99,15 +101,16 @@ RSpec.describe Clusters::Applications::CertManager do
end
it 'specifies a post delete command to remove custom resource definitions' do
- expect(subject.postdelete).to eq([
- 'kubectl delete secret -n gitlab-managed-apps letsencrypt-prod --ignore-not-found',
- 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
- ])
+ expect(subject.postdelete).to eq(
+ [
+ 'kubectl delete secret -n gitlab-managed-apps letsencrypt-prod --ignore-not-found',
+ 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
+ ])
end
context 'secret key name is not found' do
@@ -119,14 +122,15 @@ RSpec.describe Clusters::Applications::CertManager do
end
it 'does not try and delete the secret' do
- expect(subject.postdelete).to eq([
- 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
- ])
+ expect(subject.postdelete).to eq(
+ [
+ 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
+ ])
end
end
end
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 7b9ff409edd..4ac2fd022ba 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -601,19 +601,27 @@ RSpec.describe Clusters::Platforms::Kubernetes do
it 'creates a matching RolloutStatus' do
expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
- expect(rollout_status.deployments.map(&:annotations)).to eq([
- { 'app.gitlab.com/app' => project.full_path_slug, 'app.gitlab.com/env' => 'env-000000' }
- ])
- expect(rollout_status.instances).to eq([{ pod_name: "kube-pod",
- stable: true,
- status: "pending",
- tooltip: "kube-pod (Pending)",
- track: "stable" },
- { pod_name: "Not provided",
- stable: true,
- status: "pending",
- tooltip: "Not provided (Pending)",
- track: "stable" }])
+ expect(rollout_status.deployments.map(&:annotations)).to eq(
+ [
+ { 'app.gitlab.com/app' => project.full_path_slug, 'app.gitlab.com/env' => 'env-000000' }
+ ])
+ expect(rollout_status.instances).to eq(
+ [
+ {
+ pod_name: "kube-pod",
+ stable: true,
+ status: "pending",
+ tooltip: "kube-pod (Pending)",
+ track: "stable"
+ },
+ {
+ pod_name: "Not provided",
+ stable: true,
+ status: "pending",
+ tooltip: "Not provided (Pending)",
+ track: "stable"
+ }
+ ])
end
context 'with canary ingress' do
@@ -720,11 +728,12 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
it 'returns a pending pod for each missing replica' do
- expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status) }).to eq([
- { pod_name: 'pod-a-1', status: 'running' },
- { pod_name: 'Not provided', status: 'pending' },
- { pod_name: 'Not provided', status: 'pending' }
- ])
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status) }).to eq(
+ [
+ { pod_name: 'pod-a-1', status: 'running' },
+ { pod_name: 'Not provided', status: 'pending' },
+ { pod_name: 'Not provided', status: 'pending' }
+ ])
end
end
@@ -743,12 +752,13 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
it 'returns the correct track for the pending pods' do
- expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq([
- { pod_name: 'pod-a-1', status: 'running', track: 'canary' },
- { pod_name: 'Not provided', status: 'pending', track: 'canary' },
- { pod_name: 'Not provided', status: 'pending', track: 'stable' },
- { pod_name: 'Not provided', status: 'pending', track: 'stable' }
- ])
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq(
+ [
+ { pod_name: 'pod-a-1', status: 'running', track: 'canary' },
+ { pod_name: 'Not provided', status: 'pending', track: 'canary' },
+ { pod_name: 'Not provided', status: 'pending', track: 'stable' },
+ { pod_name: 'Not provided', status: 'pending', track: 'stable' }
+ ])
end
end
@@ -765,10 +775,11 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
it 'returns the correct number of pending pods' do
- expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq([
- { pod_name: 'Not provided', status: 'pending', track: 'mytrack' },
- { pod_name: 'Not provided', status: 'pending', track: 'mytrack' }
- ])
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq(
+ [
+ { pod_name: 'Not provided', status: 'pending', track: 'mytrack' },
+ { pod_name: 'Not provided', status: 'pending', track: 'mytrack' }
+ ])
end
end
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index de9b72c1da2..93c696cae54 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -42,10 +42,7 @@ RSpec.describe CommitCollection do
merge_commit = project.commit("60ecb67744cb56576c30214ff52294f8ce2def98")
expect(merge_commit).to receive(:merge_commit?).and_return(true)
- collection = described_class.new(project, [
- commit,
- merge_commit
- ])
+ collection = described_class.new(project, [commit, merge_commit])
expect(collection.without_merge_commits).to contain_exactly(commit)
end
diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb
index 0035fb8468a..dc8429fe77e 100644
--- a/spec/models/compare_spec.rb
+++ b/spec/models/compare_spec.rb
@@ -127,13 +127,14 @@ RSpec.describe Compare do
end
it 'returns affected file paths, without duplication' do
- expect(subject.modified_paths).to contain_exactly(*%w{
- foo/for_move.txt
- foo/bar/for_move.txt
- foo/for_create.txt
- foo/for_delete.txt
- foo/for_edit.txt
- })
+ expect(subject.modified_paths).to contain_exactly(
+ *%w{
+ foo/for_move.txt
+ foo/bar/for_move.txt
+ foo/for_create.txt
+ foo/for_delete.txt
+ foo/for_edit.txt
+ })
end
end
diff --git a/spec/models/concerns/approvable_spec.rb b/spec/models/concerns/approvable_spec.rb
index 1ddd9b3edca..25a4f51cd82 100644
--- a/spec/models/concerns/approvable_spec.rb
+++ b/spec/models/concerns/approvable_spec.rb
@@ -32,8 +32,8 @@ RSpec.describe Approvable do
end
end
- describe '#can_be_approved_by?' do
- subject { merge_request.can_be_approved_by?(user) }
+ describe '#eligible_for_approval_by?' do
+ subject { merge_request.eligible_for_approval_by?(user) }
before do
merge_request.project.add_developer(user) if user
@@ -60,8 +60,8 @@ RSpec.describe Approvable do
end
end
- describe '#can_be_unapproved_by?' do
- subject { merge_request.can_be_unapproved_by?(user) }
+ describe '#eligible_for_unapproval_by?' do
+ subject { merge_request.eligible_for_unapproval_by?(user) }
before do
merge_request.project.add_developer(user) if user
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index b803e699b25..5fe3141eb17 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe AtomicInternalId do
- let(:milestone) { build(:milestone) }
+ let_it_be(:project) { create(:project) }
+ let(:milestone) { build(:milestone, project: project) }
let(:iid) { double('iid', to_i: 42) }
let(:external_iid) { 100 }
- let(:scope_attrs) { { project: milestone.project } }
+ let(:scope_attrs) { { project: project } }
let(:usage) { :milestones }
describe '#save!' do
@@ -248,4 +249,12 @@ RSpec.describe AtomicInternalId do
end.to change { InternalId.find_by(project: milestone.project, usage: :milestones)&.last_value.to_i }.by(4)
end
end
+
+ describe '.track_project_iid!' do
+ it 'tracks the present value' do
+ expect do
+ ::Issue.track_project_iid!(milestone.project, external_iid)
+ end.to change { InternalId.find_by(project: milestone.project, usage: :issues)&.last_value.to_i }.to(external_iid)
+ end
+ end
end
diff --git a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
deleted file mode 100644
index 6be6e3f048f..00000000000
--- a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
+++ /dev/null
@@ -1,347 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
-
- def group_settings
- group.namespace_settings
- end
-
- def subgroup_settings
- subgroup.namespace_settings
- end
-
- describe '#delayed_project_removal' do
- subject(:delayed_project_removal) { subgroup_settings.delayed_project_removal }
-
- context 'when there is no parent' do
- context 'and the value is not nil' do
- before do
- group_settings.update!(delayed_project_removal: true)
- end
-
- it 'returns the local value' do
- expect(group_settings.delayed_project_removal).to eq(true)
- end
- end
-
- context 'and the value is nil' do
- before do
- group_settings.update!(delayed_project_removal: nil)
- stub_application_setting(delayed_project_removal: false)
- end
-
- it 'returns the application settings value' do
- expect(group_settings.delayed_project_removal).to eq(false)
- end
- end
- end
-
- context 'when parent does not lock the attribute' do
- context 'and value is not nil' do
- before do
- group_settings.update!(delayed_project_removal: false)
- end
-
- it 'returns local setting when present' do
- subgroup_settings.update!(delayed_project_removal: true)
-
- expect(delayed_project_removal).to eq(true)
- end
-
- it 'returns the parent value when local value is nil' do
- subgroup_settings.update!(delayed_project_removal: nil)
-
- expect(delayed_project_removal).to eq(false)
- end
-
- it 'returns the correct dirty value' do
- subgroup_settings.delayed_project_removal = true
-
- expect(delayed_project_removal).to eq(true)
- end
-
- it 'does not return the application setting value when parent value is false' do
- stub_application_setting(delayed_project_removal: true)
-
- expect(delayed_project_removal).to eq(false)
- end
- end
-
- context 'and the value is nil' do
- before do
- group_settings.update!(delayed_project_removal: nil, lock_delayed_project_removal: false)
- subgroup_settings.update!(delayed_project_removal: nil)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- end
-
- it 'cascades to the application settings value' do
- expect(delayed_project_removal).to eq(false)
- end
- end
-
- context 'when multiple ancestors set a value' do
- let(:third_level_subgroup) { create(:group, parent: subgroup) }
-
- before do
- group_settings.update!(delayed_project_removal: true)
- subgroup_settings.update!(delayed_project_removal: false)
- end
-
- it 'returns the closest ancestor value' do
- expect(third_level_subgroup.namespace_settings.delayed_project_removal).to eq(false)
- end
- end
- end
-
- context 'when parent locks the attribute' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'returns the parent value' do
- expect(delayed_project_removal).to eq(false)
- end
-
- it 'does not allow the local value to be saved' do
- subgroup_settings.delayed_project_removal = nil
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when the application settings locks the attribute' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
- end
-
- it 'returns the application setting value' do
- expect(delayed_project_removal).to eq(true)
- end
-
- it 'does not allow the local value to be saved' do
- subgroup_settings.delayed_project_removal = false
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when parent locked the attribute then the application settings locks it' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
- stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'returns the application setting value' do
- expect(delayed_project_removal).to eq(true)
- end
- end
- end
-
- describe '#delayed_project_removal?' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'aliases the method when the attribute is a boolean' do
- expect(subgroup_settings.delayed_project_removal?).to eq(subgroup_settings.delayed_project_removal)
- end
- end
-
- describe '#delayed_project_removal=' do
- before do
- subgroup_settings.update!(delayed_project_removal: nil)
- group_settings.update!(delayed_project_removal: true)
- end
-
- it 'does not save the value locally when it matches the cascaded value' do
- subgroup_settings.update!(delayed_project_removal: true)
-
- expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(nil)
- end
- end
-
- describe '#delayed_project_removal_locked?' do
- shared_examples 'not locked' do
- it 'is not locked by an ancestor' do
- expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
- end
-
- it 'is not locked by application setting' do
- expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
- end
-
- it 'does not return a locked namespace' do
- expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
- end
- end
-
- context 'when attribute is locked by self' do
- before do
- subgroup_settings.update!(lock_delayed_project_removal: true)
- end
-
- it 'is not locked by default' do
- expect(subgroup_settings.delayed_project_removal_locked?).to eq(false)
- end
-
- it 'is locked when including self' do
- expect(subgroup_settings.delayed_project_removal_locked?(include_self: true)).to eq(true)
- end
- end
-
- context 'when parent does not lock the attribute' do
- it_behaves_like 'not locked'
- end
-
- context 'when parent locks the attribute' do
- before do
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'is locked by an ancestor' do
- expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(true)
- end
-
- it 'is not locked by application setting' do
- expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
- end
-
- it 'returns a locked namespace settings object' do
- expect(subgroup_settings.delayed_project_removal_locked_ancestor.namespace_id).to eq(group_settings.namespace_id)
- end
- end
-
- context 'when not locked by application settings' do
- before do
- stub_application_setting(lock_delayed_project_removal: false)
- end
-
- it_behaves_like 'not locked'
- end
-
- context 'when locked by application settings' do
- before do
- stub_application_setting(lock_delayed_project_removal: true)
- end
-
- it 'is not locked by an ancestor' do
- expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
- end
-
- it 'is locked by application setting' do
- expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(true)
- end
-
- it 'does not return a locked namespace' do
- expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
- end
- end
- end
-
- describe '#lock_delayed_project_removal=' do
- context 'when parent locks the attribute' do
- before do
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'does not allow the attribute to be saved' do
- subgroup_settings.lock_delayed_project_removal = true
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when parent does not lock the attribute' do
- before do
- group_settings.update!(lock_delayed_project_removal: false)
-
- subgroup_settings.lock_delayed_project_removal = true
- end
-
- it 'allows the lock to be set when the attribute is not nil' do
- subgroup_settings.delayed_project_removal = true
-
- expect(subgroup_settings.save).to eq(true)
- end
-
- it 'does not allow the lock to be saved when the attribute is nil' do
- subgroup_settings.delayed_project_removal = nil
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be nil when locking the attribute/)
- end
-
- it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do
- subgroup_settings.delayed_project_removal = nil
- subgroup_settings.lock_delayed_project_removal = true
-
- expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(false)
- end
- end
-
- context 'when application settings locks the attribute' do
- before do
- stub_application_setting(lock_delayed_project_removal: true)
- end
-
- it 'does not allow the attribute to be saved' do
- subgroup_settings.lock_delayed_project_removal = true
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when application_settings does not lock the attribute' do
- before do
- stub_application_setting(lock_delayed_project_removal: false)
- end
-
- it 'allows the attribute to be saved' do
- subgroup_settings.delayed_project_removal = true
- subgroup_settings.lock_delayed_project_removal = true
-
- expect(subgroup_settings.save).to eq(true)
- end
- end
- end
-
- describe 'after update callback' do
- before do
- subgroup_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
- end
-
- it 'clears descendant locks' do
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: true)
-
- expect(subgroup_settings.reload.lock_delayed_project_removal).to eq(false)
- end
- end
-end
diff --git a/spec/models/concerns/ci/partitionable_spec.rb b/spec/models/concerns/ci/partitionable_spec.rb
new file mode 100644
index 00000000000..d53501ccc3d
--- /dev/null
+++ b/spec/models/concerns/ci/partitionable_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Partitionable do
+ describe 'partitionable models inclusion' do
+ let(:ci_model) { Class.new(Ci::ApplicationRecord) }
+
+ subject { ci_model.include(described_class) }
+
+ it 'raises an exception' do
+ expect { subject }
+ .to raise_error(/must be included in PARTITIONABLE_MODELS/)
+ end
+
+ context 'when is included in the models list' do
+ before do
+ stub_const("#{described_class}::Testing::PARTITIONABLE_MODELS", [ci_model.name])
+ end
+
+ it 'does not raise exceptions' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index 2dd70188740..66ccd4559e5 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -73,8 +73,8 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
subject
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists(increment_key)).to be_falsey
- expect(redis.exists(flushed_key)).to eq(flushed_key_present)
+ expect(redis.exists?(increment_key)).to eq(false)
+ expect(redis.exists?(flushed_key)).to eq(flushed_key_present)
end
end
end
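
The hunk above replaces redis.exists with redis.exists?. In newer redis-rb versions, exists returns an Integer count of existing keys while exists? returns a Boolean, so only the predicate form pairs cleanly with the eq(false)/eq(true) matchers (this rationale is inferred, not stated in the diff). A small sketch, assuming the redis gem and a local Redis server; the key names are made up:

require 'redis'

redis = Redis.new # assumes a server on localhost:6379

redis.set('counter:increment', 1)

redis.exists('counter:increment')  # => 1 in newer redis-rb (Integer count of keys found)
redis.exists?('counter:increment') # => true  (Boolean)
redis.exists?('counter:missing')   # => false (what eq(false) asserts in the spec)
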
diff --git a/spec/models/concerns/id_in_ordered_spec.rb b/spec/models/concerns/id_in_ordered_spec.rb
index a3b434caac6..15da079f2bc 100644
--- a/spec/models/concerns/id_in_ordered_spec.rb
+++ b/spec/models/concerns/id_in_ordered_spec.rb
@@ -12,9 +12,10 @@ RSpec.describe IdInOrdered do
issue4 = create(:issue)
issue5 = create(:issue)
- expect(Issue.id_in_ordered([issue3.id, issue1.id, issue4.id, issue5.id, issue2.id])).to eq([
- issue3, issue1, issue4, issue5, issue2
- ])
+ expect(Issue.id_in_ordered([issue3.id, issue1.id, issue4.id, issue5.id, issue2.id])).to eq(
+ [
+ issue3, issue1, issue4, issue5, issue2
+ ])
end
context 'when the ids are not an array of integers' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 6763cc904b4..8842a36f40a 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -75,6 +75,24 @@ RSpec.describe Issuable do
it_behaves_like 'truncates the description to its allowed maximum length on import'
end
+
+ describe '#validate_assignee_length' do
+ let(:assignee_1) { create(:user) }
+ let(:assignee_2) { create(:user) }
+ let(:assignee_3) { create(:user) }
+
+ subject { create(:merge_request) }
+
+ before do
+ stub_const("Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS", 2)
+ end
+
+ it 'will not exceed the assignee limit' do
+ expect do
+ subject.update!(assignees: [assignee_1, assignee_2, assignee_3])
+ end.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
end
describe "Scope" do
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 9daea3438cb..7bbbd10ec8d 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -225,7 +225,7 @@ RSpec.describe Commit, 'Mentionable' do
end
context 'with external issue tracker' do
- let(:project) { create(:jira_project, :repository) }
+ let(:project) { create(:project, :with_jira_integration, :repository) }
it 'is true if external issues referenced' do
allow(commit.raw).to receive(:message).and_return 'JIRA-123'
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index 81ae30b7116..82aca13c929 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -47,18 +47,19 @@ RSpec.describe Noteable do
let(:discussions) { subject.discussions }
it 'includes discussions for diff notes, commit diff notes, commit notes, and regular notes' do
- expect(discussions).to eq([
- DiffDiscussion.new([active_diff_note1, active_diff_note2], subject),
- DiffDiscussion.new([active_diff_note3], subject),
- DiffDiscussion.new([outdated_diff_note1, outdated_diff_note2], subject),
- Discussion.new([discussion_note1, discussion_note2], subject),
- DiffDiscussion.new([commit_diff_note1, commit_diff_note2], subject),
- OutOfContextDiscussion.new([commit_note1, commit_note2], subject),
- Discussion.new([commit_discussion_note1, commit_discussion_note2], subject),
- Discussion.new([commit_discussion_note3], subject),
- IndividualNoteDiscussion.new([note1], subject),
- IndividualNoteDiscussion.new([note2], subject)
- ])
+ expect(discussions).to eq(
+ [
+ DiffDiscussion.new([active_diff_note1, active_diff_note2], subject),
+ DiffDiscussion.new([active_diff_note3], subject),
+ DiffDiscussion.new([outdated_diff_note1, outdated_diff_note2], subject),
+ Discussion.new([discussion_note1, discussion_note2], subject),
+ DiffDiscussion.new([commit_diff_note1, commit_diff_note2], subject),
+ OutOfContextDiscussion.new([commit_note1, commit_note2], subject),
+ Discussion.new([commit_discussion_note1, commit_discussion_note2], subject),
+ Discussion.new([commit_discussion_note3], subject),
+ IndividualNoteDiscussion.new([note1], subject),
+ IndividualNoteDiscussion.new([note2], subject)
+ ])
end
end
@@ -88,23 +89,24 @@ RSpec.describe Noteable do
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
- expect(discussions).to match([
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id),
- a_hash_including(table_name: 'resource_label_events', id: label_event.id),
- a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
- a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
- a_hash_including(table_name: 'resource_state_events', id: state_event.id)
- ])
+ expect(discussions).to match(
+ [
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id),
+ a_hash_including(table_name: 'resource_label_events', id: label_event.id),
+ a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
+ a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
+ a_hash_including(table_name: 'resource_state_events', id: state_event.id)
+ ])
end
it 'filters by comments only' do
@@ -112,19 +114,20 @@ RSpec.describe Noteable do
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
- expect(discussions).to match([
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id)
- ])
+ expect(discussions).to match(
+ [
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id)
+ ])
end
it 'filters by system notes only' do
@@ -132,12 +135,13 @@ RSpec.describe Noteable do
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
- expect(discussions).to match([
- a_hash_including(table_name: 'resource_label_events', id: label_event.id),
- a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
- a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
- a_hash_including(table_name: 'resource_state_events', id: state_event.id)
- ])
+ expect(discussions).to match(
+ [
+ a_hash_including(table_name: 'resource_label_events', id: label_event.id),
+ a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
+ a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
+ a_hash_including(table_name: 'resource_state_events', id: state_event.id)
+ ])
end
end
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index f7f68cb38d8..58a44fec3aa 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -186,6 +186,9 @@ RSpec.describe Participable do
expect(instance.visible_participants(user1)).to match_array [user1, user2]
end
end
+
+ it_behaves_like 'visible participants for issuable with read ability', :issue
+ it_behaves_like 'visible participants for issuable with read ability', :merge_request
end
describe '#participant?' do
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index 4158e8a0a4c..d3a44ac8403 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
include PrometheusHelpers
include ReactiveCachingHelpers
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
let(:integration) { project.prometheus_integration }
let(:described_class) do
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index cf66ba83e87..dc1002f3560 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -23,6 +23,12 @@ RSpec.shared_examples 'routable resource' do
end.not_to exceed_all_query_limit(control_count)
end
+ context 'when path is a negative number' do
+ it 'returns nil' do
+ expect(described_class.find_by_full_path(-1)).to be_nil
+ end
+ end
+
context 'with redirect routes' do
let_it_be(:redirect_route) { create(:redirect_route, source: record) }
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 3f6bbe795cc..e8db83b7144 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -314,52 +314,22 @@ RSpec.describe Ci::Runner, 'TokenAuthenticatable', :freeze_time do
describe '#token_expired?' do
subject { runner.token_expired? }
- context 'when enforce_runner_token_expires_at feature flag is disabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: false)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(false) }
- end
-
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
+ context 'when runner has no token expiration' do
+ let(:runner) { non_expirable_runner }
- it { is_expected.to eq(false) }
- end
-
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
-
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
end
- context 'when enforce_runner_token_expires_at feature flag is enabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: true)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(false) }
- end
+ context 'when runner token is not expired' do
+ let(:runner) { non_expired_runner }
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
-
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
+ end
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
+ context 'when runner token is expired' do
+ let(:runner) { expired_runner }
- it { is_expected.to eq(true) }
- end
+ it { is_expected.to eq(true) }
end
end
@@ -386,52 +356,22 @@ RSpec.describe Ci::Runner, 'TokenAuthenticatable', :freeze_time do
describe '.find_by_token' do
subject { Ci::Runner.find_by_token(runner.token) }
- context 'when enforce_runner_token_expires_at feature flag is disabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: false)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(non_expirable_runner) }
- end
-
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
-
- it { is_expected.to eq(non_expired_runner) }
- end
-
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
+ context 'when runner has no token expiration' do
+ let(:runner) { non_expirable_runner }
- it { is_expected.to eq(expired_runner) }
- end
+ it { is_expected.to eq(non_expirable_runner) }
end
- context 'when enforce_runner_token_expires_at feature flag is enabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: true)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(non_expirable_runner) }
- end
-
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
+ context 'when runner token is not expired' do
+ let(:runner) { non_expired_runner }
- it { is_expected.to eq(non_expired_runner) }
- end
+ it { is_expected.to eq(non_expired_runner) }
+ end
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
+ context 'when runner token is expired' do
+ let(:runner) { expired_runner }
- it { is_expected.to be_nil }
- end
+ it { is_expected.to be_nil }
end
end
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index a4329993e91..0033e9bbd08 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
api_url: 'http://registry.gitlab',
host_port: 'registry.gitlab')
- stub_request(:get, 'http://registry.gitlab/v2/group/test/my_image/tags/list')
+ stub_request(:get, "http://registry.gitlab/v2/group/test/my_image/tags/list?n=#{::ContainerRegistry::Client::DEFAULT_TAGS_PAGE_SIZE}")
.with(headers: { 'Accept' => ContainerRegistry::Client::ACCEPTED_TYPES.join(', ') })
.to_return(
status: 200,
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 87fa5289795..b91d836f82f 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Deployment do
let(:deployment) { create(:deployment) }
it 'delegates to environment_manual_actions' do
- expect(deployment.deployable).to receive(:environment_manual_actions).and_call_original
+ expect(deployment.deployable).to receive(:other_manual_actions).and_call_original
deployment.manual_actions
end
@@ -38,7 +38,7 @@ RSpec.describe Deployment do
let(:deployment) { create(:deployment) }
it 'delegates to environment_scheduled_actions' do
- expect(deployment.deployable).to receive(:environment_scheduled_actions).and_call_original
+ expect(deployment.deployable).to receive(:other_scheduled_actions).and_call_original
deployment.scheduled_actions
end
@@ -171,11 +171,22 @@ RSpec.describe Deployment do
end
it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
+ stub_feature_flags(prevent_outdated_deployment_jobs: false)
+
expect(Deployments::DropOlderDeploymentsWorker)
.to receive(:perform_async).once.with(deployment.id)
deployment.run!
end
+
+ it 'does not execute Deployments::DropOlderDeploymentsWorker when FF enabled' do
+ stub_feature_flags(prevent_outdated_deployment_jobs: true)
+
+ expect(Deployments::DropOlderDeploymentsWorker)
+ .not_to receive(:perform_async).with(deployment.id)
+
+ deployment.run!
+ end
end
context 'when deployment succeeded' do
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index d379ffeee02..a526f91ddc1 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -541,11 +541,12 @@ RSpec.describe DiffNote do
describe '#shas' do
it 'returns list of SHAs based on original_position' do
- expect(subject.shas).to match_array([
- position.base_sha,
- position.start_sha,
- position.head_sha
- ])
+ expect(subject.shas).to match_array(
+ [
+ position.base_sha,
+ position.start_sha,
+ position.head_sha
+ ])
end
context 'when position changes' do
@@ -554,14 +555,15 @@ RSpec.describe DiffNote do
end
it 'includes the new position SHAs' do
- expect(subject.shas).to match_array([
- position.base_sha,
- position.start_sha,
- position.head_sha,
- new_position.base_sha,
- new_position.start_sha,
- new_position.head_sha
- ])
+ expect(subject.shas).to match_array(
+ [
+ position.base_sha,
+ position.start_sha,
+ position.head_sha,
+ new_position.base_sha,
+ new_position.start_sha,
+ new_position.head_sha
+ ])
end
end
end
diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb
index 28660b0d4b9..db0814af422 100644
--- a/spec/models/diff_viewer/server_side_spec.rb
+++ b/spec/models/diff_viewer/server_side_spec.rb
@@ -17,10 +17,30 @@ RSpec.describe DiffViewer::ServerSide do
subject { viewer_class.new(diff_file) }
describe '#prepare!' do
- it 'loads all diff file data' do
- expect(Blob).to receive(:lazy).at_least(:twice)
+ before do
+ stub_feature_flags(disable_load_entire_blob_for_diff_viewer: feature_flag_enabled)
+ end
+
+ context 'when the disable_load_entire_blob_for_diff_viewer flag is disabled' do
+ let(:feature_flag_enabled) { false }
- subject.prepare!
+ it 'loads all diff file data' do
+ subject
+ expect(diff_file).to receive_message_chain(:old_blob, :load_all_data!)
+ expect(diff_file).to receive_message_chain(:new_blob, :load_all_data!)
+ subject.prepare!
+ end
+ end
+
+ context 'when the disable_load_entire_blob_for_diff_viewer flag is enabled' do
+ let(:feature_flag_enabled) { true }
+
+ it 'does not load file data' do
+ subject
+ expect(diff_file).not_to receive(:old_blob)
+ expect(diff_file).not_to receive(:new_blob)
+ subject.prepare!
+ end
end
end
diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb
index 212619a1c3d..7bd3c5743a6 100644
--- a/spec/models/discussion_spec.rb
+++ b/spec/models/discussion_spec.rb
@@ -37,10 +37,11 @@ RSpec.describe Discussion do
describe '.build_collection' do
it 'returns an array of discussions of the right type' do
discussions = described_class.build_collection([first_note, second_note, third_note], merge_request)
- expect(discussions).to eq([
- DiffDiscussion.new([first_note, second_note], merge_request),
- DiffDiscussion.new([third_note], merge_request)
- ])
+ expect(discussions).to eq(
+ [
+ DiffDiscussion.new([first_note, second_note], merge_request),
+ DiffDiscussion.new([third_note], merge_request)
+ ])
end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 1e15b09a069..a442856d993 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -390,7 +390,10 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'staging' | described_class.tiers[:staging]
'pre-prod' | described_class.tiers[:staging]
'blue-kit-stage' | described_class.tiers[:staging]
- 'pre-prod' | described_class.tiers[:staging]
+ 'nonprod' | described_class.tiers[:staging]
+ 'nonlive' | described_class.tiers[:staging]
+ 'non-prod' | described_class.tiers[:staging]
+ 'non-live' | described_class.tiers[:staging]
'gprd' | described_class.tiers[:production]
'gprd-cny' | described_class.tiers[:production]
'production' | described_class.tiers[:production]
@@ -1291,7 +1294,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when the environment is available' do
context 'with a deployment service' do
- let(:project) { create(:prometheus_project, :repository) }
+ let(:project) { create(:project, :with_prometheus_integration, :repository) }
context 'and a deployment' do
let!(:deployment) { create(:deployment, environment: environment) }
@@ -1364,7 +1367,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when the environment is unavailable' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
before do
environment.stop
@@ -1391,7 +1394,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
describe '#metrics' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
subject { environment.metrics }
@@ -1427,7 +1430,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
describe '#additional_metrics' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
let(:metric_params) { [] }
subject { environment.additional_metrics(*metric_params) }
@@ -1617,44 +1620,30 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
nil | nil
'never' | nil
end
- with_them do
- it 'sets correct auto_stop_in' do
- freeze_time do
- if expected_result.is_a?(Integer) || expected_result.nil?
- subject
- expect(environment.auto_stop_in).to eq(expected_result)
- else
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- an_instance_of(expected_result),
- project_id: environment.project_id,
- environment_id: environment.id
- )
+ with_them do
+ shared_examples 'for given values expected result is set' do
+ it do
+ freeze_time do
+ if expected_result.is_a?(Integer) || expected_result.nil?
+ subject
- expect { subject }.to raise_error(expected_result)
+ expect(environment.auto_stop_in).to eq(expected_result)
+ else
+ expect { subject }.to raise_error(expected_result)
+ end
end
end
end
- end
- context 'resets earlier value' do
- let(:environment) { create(:environment, auto_stop_at: 1.day.since.round) }
-
- where(:value, :expected_result) do
- '2 days' | 2.days.to_i
- '1 week' | 1.week.to_i
- '2h20min' | 2.hours.to_i + 20.minutes.to_i
- '' | nil
- 'never' | nil
+ context 'new assignment sets correct auto_stop_in' do
+ include_examples 'for given values expected result is set'
end
- with_them do
- it 'assigns new value' do
- freeze_time do
- subject
- expect(environment.auto_stop_in).to eq(expected_result)
- end
- end
+ context 'resets older value' do
+ let(:environment) { create(:environment, auto_stop_at: 1.day.since.round) }
+
+ include_examples 'for given values expected result is set'
end
end
end
diff --git a/spec/models/factories_spec.rb b/spec/models/factories_spec.rb
index 2993b2aee58..c931c96bafd 100644
--- a/spec/models/factories_spec.rb
+++ b/spec/models/factories_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe 'factories' do
[:issue_customer_relations_contact, :for_contact],
[:issue_customer_relations_contact, :for_issue],
[:package_file, :object_storage],
+ [:rpm_repository_file, :object_storage],
[:pages_domain, :without_certificate],
[:pages_domain, :without_key],
[:pages_domain, :with_missing_chain],
@@ -79,7 +80,6 @@ RSpec.describe 'factories' do
member_task
milestone_release
namespace
- project_broken_repo
project_namespace
project_repository
prometheus_alert
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 2ce75fb1290..68c2d1d3995 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2293,10 +2293,11 @@ RSpec.describe Group do
it 'clears both self and descendant cache when the parent value is updated' do
expect(Rails.cache).to receive(:delete_multi)
.with(
- match_array([
- start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{parent.id}"),
- start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{group.id}")
- ])
+ match_array(
+ [
+ start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{parent.id}"),
+ start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{group.id}")
+ ])
)
parent.update!(auto_devops_enabled: true)
@@ -3312,16 +3313,6 @@ RSpec.describe Group do
expect(group.packages_policy_subject).to be_a(Packages::Policies::Group)
expect(group.packages_policy_subject.group).to eq(group)
end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(read_package_policy_rule: false)
- end
-
- it 'returns group' do
- expect(group.packages_policy_subject).to eq(group)
- end
- end
end
describe '#gitlab_deploy_token' do
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 036d2effc0f..da8c10b67a6 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -139,6 +139,16 @@ RSpec.describe WebHook do
it { is_expected.to contain_exactly(:token, :url, :url_variables) }
end
+ describe '.web_hooks_disable_failed?' do
+ it 'returns true when feature is enabled for parent' do
+ second_hook = build(:project_hook, project: create(:project))
+ stub_feature_flags(web_hooks_disable_failed: [false, second_hook.project])
+
+ expect(described_class.web_hooks_disable_failed?(hook)).to eq(false)
+ expect(described_class.web_hooks_disable_failed?(second_hook)).to eq(true)
+ end
+ end
+
describe 'execute' do
let(:data) { { key: 'value' } }
let(:hook_name) { 'project hook' }
@@ -170,7 +180,7 @@ RSpec.describe WebHook do
end
it 'does not async execute non-executable hooks' do
- hook.update!(disabled_until: 1.day.from_now)
+ allow(hook).to receive(:executable?).and_return(false)
expect(WebHookService).not_to receive(:new)
@@ -238,17 +248,18 @@ RSpec.describe WebHook do
[
[0, :not_set, true],
[0, :past, true],
- [0, :future, false],
- [0, :now, false],
+ [0, :future, true],
+ [0, :now, true],
[1, :not_set, true],
[1, :past, true],
- [1, :future, false],
+ [1, :future, true],
[3, :not_set, true],
[3, :past, true],
- [3, :future, false],
+ [3, :future, true],
[4, :not_set, false],
- [4, :past, false],
- [4, :future, false]
+ [4, :past, true], # expired suspension
+ [4, :now, false], # active suspension
+ [4, :future, false] # active suspension
]
end
@@ -315,7 +326,7 @@ RSpec.describe WebHook do
end
it 'is twice the initial value' do
- expect(hook.next_backoff).to eq(20.minutes)
+ expect(hook.next_backoff).to eq(2 * described_class::INITIAL_BACKOFF)
end
end
@@ -325,7 +336,7 @@ RSpec.describe WebHook do
end
it 'grows exponentially' do
- expect(hook.next_backoff).to eq(80.minutes)
+ expect(hook.next_backoff).to eq(2 * 2 * 2 * described_class::INITIAL_BACKOFF)
end
end
@@ -357,6 +368,7 @@ RSpec.describe WebHook do
end
it 'makes a hook executable if it is currently backed off' do
+ hook.recent_failures = 1000
hook.disabled_until = 1.hour.from_now
expect { hook.enable! }.to change(hook, :executable?).from(false).to(true)
@@ -378,55 +390,71 @@ RSpec.describe WebHook do
end
describe 'backoff!' do
- it 'sets disabled_until to the next backoff' do
- expect { hook.backoff! }.to change(hook, :disabled_until).to(hook.next_backoff.from_now)
- end
+ context 'when we have not backed off before' do
+ it 'does not disable the hook' do
+ expect { hook.backoff! }.not_to change(hook, :executable?).from(true)
+ end
- it 'increments the backoff count' do
- expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ it 'increments the recent_failures count' do
+ expect { hook.backoff! }.to change(hook, :recent_failures).by(1)
+ end
end
- context 'when the hook is permanently disabled' do
+ context 'when we have exhausted the grace period' do
before do
- allow(hook).to receive(:permanently_disabled?).and_return(true)
- end
-
- it 'does not set disabled_until' do
- expect { hook.backoff! }.not_to change(hook, :disabled_until)
+ hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
end
- it 'does not increment the backoff count' do
- expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ it 'sets disabled_until to the next backoff' do
+ expect { hook.backoff! }.to change(hook, :disabled_until).to(hook.next_backoff.from_now)
end
- end
- context 'when we have backed off MAX_FAILURES times' do
- before do
- stub_const("#{described_class}::MAX_FAILURES", 5)
- 5.times { hook.backoff! }
+ it 'increments the backoff count' do
+ expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end
- it 'does not let the backoff count exceed the maximum failure count' do
- expect { hook.backoff! }.not_to change(hook, :backoff_count)
- end
+ context 'when the hook is permanently disabled' do
+ before do
+ allow(hook).to receive(:permanently_disabled?).and_return(true)
+ end
- it 'does not change disabled_until', :skip_freeze_time do
- travel_to(hook.disabled_until - 1.minute) do
+ it 'does not set disabled_until' do
expect { hook.backoff! }.not_to change(hook, :disabled_until)
end
+
+ it 'does not increment the backoff count' do
+ expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ end
end
- it 'changes disabled_until when it has elapsed', :skip_freeze_time do
- travel_to(hook.disabled_until + 1.minute) do
- expect { hook.backoff! }.to change { hook.disabled_until }
- expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
+ context 'when we have backed off MAX_FAILURES times' do
+ before do
+ stub_const("#{described_class}::MAX_FAILURES", 5)
+ (described_class::FAILURE_THRESHOLD + 5).times { hook.backoff! }
+ end
+
+ it 'does not let the backoff count exceed the maximum failure count' do
+ expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ end
+
+ it 'does not change disabled_until', :skip_freeze_time do
+ travel_to(hook.disabled_until - 1.minute) do
+ expect { hook.backoff! }.not_to change(hook, :disabled_until)
+ end
+ end
+
+ it 'changes disabled_until when it has elapsed', :skip_freeze_time do
+ travel_to(hook.disabled_until + 1.minute) do
+ expect { hook.backoff! }.to change { hook.disabled_until }
+ expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
+ end
end
end
- end
- include_examples 'is tolerant of invalid records' do
- def run_expectation
- expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ include_examples 'is tolerant of invalid records' do
+ def run_expectation
+ expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ end
end
end
end
@@ -468,8 +496,19 @@ RSpec.describe WebHook do
expect(hook).not_to be_temporarily_disabled
end
+ it 'allows FAILURE_THRESHOLD initial failures before we back-off' do
+ described_class::FAILURE_THRESHOLD.times do
+ hook.backoff!
+ expect(hook).not_to be_temporarily_disabled
+ end
+
+ hook.backoff!
+ expect(hook).to be_temporarily_disabled
+ end
+
context 'when hook has been told to back off' do
before do
+ hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
hook.backoff!
end
@@ -550,6 +589,7 @@ RSpec.describe WebHook do
context 'when hook has been backed off' do
before do
+ hook.update!(recent_failures: described_class::FAILURE_THRESHOLD + 1)
hook.disabled_until = 1.hour.from_now
end
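
The reworked backoff! examples above describe a two-stage scheme: the first FAILURE_THRESHOLD failures only increment recent_failures and leave the hook executable, and once that grace period is exhausted each further failure suspends the hook for an exponentially growing interval, with backoff_count capped at MAX_FAILURES. The constant names come from the diff; the numeric values and the class below are illustrative, not GitLab's implementation (an INITIAL_BACKOFF of 10 minutes is inferred from the old 20.minutes literal being rewritten as 2 * INITIAL_BACKOFF):

# Illustrative model of the behaviour the specs exercise.
class BackoffTracker
  FAILURE_THRESHOLD = 4        # failures tolerated before any suspension
  MAX_FAILURES = 5             # cap on the number of doublings
  INITIAL_BACKOFF = 10 * 60    # seconds

  attr_reader :recent_failures, :backoff_count, :disabled_until

  def initialize
    @recent_failures = 0
    @backoff_count = 0
    @disabled_until = nil
  end

  def executable?(now = Time.now)
    @disabled_until.nil? || @disabled_until <= now
  end

  def next_backoff
    INITIAL_BACKOFF * (2**@backoff_count)
  end

  def backoff!(now = Time.now)
    @recent_failures += 1
    return if @recent_failures <= FAILURE_THRESHOLD # grace period: stay executable
    return unless executable?(now)                  # an active suspension is left alone

    @backoff_count = [@backoff_count + 1, MAX_FAILURES].min
    @disabled_until = now + next_backoff
  end
end

tracker = BackoffTracker.new
4.times { tracker.backoff! }     # within the grace period
tracker.executable?              # => true
tracker.backoff!                 # grace period exhausted: first real suspension
tracker.executable?              # => false until disabled_until has passed
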
diff --git a/spec/models/incident_management/timeline_event_spec.rb b/spec/models/incident_management/timeline_event_spec.rb
index fea391acda3..d288cc1a75d 100644
--- a/spec/models/incident_management/timeline_event_spec.rb
+++ b/spec/models/incident_management/timeline_event_spec.rb
@@ -13,6 +13,12 @@ RSpec.describe IncidentManagement::TimelineEvent do
it { is_expected.to belong_to(:incident) }
it { is_expected.to belong_to(:updated_by_user) }
it { is_expected.to belong_to(:promoted_from_note) }
+ it { is_expected.to have_many(:timeline_event_tag_links).class_name('IncidentManagement::TimelineEventTagLink') }
+
+ it do
+ is_expected.to have_many(:timeline_event_tags)
+ .class_name('IncidentManagement::TimelineEventTag').through(:timeline_event_tag_links)
+ end
end
describe 'validations' do
@@ -22,7 +28,6 @@ RSpec.describe IncidentManagement::TimelineEvent do
it { is_expected.to validate_presence_of(:incident) }
it { is_expected.to validate_presence_of(:note) }
it { is_expected.to validate_length_of(:note).is_at_most(10_000) }
- it { is_expected.to validate_presence_of(:note_html) }
it { is_expected.to validate_length_of(:note_html).is_at_most(10_000) }
it { is_expected.to validate_presence_of(:occurred_at) }
it { is_expected.to validate_presence_of(:action) }
diff --git a/spec/models/incident_management/timeline_event_tag_link_spec.rb b/spec/models/incident_management/timeline_event_tag_link_spec.rb
new file mode 100644
index 00000000000..fe31a6604c1
--- /dev/null
+++ b/spec/models/incident_management/timeline_event_tag_link_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::TimelineEventTagLink do
+ describe 'associations' do
+ it { is_expected.to belong_to(:timeline_event) }
+ it { is_expected.to belong_to(:timeline_event_tag) }
+ end
+end
diff --git a/spec/models/incident_management/timeline_event_tag_spec.rb b/spec/models/incident_management/timeline_event_tag_spec.rb
new file mode 100644
index 00000000000..cff8ad8469f
--- /dev/null
+++ b/spec/models/incident_management/timeline_event_tag_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::TimelineEventTag do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:timeline_event_tag_links).class_name('IncidentManagement::TimelineEventTagLink') }
+
+ it {
+ is_expected.to have_many(:timeline_events)
+ .class_name('IncidentManagement::TimelineEvent').through(:timeline_event_tag_links)
+ }
+ end
+
+ describe 'validations' do
+ subject { build(:incident_management_timeline_event_tag) }
+
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to([:project_id]) }
+
+ it { is_expected.to allow_value('Test tag 1').for(:name) }
+ it { is_expected.not_to allow_value('Test tag, 1').for(:name) }
+ it { is_expected.not_to allow_value('').for(:name) }
+ it { is_expected.not_to allow_value('s' * 256).for(:name) }
+ end
+end
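The new tag spec above pins down the model's shape: a project-scoped name with a length cap, uniqueness per project, and no commas allowed. Below is a minimal sketch of a model that would satisfy those matchers; it is not the GitLab implementation, and the table name, regex, and message are assumptions inferred from the allowed and rejected values.

# Sketch only, not the GitLab implementation.
class SketchTimelineEventTag < ApplicationRecord
  self.table_name = 'incident_management_timeline_event_tags' # assumed

  belongs_to :project

  has_many :timeline_event_tag_links, class_name: 'IncidentManagement::TimelineEventTagLink'
  has_many :timeline_events,
    class_name: 'IncidentManagement::TimelineEvent',
    through: :timeline_event_tag_links

  validates :name,
    presence: true,
    length: { maximum: 255 },
    uniqueness: { scope: :project_id },
    format: { without: /,/, message: 'must not contain commas' } # assumed rule
end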
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 950f2c639fb..baa3443b4c5 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -971,11 +971,12 @@ RSpec.describe Integration do
describe '#secret_fields' do
it 'returns all fields with type `password`' do
- allow(subject).to receive(:fields).and_return([
- { name: 'password', type: 'password' },
- { name: 'secret', type: 'password' },
- { name: 'public', type: 'text' }
- ])
+ allow(subject).to receive(:fields).and_return(
+ [
+ { name: 'password', type: 'password' },
+ { name: 'secret', type: 'password' },
+ { name: 'public', type: 'text' }
+ ])
expect(subject.secret_fields).to match_array(%w[password secret])
end
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index 4a86322cdaf..ff9f30efdca 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -47,14 +47,15 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
'[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> opened by Test User (test.user)')
- expect(subject.attachments).to eq([
- {
- title: "#100 Issue title",
- title_link: "http://url.com",
- text: "issue description",
- color: color
- }
- ])
+ expect(subject.attachments).to eq(
+ [
+ {
+ title: "#100 Issue title",
+ title_link: "http://url.com",
+ text: "issue description",
+ color: color
+ }
+ ])
end
end
diff --git a/spec/models/integrations/chat_message/wiki_page_message_spec.rb b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
index 16659311c52..dae8d293354 100644
--- a/spec/models/integrations/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
@@ -71,12 +71,13 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
end
it 'returns the commit message for a new wiki page' do
- expect(subject.attachments).to eq([
- {
- text: commit_message,
- color: color
- }
- ])
+ expect(subject.attachments).to eq(
+ [
+ {
+ text: commit_message,
+ color: color
+ }
+ ])
end
end
@@ -86,12 +87,13 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
end
it 'returns the commit message for an updated wiki page' do
- expect(subject.attachments).to eq([
- {
- text: commit_message,
- color: color
- }
- ])
+ expect(subject.attachments).to eq(
+ [
+ {
+ text: commit_message,
+ color: color
+ }
+ ])
end
end
end
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index b7da6a79e44..71a5bbc4db1 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -47,6 +47,10 @@ RSpec.describe Integrations::Datadog do
Gitlab::DataBuilder::ArchiveTrace.build(build)
end
+ it_behaves_like Integrations::ResetSecretFields do
+ let(:integration) { instance }
+ end
+
it_behaves_like Integrations::HasWebHook do
let(:integration) { instance }
let(:hook_url) { "#{described_class::URL_TEMPLATE % { datadog_domain: dd_site }}?dd-api-key={api_key}&env=#{dd_env}&service=#{dd_service}" }
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 26b43fa3313..9ab37a92e89 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -7,7 +7,11 @@ RSpec.describe Integrations::Harbor do
let(:project_name) { 'testproject' }
let(:username) { 'harborusername' }
let(:password) { 'harborpassword' }
- let(:harbor_integration) { create(:harbor_integration) }
+ let(:harbor_integration) { build(:harbor_integration) }
+
+ it_behaves_like Integrations::ResetSecretFields do
+ let(:integration) { described_class.new }
+ end
describe "masked password" do
subject { build(:harbor_integration) }
@@ -66,6 +70,8 @@ RSpec.describe Integrations::Harbor do
end
context 'ci variables' do
+ let(:harbor_integration) { create(:harbor_integration) }
+
it 'returns vars when harbor_integration is activated' do
ci_vars = [
{ key: 'HARBOR_URL', value: url },
@@ -94,66 +100,4 @@ RSpec.describe Integrations::Harbor do
end
end
end
-
- describe 'before_validation :reset_username_and_password' do
- context 'when username/password was previously set' do
- it 'resets username and password if url changed' do
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to be_nil
- expect(harbor_integration.username).to be_nil
- end
-
- it 'does not reset password if username changed' do
- harbor_integration.username = 'newusername'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to eq('harborpassword')
- end
-
- it 'does not reset username if password changed' do
- harbor_integration.password = 'newpassword'
- harbor_integration.valid?
-
- expect(harbor_integration.username).to eq('harborusername')
- end
-
- it "does not reset password if new url is set together with password, even if it's the same password" do
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.password = 'harborpassword'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to eq('harborpassword')
- expect(harbor_integration.username).to be_nil
- expect(harbor_integration.url).to eq('https://anotherharbor.com')
- end
-
- it "does not reset username if new url is set together with username, even if it's the same username" do
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.username = 'harborusername'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to be_nil
- expect(harbor_integration.username).to eq('harborusername')
- expect(harbor_integration.url).to eq('https://anotherharbor.com')
- end
- end
-
- it 'saves password if new url is set together with password when no password was previously set' do
- harbor_integration.password = nil
- harbor_integration.username = nil
-
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.password = 'newpassword'
- harbor_integration.username = 'newusername'
- harbor_integration.save!
-
- expect(harbor_integration).to have_attributes(
- url: 'https://anotherharbor.com',
- password: 'newpassword',
- username: 'newusername'
- )
- end
- end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index a52a4514ebe..9f928442b28 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -81,9 +81,10 @@ RSpec.describe Integrations::Jira do
jira_integration.jira_issue_transition_id = 'foo bar'
expect(jira_integration).not_to be_valid
- expect(jira_integration.errors.full_messages).to eq([
- 'Jira issue transition IDs must be a list of numbers that can be split with , or ;'
- ])
+ expect(jira_integration.errors.full_messages).to eq(
+ [
+ 'Jira issue transition IDs must be a list of numbers that can be split with , or ;'
+ ])
end
end
end
@@ -213,6 +214,8 @@ RSpec.describe Integrations::Jira do
'EXT_EXT-1234' | 'EXT_EXT-1234'
'EXT3_EXT-1234' | 'EXT3_EXT-1234'
'3EXT_EXT-1234' | ''
+ 'CVE-2022-123' | ''
+ 'CVE-123' | 'CVE-123'
end
with_them do
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index b1b3e42b5e9..b6de2bb7176 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -81,10 +81,14 @@ RSpec.describe Integrations::MicrosoftTeams do
let(:opts) { { title: 'Awesome issue', description: 'please fix' } }
let(:issues_sample_data) do
service = Issues::CreateService.new(project: project, current_user: user, params: opts, spam_params: nil)
- issue = service.execute
+ issue = service.execute[:issue]
service.hook_data(issue, 'open')
end
+ before do
+ project.add_developer(user)
+ end
+
it "calls Microsoft Teams API" do
chat_integration.execute(issues_sample_data)
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index ae965ed78d1..3971511872b 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
include PrometheusHelpers
include ReactiveCachingHelpers
- let_it_be_with_reload(:project) { create(:prometheus_project) }
+ let_it_be_with_reload(:project) { create(:project, :with_prometheus_integration) }
let(:integration) { project.prometheus_integration }
@@ -318,7 +318,7 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
context 'cluster belongs to projects group' do
let_it_be(:group) { create(:group) }
- let(:project) { create(:prometheus_project, group: group) }
+ let(:project) { create(:project, :with_prometheus_integration, group: group) }
let(:cluster) { create(:cluster_for_group, groups: [group]) }
it 'returns true' do
diff --git a/spec/models/jira_connect/public_key_spec.rb b/spec/models/jira_connect/public_key_spec.rb
new file mode 100644
index 00000000000..2e79a3ca4d2
--- /dev/null
+++ b/spec/models/jira_connect/public_key_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::PublicKey do
+ describe '.create!' do
+ let(:key) { 'key123' }
+
+ subject(:create_public_key) { described_class.create!(key: key) }
+
+ it 'only accepts valid public keys' do
+ expect { create_public_key }.to raise_error(ArgumentError, 'Invalid public key')
+ end
+
+ shared_examples 'creates a jira connect public key' do
+ it 'generates a Uuid' do
+ expect(create_public_key.uuid).to match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/)
+ end
+
+ it 'sets the key attribute' do
+ expect(create_public_key.key).to eq(expected_key)
+ end
+
+ it 'persists the values' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with(anything, expected_key, anything)
+ end
+
+ create_public_key
+ end
+ end
+
+ context 'with OpenSSL::PKey::RSA object' do
+ let(:key) { OpenSSL::PKey::RSA.generate(3072).public_key }
+ let(:expected_key) { key.to_s }
+
+ it_behaves_like 'creates a jira connect public key'
+ end
+
+ context 'with string public key' do
+ let(:key) { OpenSSL::PKey::RSA.generate(3072).public_key.to_s }
+ let(:expected_key) { key }
+
+ it_behaves_like 'creates a jira connect public key'
+ end
+ end
+
+ describe '.find' do
+ let(:uuid) { '1234' }
+
+ subject(:find_public_key) { described_class.find(uuid) }
+
+ it 'raises an error' do
+ expect { find_public_key }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ context 'when the public key exists' do
+ let_it_be(:key) { OpenSSL::PKey::RSA.generate(3072).public_key }
+ let_it_be(:public_key) { described_class.create!(key: key) }
+
+ let(:uuid) { public_key.uuid }
+
+ it 'loads the public key', :aggregate_failures do
+ expect(find_public_key).to be_kind_of(described_class)
+ expect(find_public_key.uuid).to eq(public_key.uuid)
+ expect(find_public_key.key).to eq(key.to_s)
+ end
+ end
+ end
+
+ describe '#save!' do
+ let(:key) { OpenSSL::PKey::RSA.generate(3072).public_key }
+ let(:public_key) { described_class.new(key: key, uuid: '123') }
+ let(:jira_connect_installation) { build(:jira_connect_installation) }
+
+ subject(:save_public_key) { public_key.save! }
+
+ it 'persists the values' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with(anything, key.to_s, ex: 5.minutes.to_i)
+ end
+
+ save_public_key
+ end
+
+ it 'returns itself' do
+ expect(save_public_key).to eq(public_key)
+ end
+ end
+end
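The public key spec above describes a small Redis-backed record: create! validates the key and assigns a UUID, find raises ActiveRecord::RecordNotFound for unknown UUIDs, and values are written with a five-minute expiry. A minimal sketch consistent with those examples follows; the Redis key layout and UUID generation are assumptions, and only the calls the spec itself references (Gitlab::Redis::SharedState, redis.set with ex:) are taken as given.

# Sketch only, not the GitLab implementation.
class SketchPublicKey
  attr_reader :key, :uuid

  def self.create!(key:)
    new(key: key, uuid: SecureRandom.uuid).save!
  end

  def self.find(uuid)
    value = Gitlab::Redis::SharedState.with { |redis| redis.get("jira_connect:public_key:#{uuid}") } # assumed key name
    raise ActiveRecord::RecordNotFound if value.nil?

    new(key: value, uuid: uuid)
  end

  def initialize(key:, uuid:)
    raise ArgumentError, 'Invalid public key' unless valid_public_key?(key)

    @key = key.to_s
    @uuid = uuid
  end

  def save!
    Gitlab::Redis::SharedState.with do |redis|
      redis.set("jira_connect:public_key:#{uuid}", key, ex: 5.minutes.to_i) # assumed key name
    end

    self
  end

  private

  def valid_public_key?(key)
    OpenSSL::PKey.read(key.to_s).public?
  rescue OpenSSL::PKey::PKeyError
    false
  end
end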
diff --git a/spec/models/jira_connect_installation_spec.rb b/spec/models/jira_connect_installation_spec.rb
index 9c1f7c678a9..e57d3e78a4e 100644
--- a/spec/models/jira_connect_installation_spec.rb
+++ b/spec/models/jira_connect_installation_spec.rb
@@ -20,29 +20,53 @@ RSpec.describe JiraConnectInstallation do
it { is_expected.not_to allow_value('not/a/url').for(:instance_url) }
end
- describe '.for_project' do
- let(:other_group) { create(:group) }
- let(:parent_group) { create(:group) }
- let(:group) { create(:group, parent: parent_group) }
- let(:project) { create(:project, group: group) }
+ describe 'scopes' do
+ let_it_be(:jira_connect_subscription) { create(:jira_connect_subscription) }
- subject { described_class.for_project(project) }
+ describe '.for_project' do
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:project) { create(:project, group: group) }
- it 'returns installations with subscriptions for project' do
- sub_on_project_namespace = create(:jira_connect_subscription, namespace: group)
- sub_on_ancestor_namespace = create(:jira_connect_subscription, namespace: parent_group)
+ subject { described_class.for_project(project) }
- # Subscription on other group that shouldn't be returned
- create(:jira_connect_subscription, namespace: other_group)
+ it 'returns installations with subscriptions for project' do
+ sub_on_project_namespace = create(:jira_connect_subscription, namespace: group)
+ sub_on_ancestor_namespace = create(:jira_connect_subscription, namespace: parent_group)
- expect(subject).to contain_exactly(sub_on_project_namespace.installation, sub_on_ancestor_namespace.installation)
+ # Subscription on other group that shouldn't be returned
+ create(:jira_connect_subscription, namespace: other_group)
+
+ expect(subject).to contain_exactly(
+ sub_on_project_namespace.installation, sub_on_ancestor_namespace.installation
+ )
+ end
+
+ it 'returns distinct installations' do
+ subscription = create(:jira_connect_subscription, namespace: group)
+ create(:jira_connect_subscription, namespace: parent_group, installation: subscription.installation)
+
+ expect(subject).to contain_exactly(subscription.installation)
+ end
end
- it 'returns distinct installations' do
- subscription = create(:jira_connect_subscription, namespace: group)
- create(:jira_connect_subscription, namespace: parent_group, installation: subscription.installation)
+ describe '.direct_installations' do
+ subject { described_class.direct_installations }
+
+ it { is_expected.to contain_exactly(jira_connect_subscription.installation) }
+ end
+
+ describe '.proxy_installations' do
+ subject { described_class.proxy_installations }
+
+ it { is_expected.to be_empty }
- expect(subject).to contain_exactly(subscription.installation)
+ context 'with an installation on a self-managed instance' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'http://self-managed-gitlab.com') }
+
+ it { is_expected.to contain_exactly(installation) }
+ end
end
end
@@ -71,4 +95,46 @@ RSpec.describe JiraConnectInstallation do
end
end
end
+
+ describe 'audience_url' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ subject(:audience) { installation.audience_url }
+
+ it { is_expected.to eq(nil) }
+
+ context 'when proxy installation' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://example.com') }
+
+ it { is_expected.to eq('https://example.com/-/jira_connect') }
+ end
+ end
+
+ describe 'audience_installed_event_url' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ subject(:audience) { installation.audience_installed_event_url }
+
+ it { is_expected.to eq(nil) }
+
+ context 'when proxy installation' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://example.com') }
+
+ it { is_expected.to eq('https://example.com/-/jira_connect/events/installed') }
+ end
+ end
+
+ describe 'proxy?' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ subject { installation.proxy? }
+
+ it { is_expected.to eq(false) }
+
+ context 'when instance_url is present' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://example.com') }
+
+ it { is_expected.to eq(true) }
+ end
+ end
end
diff --git a/spec/models/label_note_spec.rb b/spec/models/label_note_spec.rb
index 145ddd44834..6658d42f25e 100644
--- a/spec/models/label_note_spec.rb
+++ b/spec/models/label_note_spec.rb
@@ -18,9 +18,10 @@ RSpec.describe LabelNote do
it_behaves_like 'label note created from events'
it 'includes a link to the list of issues filtered by the label' do
- note = described_class.from_events([
- create(:resource_label_event, label: label, issue: resource)
- ])
+ note = described_class.from_events(
+ [
+ create(:resource_label_event, label: label, issue: resource)
+ ])
expect(note.note_html).to include(project_issues_path(project, label_name: label.title))
end
@@ -32,9 +33,10 @@ RSpec.describe LabelNote do
it_behaves_like 'label note created from events'
it 'includes a link to the list of merge requests filtered by the label' do
- note = described_class.from_events([
- create(:resource_label_event, label: label, merge_request: resource)
- ])
+ note = described_class.from_events(
+ [
+ create(:resource_label_event, label: label, merge_request: resource)
+ ])
expect(note.note_html).to include(project_merge_requests_path(project, label_name: label.title))
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 7b75a6ee1c2..04df8ecc882 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -178,8 +178,8 @@ RSpec.describe Member do
end
context 'member role is associated' do
- let_it_be(:member_role) do
- create(:member_role, members: [member])
+ let!(:member_role) do
+ create(:member_role, members: [member], base_access_level: Gitlab::Access::DEVELOPER)
end
context 'member role matches access level' do
@@ -201,7 +201,9 @@ RSpec.describe Member do
member.access_level = Gitlab::Access::MAINTAINER
expect(member).not_to be_valid
- expect(member.errors.full_messages).to include( "Access level cannot be changed since member is associated with a custom role")
+ expect(member.errors.full_messages).to include(
+ "Access level cannot be changed since member is associated with a custom role"
+ )
end
end
end
@@ -824,22 +826,6 @@ RSpec.describe Member do
expect(user.authorized_projects.reload).to include(project)
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(allow_non_blocking_member_refresh: false)
- end
-
- it 'successfully completes a blocking refresh', :delete, :sidekiq_inline do
- member.blocking_refresh = false
-
- expect(member).to receive(:refresh_member_authorized_projects).with(blocking: true).and_call_original
-
- member.accept_invite!(user)
-
- expect(user.authorized_projects.reload).to include(project)
- end
- end
end
it 'does not accept the invite if saving a new user fails' do
diff --git a/spec/models/members/member_role_spec.rb b/spec/models/members/member_role_spec.rb
index e8993491918..e2691e2e78c 100644
--- a/spec/models/members/member_role_spec.rb
+++ b/spec/models/members/member_role_spec.rb
@@ -11,7 +11,39 @@ RSpec.describe MemberRole do
describe 'validation' do
subject { described_class.new }
- it { is_expected.to validate_presence_of(:namespace_id) }
+ it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:base_access_level) }
+
+ context 'for namespace' do
+ subject { build(:member_role) }
+
+ let_it_be(:root_group) { create(:group) }
+
+ context 'when namespace is a subgroup' do
+ it 'is invalid' do
+ subgroup = create(:group, parent: root_group)
+ subject.namespace = subgroup
+
+ expect(subject).to be_invalid
+ end
+ end
+
+ context 'when namespace is a root group' do
+ it 'is valid' do
+ subject.namespace = root_group
+
+ expect(subject).to be_valid
+ end
+ end
+
+ context 'when namespace is not present' do
+ it 'is invalid with a different error message' do
+ subject.namespace = nil
+
+ expect(subject).to be_invalid
+ expect(subject.errors.full_messages).to eq(["Namespace can't be blank"])
+ end
+ end
+ end
end
end
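The member role examples above require a namespace to be present and to be a top-level group, with only the plain presence message when the namespace is missing. A minimal sketch of validations satisfying that, under stated assumptions (the custom method name, error wording, and the parent_id check are placeholders):

# Sketch only, not the GitLab implementation.
class SketchMemberRole < ApplicationRecord
  belongs_to :namespace

  validates :namespace, presence: true
  validates :base_access_level, presence: true
  validate :belongs_to_top_level_namespace

  private

  def belongs_to_top_level_namespace
    # Skip when nil so the presence error is the only message, matching the spec.
    return if namespace.nil? || namespace.parent_id.nil?

    errors.add(:namespace, 'must be a top-level namespace') # assumed wording
  end
end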
diff --git a/spec/models/merge_request/cleanup_schedule_spec.rb b/spec/models/merge_request/cleanup_schedule_spec.rb
index 9c50b64f2bd..1f1f33db5ed 100644
--- a/spec/models/merge_request/cleanup_schedule_spec.rb
+++ b/spec/models/merge_request/cleanup_schedule_spec.rb
@@ -111,12 +111,13 @@ RSpec.describe MergeRequest::CleanupSchedule do
let!(:cleanup_schedule_7) { create(:merge_request_cleanup_schedule, :failed, scheduled_at: 5.days.ago) }
it 'returns records that are scheduled before or on current time and unstarted (ordered by scheduled first)' do
- expect(described_class.scheduled_and_unstarted).to eq([
- cleanup_schedule_2,
- cleanup_schedule_1,
- cleanup_schedule_5,
- cleanup_schedule_4
- ])
+ expect(described_class.scheduled_and_unstarted).to eq(
+ [
+ cleanup_schedule_2,
+ cleanup_schedule_1,
+ cleanup_schedule_5,
+ cleanup_schedule_4
+ ])
end
end
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 7dc550a6c93..f107a56c1b6 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -220,5 +220,25 @@ RSpec.describe MergeRequestDiffFile do
file.utf8_diff
end
end
+
+ context 'when an exception is raised' do
+ it 'logs the exception and returns an empty string' do
+ allow(file).to receive(:diff).and_raise(StandardError, 'Error!')
+
+ expect(Gitlab::AppLogger)
+ .to receive(:warn)
+ .with(
+ a_hash_including(
+ :message => 'Failed fetching merge request diff',
+ :merge_request_diff_file_id => file.id,
+ :merge_request_diff_id => file.merge_request_diff.id,
+ 'exception.class' => 'StandardError',
+ 'exception.message' => 'Error!'
+ )
+ )
+
+ expect(file.utf8_diff).to eq('')
+ end
+ end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 007e84164a8..e9e8bd9bfea 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -466,19 +466,20 @@ RSpec.describe MergeRequestDiff do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
# There will be 11 returned, as we have to account for both new and old paths
- expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_paths).to eq([
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/images/wm.svg',
- 'files/js/commit.js.coffee',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/.DS_Store',
- 'files/whitespace'
- ])
+ expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_paths).to eq(
+ [
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.js.coffee',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace'
+ ])
end
context 'when diff_options include ignore_whitespace_change' do
@@ -555,29 +556,30 @@ RSpec.describe MergeRequestDiff do
it 'sorts diff files directory first' do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
- expect(diff_with_commits.diffs(diff_options).diff_paths).to eq([
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/images/wm.svg',
- 'files/js/commit.js.coffee',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/.DS_Store',
- 'files/whitespace',
- 'foo/bar/.gitkeep',
- 'with space/README.md',
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'gitlab-grack',
- 'gitlab-shell'
- ])
+ expect(diff_with_commits.diffs(diff_options).diff_paths).to eq(
+ [
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.js.coffee',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'with space/README.md',
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'gitlab-grack',
+ 'gitlab-shell'
+ ])
end
end
end
@@ -661,28 +663,29 @@ RSpec.describe MergeRequestDiff do
mr_diff = create(:merge_request).merge_request_diff
diff_files_paths = mr_diff.merge_request_diff_files.map { |file| file.new_path.presence || file.old_path }
- expect(diff_files_paths).to eq([
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/images/wm.svg',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/.DS_Store',
- 'files/whitespace',
- 'foo/bar/.gitkeep',
- 'with space/README.md',
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'gitlab-grack',
- 'gitlab-shell'
- ])
+ expect(diff_files_paths).to eq(
+ [
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'with space/README.md',
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'gitlab-grack',
+ 'gitlab-shell'
+ ])
end
it 'expands collapsed diffs before saving' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index f27f3b749b1..32518b867cb 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1837,9 +1837,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'persisted merge request' do
context 'with a limit' do
it 'returns a limited number of commit shas' do
- expect(subject.commit_shas(limit: 2)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
- ])
+ expect(subject.commit_shas(limit: 2)).to eq(
+ %w[b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6])
end
end
@@ -4200,6 +4199,45 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'state machine transitions' do
let(:project) { create(:project, :repository) }
+ shared_examples_for 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
+ specify do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ transition!
+ end
+ end
+
+ shared_examples_for 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
+ specify do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(subject).and_call_original
+
+ transition!
+ end
+
+ context 'when trigger_mr_subscription_on_merge_status_change is disabled' do
+ before do
+ stub_feature_flags(trigger_mr_subscription_on_merge_status_change: false)
+ end
+
+ it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+ end
+
+ shared_examples 'for an invalid state transition' do
+ specify 'is not a valid state transition' do
+ expect { transition! }.to raise_error(StateMachines::InvalidTransition)
+ end
+ end
+
+ shared_examples 'for a valid state transition' do
+ it 'is a valid state transition' do
+ expect { transition! }
+ .to change { subject.merge_status }
+ .from(merge_status.to_s)
+ .to(expected_merge_status)
+ end
+ end
+
describe '#unlock_mr' do
subject { create(:merge_request, state: 'locked', source_project: project, merge_jid: 123) }
@@ -4214,22 +4252,58 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '#mark_as_unchecked' do
+ describe '#mark_as_preparing' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
- shared_examples 'for an invalid state transition' do
- it 'is not a valid state transition' do
- expect { subject.mark_as_unchecked! }.to raise_error(StateMachines::InvalidTransition)
- end
+ let(:expected_merge_status) { 'preparing' }
+
+ def transition!
+ subject.mark_as_preparing!
end
- shared_examples 'for an valid state transition' do
- it 'is a valid state transition' do
- expect { subject.mark_as_unchecked! }
- .to change { subject.merge_status }
- .from(merge_status.to_s)
- .to(expected_merge_status)
- end
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged' do
+ let(:merge_status) { :cannot_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for an invalid state transition'
+ end
+ end
+
+ describe '#mark_as_unchecked' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ def transition!
+ subject.mark_as_unchecked!
end
context 'when the status is unchecked' do
@@ -4242,14 +4316,16 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_status) { :checking }
let(:expected_merge_status) { 'unchecked' }
- include_examples 'for an valid state transition'
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
let(:expected_merge_status) { 'unchecked' }
- include_examples 'for an valid state transition'
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged_recheck' do
@@ -4262,14 +4338,164 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_status) { :cannot_be_merged }
let(:expected_merge_status) { 'cannot_be_merged_recheck' }
- include_examples 'for an valid state transition'
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+ let(:expected_merge_status) { 'cannot_be_merged_recheck' }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+ end
+
+ describe '#mark_as_checking' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ def transition!
+ subject.mark_as_checking!
+ end
+
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+ let(:expected_merge_status) { 'checking' }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+ let(:expected_merge_status) { 'cannot_be_merged_rechecking' }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged' do
let(:merge_status) { :cannot_be_merged }
- let(:expected_merge_status) { 'cannot_be_merged_recheck' }
- include_examples 'for an valid state transition'
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for an invalid state transition'
+ end
+ end
+
+ describe '#mark_as_mergeable' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ let(:expected_merge_status) { 'can_be_merged' }
+
+ def transition!
+ subject.mark_as_mergeable!
+ end
+
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is cannot_be_merged' do
+ let(:merge_status) { :cannot_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+ end
+
+ describe '#mark_as_unmergeable' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ let(:expected_merge_status) { 'cannot_be_merged' }
+
+ def transition!
+ subject.mark_as_unmergeable!
+ end
+
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is cannot_be_merged' do
+ let(:merge_status) { :cannot_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
@@ -4739,15 +4965,17 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'persisted merge request' do
context 'with a limit' do
it 'returns a limited number of commits' do
- expect(subject.commits(limit: 2).map(&:sha)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0
- 498214de67004b1da3d820901307bed2a68a8ef6
- ])
- expect(subject.commits(limit: 3).map(&:sha)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0
- 498214de67004b1da3d820901307bed2a68a8ef6
- 1b12f15a11fc6e62177bef08f47bc7b5ce50b141
- ])
+ expect(subject.commits(limit: 2).map(&:sha)).to eq(
+ %w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
+ expect(subject.commits(limit: 3).map(&:sha)).to eq(
+ %w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ 1b12f15a11fc6e62177bef08f47bc7b5ce50b141
+ ])
end
end
@@ -4792,9 +5020,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it 'returns the safe number of commits' do
- expect(subject.recent_commits.map(&:sha)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
- ])
+ expect(subject.recent_commits.map(&:sha)).to eq(
+ %w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
end
end
@@ -5171,4 +5400,22 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
end
+
+ describe '#can_suggest_reviewers?' do
+ let_it_be(:merge_request) { build(:merge_request, :opened, project: project) }
+
+ subject(:can_suggest_reviewers) { merge_request.can_suggest_reviewers? }
+
+ it 'returns false' do
+ expect(can_suggest_reviewers).to be(false)
+ end
+ end
+
+ describe '#suggested_reviewer_users' do
+ let_it_be(:merge_request) { build(:merge_request, project: project) }
+
+ subject(:suggested_reviewer_users) { merge_request.suggested_reviewer_users }
+
+ it { is_expected.to be_empty }
+ end
end
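The shared examples added above separate merge-status transitions that should publish the mergeRequestMergeStatusUpdated GraphQL subscription from those that should not, gated by the trigger_mr_subscription_on_merge_status_change feature flag. The following fragment is a sketch of a callback that would satisfy them; it assumes the state_machines after_transition DSL and a project-scoped flag check, neither of which is shown in the diff, and it is not the GitLab code.

# Sketch only: fragment of a state_machine :merge_status block.
state_machine :merge_status, initial: :unchecked do
  after_transition any => [:unchecked, :checking, :cannot_be_merged_recheck,
                           :cannot_be_merged_rechecking, :can_be_merged, :cannot_be_merged] do |merge_request|
    # Transitions into :preparing are deliberately outside this list, matching the
    # 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' example.
    if Feature.enabled?(:trigger_mr_subscription_on_merge_status_change, merge_request.project)
      GraphqlTriggers.merge_request_merge_status_updated(merge_request)
    end
  end
end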
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index af1383b68bf..9f6b1f8016b 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -5,9 +5,35 @@ require 'spec_helper'
RSpec.describe Milestone do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
let_it_be(:issue) { create(:issue, project: project) }
+ describe 'modules' do
+ context 'with a project' do
+ it_behaves_like 'AtomicInternalId' do
+ let(:internal_id_attribute) { :iid }
+ let(:instance) { build(:milestone, project: create(:project), group: nil) }
+ let(:scope) { :project }
+ let(:scope_attrs) { { project: instance.project } }
+ let(:usage) { :milestones }
+ end
+ end
+
+ context 'with a group' do
+ it_behaves_like 'AtomicInternalId' do
+ let(:internal_id_attribute) { :iid }
+ let(:instance) { build(:milestone, project: nil, group: create(:group)) }
+ let(:scope) { :group }
+ let(:scope_attrs) { { namespace: instance.group } }
+ let(:usage) { :milestones }
+ end
+ end
+ end
+
it_behaves_like 'a timebox', :milestone do
+ let(:project) { create(:project, :public) }
+ let(:timebox) { create(:milestone, project: project) }
+
describe "#uniqueness_of_title" do
context "per project" do
it "does not accept the same title in a project twice" do
@@ -25,7 +51,7 @@ RSpec.describe Milestone do
end
context "per group" do
- let(:timebox) { create(:milestone, *timebox_args, group: group) }
+ let(:timebox) { create(:milestone, group: group) }
before do
project.update!(group: group)
@@ -96,9 +122,22 @@ RSpec.describe Milestone do
end
end
end
+
+ describe '#parent_type_check' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'is invalid if it has both project_id and group_id' do
+ milestone.project = project
+
+ expect(milestone).not_to be_valid
+ expect(milestone.errors[:project_id]).to include("milestone should belong either to a project or a group.")
+ end
+ end
end
describe "Associations" do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:group) }
it { is_expected.to have_many(:releases) }
it { is_expected.to have_many(:milestone_releases) }
end
@@ -562,6 +601,57 @@ RSpec.describe Milestone do
it { is_expected.not_to match("gitlab-org/gitlab-ce/milestones/123") }
end
+ describe '#merge_requests_enabled?' do
+ context "per project" do
+ it "is true for projects with MRs enabled" do
+ project = create(:project, :merge_requests_enabled)
+ milestone = build(:milestone, project: project)
+
+ expect(milestone.merge_requests_enabled?).to be_truthy
+ end
+
+ it "is false for projects with MRs disabled" do
+ project = create(:project, :repository_enabled, :merge_requests_disabled)
+ milestone = build(:milestone, project: project)
+
+ expect(milestone.merge_requests_enabled?).to be_falsey
+ end
+
+ it "is false for projects with repository disabled" do
+ project = create(:project, :repository_disabled)
+ milestone = build(:milestone, project: project)
+
+ expect(milestone.merge_requests_enabled?).to be_falsey
+ end
+ end
+
+ context "per group" do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it "is always true for groups, for performance reasons" do
+ expect(milestone.merge_requests_enabled?).to be_truthy
+ end
+ end
+ end
+
+ describe '#resource_parent' do
+ context 'when group is present' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'returns the group' do
+ expect(milestone.resource_parent).to eq(group)
+ end
+ end
+
+ context 'when project is present' do
+ let(:milestone) { build(:milestone, project: project) }
+
+ it 'returns the project' do
+ expect(milestone.resource_parent).to eq(project)
+ end
+ end
+ end
+
describe '#parent' do
context 'with group' do
it 'returns the expected parent' do
@@ -598,4 +688,40 @@ RSpec.describe Milestone do
end
end
end
+
+ describe '#project_milestone?' do
+ context 'when project_id is present' do
+ let(:milestone) { build(:milestone, project: project) }
+
+ it 'returns true' do
+ expect(milestone.project_milestone?).to be_truthy
+ end
+ end
+
+ context 'when project_id is not present' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'returns false' do
+ expect(milestone.project_milestone?).to be_falsey
+ end
+ end
+ end
+
+ describe '#group_milestone?' do
+ context 'when group_id is present' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'returns true' do
+ expect(milestone.group_milestone?).to be_truthy
+ end
+ end
+
+ context 'when group_id is not present' do
+ let(:milestone) { build(:milestone, project: project) }
+
+ it 'returns false' do
+ expect(milestone.group_milestone?).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
index f58d30f81a0..3bf1e80a152 100644
--- a/spec/models/ml/candidate_spec.rb
+++ b/spec/models/ml/candidate_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Ml::Candidate do
+RSpec.describe Ml::Candidate, factory_default: :keep do
+ let_it_be(:candidate) { create_default(:ml_candidates, :with_metrics_and_params) }
+
describe 'associations' do
it { is_expected.to belong_to(:experiment) }
it { is_expected.to belong_to(:user) }
@@ -12,13 +14,11 @@ RSpec.describe Ml::Candidate do
describe '#new' do
it 'iid is not null' do
- expect(create(:ml_candidates).iid).not_to be_nil
+ expect(candidate.iid).not_to be_nil
end
end
- describe 'by_project_id_and_iid' do
- let_it_be(:candidate) { create(:ml_candidates) }
-
+ describe '#by_project_id_and_iid' do
let(:project_id) { candidate.experiment.project_id }
let(:iid) { candidate.iid }
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
index e300f82d290..789bb3aa88a 100644
--- a/spec/models/ml/experiment_spec.rb
+++ b/spec/models/ml/experiment_spec.rb
@@ -3,16 +3,19 @@
require 'spec_helper'
RSpec.describe Ml::Experiment do
+ let_it_be(:exp) { create(:ml_experiments) }
+ let_it_be(:exp2) { create(:ml_experiments, project: exp.project) }
+
+ let(:iid) { exp.iid }
+ let(:exp_name) { exp.name }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:candidates) }
end
- describe '#by_project_id_and_iid?' do
- let(:exp) { create(:ml_experiments) }
- let(:iid) { exp.iid }
-
+ describe '#by_project_id_and_iid' do
subject { described_class.by_project_id_and_iid(exp.project_id, iid) }
context 'if exists' do
@@ -26,10 +29,7 @@ RSpec.describe Ml::Experiment do
end
end
- describe '#by_project_id_and_name?' do
- let(:exp) { create(:ml_experiments) }
- let(:exp_name) { exp.name }
-
+ describe '#by_project_id_and_name' do
subject { described_class.by_project_id_and_name(exp.project_id, exp_name) }
context 'if exists' do
@@ -43,20 +43,17 @@ RSpec.describe Ml::Experiment do
end
end
- describe '#has_record?' do
- let(:exp) { create(:ml_experiments) }
- let(:exp_name) { exp.name }
+ describe '#by_project_id' do
+ let(:project_id) { exp.project_id }
- subject { described_class.has_record?(exp.project_id, exp_name) }
+ subject { described_class.by_project_id(project_id) }
- context 'if exists' do
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to match_array([exp, exp2]) }
- context 'if does not exist' do
- let(:exp_name) { 'hello' }
+ context 'when project does not have experiment' do
+ let(:project_id) { non_existing_record_iid }
- it { is_expected.to be_falsey }
+ it { is_expected.to be_empty }
end
end
end
diff --git a/spec/models/namespace/aggregation_schedule_spec.rb b/spec/models/namespace/aggregation_schedule_spec.rb
index 38bf8089411..45b66fa12dd 100644
--- a/spec/models/namespace/aggregation_schedule_spec.rb
+++ b/spec/models/namespace/aggregation_schedule_spec.rb
@@ -5,8 +5,24 @@ require 'spec_helper'
RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state, type: :model do
include ExclusiveLeaseHelpers
+ let(:default_timeout) { described_class.default_lease_timeout }
+
it { is_expected.to belong_to :namespace }
+ describe "#default_lease_timeout" do
+ subject(:default_lease_timeout) { default_timeout }
+
+ it { is_expected.to eq 30.minutes.to_i }
+
+ context 'when remove_namespace_aggregator_delay FF is disabled' do
+ before do
+ stub_feature_flags(remove_namespace_aggregator_delay: false)
+ end
+
+ it { is_expected.to eq 1.hour.to_i }
+ end
+ end
+
describe '#schedule_root_storage_statistics' do
let(:namespace) { create(:namespace) }
let(:aggregation_schedule) { namespace.build_aggregation_schedule }
@@ -14,7 +30,7 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
context "when we can't obtain the lease" do
it 'does not schedule the workers' do
- stub_exclusive_lease_taken(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease_taken(lease_key, timeout: default_timeout)
expect(Namespaces::RootStatisticsWorker)
.not_to receive(:perform_async)
@@ -28,20 +44,20 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
context 'when we can obtain the lease' do
it 'schedules a root storage statistics after create' do
- stub_exclusive_lease(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease(lease_key, timeout: default_timeout)
expect(Namespaces::RootStatisticsWorker)
.to receive(:perform_async).once
expect(Namespaces::RootStatisticsWorker)
.to receive(:perform_in).once
- .with(described_class::DEFAULT_LEASE_TIMEOUT, aggregation_schedule.namespace_id)
+ .with(default_timeout, aggregation_schedule.namespace_id)
aggregation_schedule.save!
end
it 'does not release the lease' do
- stub_exclusive_lease(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease(lease_key, timeout: default_timeout)
aggregation_schedule.save!
@@ -58,7 +74,7 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
expect(Namespaces::RootStatisticsWorker)
.to receive(:perform_in).once
- .with(described_class::DEFAULT_LEASE_TIMEOUT, aggregation_schedule.namespace_id)
+ .with(default_timeout, aggregation_schedule.namespace_id)
.and_return(nil)
# Scheduling workers for the first time
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index 4308c8c06bc..2584fa597ad 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -85,4 +85,13 @@ RSpec.describe Namespace::PackageSetting do
end
end
end
+
+ describe 'package forwarding attributes' do
+ %i[maven_package_requests_forwarding
+ pypi_package_requests_forwarding
+ npm_package_requests_forwarding].each do |attribute|
+ it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: attribute,
+ settings_association: :package_settings
+ end
+ end
end
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index 4ac248802b8..a4446bfedd1 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -177,4 +177,8 @@ RSpec.describe NamespaceSetting, type: :model do
end
end
end
+
+ describe '#delayed_project_removal' do
+ it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: :delayed_project_removal
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 2e8d22cb9db..c6d028af22d 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -362,6 +362,9 @@ RSpec.describe Namespace do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.allow_nil }
it { is_expected.to delegate_method(:avatar_url).to(:owner).allow_nil }
it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
+ it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:package_settings) }
+ it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:package_settings) }
+ it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:package_settings) }
it do
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=).to(:namespace_settings)
@@ -1036,7 +1039,9 @@ RSpec.describe Namespace do
let(:pages_dir) { File.join(TestEnv.pages_path) }
def expect_project_directories_at(namespace_path, with_pages: true)
- expected_repository_path = File.join(TestEnv.repos_path, namespace_path, 'the-project.git')
+ expected_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, namespace_path, 'the-project.git')
+ end
expected_upload_path = File.join(uploads_dir, namespace_path, 'the-project')
expected_pages_path = File.join(pages_dir, namespace_path, 'the-project')
@@ -1046,15 +1051,19 @@ RSpec.describe Namespace do
end
before do
- FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project.full_path}.git"))
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project.full_path}.git"))
+ end
FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
end
after do
- FileUtils.remove_entry(File.join(TestEnv.repos_path, parent.full_path), true)
- FileUtils.remove_entry(File.join(TestEnv.repos_path, new_parent.full_path), true)
- FileUtils.remove_entry(File.join(TestEnv.repos_path, child.full_path), true)
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ FileUtils.remove_entry(File.join(TestEnv.repos_path, parent.full_path), true)
+ FileUtils.remove_entry(File.join(TestEnv.repos_path, new_parent.full_path), true)
+ FileUtils.remove_entry(File.join(TestEnv.repos_path, child.full_path), true)
+ end
FileUtils.remove_entry(File.join(uploads_dir, project.full_path), true)
FileUtils.remove_entry(pages_dir, true)
end
@@ -1962,7 +1971,7 @@ RSpec.describe Namespace do
it 'returns the virtual domain' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to eq("pages_domain_for_namespace_#{namespace.root_ancestor.id}")
+ expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{namespace.root_ancestor.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 1fce1f97dcb..670a6237788 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -1839,6 +1839,12 @@ RSpec.describe Note do
let(:note) { create(:note, :system, :on_issue, note: original_note_body) }
it { is_expected.to eq(expected_text_replacement) }
+
+ context 'when note and cache are null (happens in bulk insert)' do
+ let(:note) { build(:note, :system, :on_issue, note: nil, note_html: nil).tap { |note| note.save!(validate: false) } }
+
+ it { is_expected.to be_in([nil, '']) }
+ end
end
end
end
@@ -1868,7 +1874,7 @@ RSpec.describe Note do
let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as completed</p>' }
before do
- note.update_columns(note_html: unchanged_note_body)
+ note.update_columns(note_html: unchanged_note_body) unless note.note.nil?
end
end
@@ -1878,7 +1884,7 @@ RSpec.describe Note do
let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as incomplete</p>' }
before do
- note.update_columns(note_html: unchanged_note_body)
+ note.update_columns(note_html: unchanged_note_body) unless note.note.nil?
end
end
end
diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb
index 8105262aada..068166ebb0d 100644
--- a/spec/models/notification_recipient_spec.rb
+++ b/spec/models/notification_recipient_spec.rb
@@ -57,16 +57,6 @@ RSpec.describe NotificationRecipient do
it 'returns false' do
expect(recipient.notifiable?).to eq(false)
end
-
- context 'when block_emails_with_failures is disabled' do
- before do
- stub_feature_flags(block_emails_with_failures: false)
- end
-
- it 'returns true' do
- expect(recipient.notifiable?).to eq(true)
- end
- end
end
context 'with temporary failures' do
@@ -77,16 +67,6 @@ RSpec.describe NotificationRecipient do
it 'returns false' do
expect(recipient.notifiable?).to eq(false)
end
-
- context 'when block_emails_with_failures is disabled' do
- before do
- stub_feature_flags(block_emails_with_failures: false)
- end
-
- it 'returns true' do
- expect(recipient.notifiable?).to eq(true)
- end
- end
end
end
end
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index de1b9d2c855..949f92b3b2a 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -130,14 +130,15 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when the strategy name is flexibleRollout' do
valid_parameters = { rollout: '40', groupId: 'mygroup', stickiness: 'default' }
- where(invalid_parameters: [
- nil,
- {},
- *valid_parameters.to_a.combination(1).to_a.map { |p| p.to_h },
- *valid_parameters.to_a.combination(2).to_a.map { |p| p.to_h },
- { **valid_parameters, userIds: '4' },
- { **valid_parameters, extra: nil }
- ])
+ where(
+ invalid_parameters: [
+ nil,
+ {},
+ *valid_parameters.to_a.combination(1).to_a.map { |p| p.to_h },
+ *valid_parameters.to_a.combination(2).to_a.map { |p| p.to_h },
+ { **valid_parameters, userIds: '4' },
+ { **valid_parameters, extra: nil }
+ ])
with_them do
it 'must have valid parameters for the strategy' do
strategy = build(:operations_strategy,
@@ -180,9 +181,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq([
- 'rollout must be a string between 0 and 100 inclusive'
- ])
+ expect(strategy.errors[:parameters]).to eq(['rollout must be a string between 0 and 100 inclusive'])
end
end
@@ -243,9 +242,8 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq([
- 'stickiness parameter must be default, userId, sessionId, or random'
- ])
+ expect(strategy.errors[:parameters]).to eq(
+ ['stickiness parameter must be default, userId, sessionId, or random'])
end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index fb88dbb4212..0edb04224a3 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -904,6 +904,14 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to match_array([pypi_package]) }
end
+ describe '.with_case_insensitive_version' do
+ let_it_be(:nuget_package) { create(:nuget_package, version: '1.0.0-ABC') }
+
+ subject { described_class.with_case_insensitive_version('1.0.0-abC') }
+
+ it { is_expected.to match_array([nuget_package]) }
+ end
+
context 'status scopes' do
let_it_be(:default_package) { create(:maven_package, :default) }
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
diff --git a/spec/models/packages/rpm/repository_file_spec.rb b/spec/models/packages/rpm/repository_file_spec.rb
new file mode 100644
index 00000000000..34347793dd8
--- /dev/null
+++ b/spec/models/packages/rpm/repository_file_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryFile, type: :model do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:repository_file) { create(:rpm_repository_file) }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ end
+
+ context 'when updating project statistics' do
+ context 'when the package file has an explicit size' do
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:rpm_repository_file, size: 42) }
+ end
+ end
+
+ context 'when the package file does not have a size' do
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:rpm_repository_file, size: nil) }
+ end
+ end
+ end
+
+ context 'with status scopes' do
+ let_it_be(:pending_destruction_repository_package_file) do
+ create(:rpm_repository_file, :pending_destruction)
+ end
+
+ describe '.with_status' do
+ subject { described_class.with_status(:pending_destruction) }
+
+ it { is_expected.to contain_exactly(pending_destruction_repository_package_file) }
+ end
+ end
+end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index b50bfaed528..463ec904e9a 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -563,7 +563,7 @@ RSpec.describe PagesDomain do
it 'returns the virtual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to eq("pages_domain_for_project_#{project.id}")
+ expect(virtual_domain.cache_key).to match(/pages_domain_for_project_#{project.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 5bce6a2cc3f..67e7d444d25 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -105,6 +105,31 @@ RSpec.describe PersonalAccessToken do
end
end
+ describe '.last_used_before' do
+ context 'last_used_*' do
+ let_it_be(:date) { DateTime.new(2022, 01, 01) }
+ let_it_be(:token) { create(:personal_access_token, last_used_at: date) }
+ # This token should never appear in the following tests; its absence indicates that filtering was done correctly
+ let_it_be(:never_used_token) { create(:personal_access_token) }
+
+ describe '.last_used_before' do
+ it 'returns personal access tokens used before the specified date only' do
+ expect(described_class.last_used_before(date + 1)).to contain_exactly(token)
+ end
+ end
+
+ it 'does not return a token that was never used' do
+ expect(described_class.last_used_before(date + 1)).not_to include(never_used_token)
+ end
+
+ describe '.last_used_after' do
+ it 'returns personal access tokens used after the specified date only' do
+ expect(described_class.last_used_after(date - 1)).to contain_exactly(token)
+ end
+ end
+ end
+ end
+
describe '.last_used_before_or_unused' do
let(:last_used_at) { 1.month.ago.beginning_of_hour }
let!(:unused_token) { create(:personal_access_token) }
diff --git a/spec/models/pool_repository_spec.rb b/spec/models/pool_repository_spec.rb
index bf88e941540..9861e832bef 100644
--- a/spec/models/pool_repository_spec.rb
+++ b/spec/models/pool_repository_spec.rb
@@ -26,8 +26,6 @@ RSpec.describe PoolRepository do
describe '#unlink_repository' do
let(:pool) { create(:pool_repository, :ready) }
- let(:repository_path) { File.join(TestEnv.repos_path, pool.source_project.repository.relative_path) }
- let(:alternates_file) { File.join(repository_path, 'objects', 'info', 'alternates') }
before do
pool.link_repository(pool.source_project.repository)
@@ -36,19 +34,17 @@ RSpec.describe PoolRepository do
context 'when the last member leaves' do
it 'schedules pool removal' do
expect(::ObjectPool::DestroyWorker).to receive(:perform_async).with(pool.id).and_call_original
+ expect(pool.source_project.repository).to receive(:disconnect_alternates).and_call_original
pool.unlink_repository(pool.source_project.repository)
-
- expect(File).not_to exist(alternates_file)
end
end
context 'when skipping disconnect' do
it 'does not change the alternates file' do
- before = File.read(alternates_file)
- pool.unlink_repository(pool.source_project.repository, disconnect: false)
+ expect(pool.source_project.repository).not_to receive(:disconnect_alternates)
- expect(File.read(alternates_file)).to eq(before)
+ pool.unlink_repository(pool.source_project.repository, disconnect: false)
end
end
@@ -58,10 +54,9 @@ RSpec.describe PoolRepository do
pool.link_repository(other_project.repository)
expect(::ObjectPool::DestroyWorker).not_to receive(:perform_async).with(pool.id)
+ expect(pool.source_project.repository).to receive(:disconnect_alternates).and_call_original
pool.unlink_repository(pool.source_project.repository)
-
- expect(File).not_to exist(alternates_file)
end
end
end
diff --git a/spec/models/preloaders/project_root_ancestor_preloader_spec.rb b/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
index 30036a6a033..bb0de24abe5 100644
--- a/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
+++ b/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Preloaders::ProjectRootAncestorPreloader do
let_it_be(:root_parent1) { create(:group, :private, name: 'root-1', path: 'root-1') }
- let_it_be(:root_parent2) { create(:group, :private, name: 'root-2', path: 'root-2') }
+ let_it_be(:root_parent2) { create(:group, name: 'root-2', path: 'root-2') }
let_it_be(:guest_project) { create(:project, name: 'public guest', path: 'public-guest') }
let_it_be(:private_maintainer_project) do
create(:project, :private, name: 'b private maintainer', path: 'b-private-maintainer', namespace: root_parent1)
@@ -15,7 +15,7 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
end
let_it_be(:public_maintainer_project) do
- create(:project, :private, name: 'a public maintainer', path: 'a-public-maintainer', namespace: root_parent2)
+ create(:project, name: 'a public maintainer', path: 'a-public-maintainer', namespace: root_parent2)
end
let(:root_query_regex) { /\ASELECT.+FROM "namespaces" WHERE "namespaces"."id" = \d+/ }
@@ -36,20 +36,20 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
it 'strong_memoizes the correct root_ancestor' do
pristine_projects.each do |project|
- expected_parent_id = project.root_ancestor&.id
+ preloaded_parent_id = project.root_ancestor&.id
- expect(project.parent_id).to eq(expected_parent_id)
+ expect(preloaded_parent_id).to eq(project.parent_id)
end
end
end
context 'when use_traversal_ids FF is enabled' do
context 'when the preloader is used' do
- before do
- preload_ancestors
- end
-
context 'when no additional preloads are provided' do
+ before do
+ preload_ancestors(:group)
+ end
+
it_behaves_like 'executes N matching DB queries', 0
end
@@ -57,6 +57,10 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
let(:additional_preloads) { [:route] }
let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ }
+ before do
+ preload_ancestors
+ end
+
it_behaves_like 'executes N matching DB queries', 0, :full_path
end
end
@@ -64,6 +68,17 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
context 'when the preloader is not used' do
it_behaves_like 'executes N matching DB queries', 4
end
+
+ context 'when using a :group sti name and passing projects in a user namespace' do
+ let(:projects) { [private_developer_project] }
+ let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
+
+ it 'does not load a nil value for root_ancestor' do
+ preload_ancestors(:group)
+
+ expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
+ end
+ end
end
context 'when use_traversal_ids FF is disabled' do
@@ -91,9 +106,22 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
context 'when the preloader is not used' do
it_behaves_like 'executes N matching DB queries', 4
end
+
+ context 'when using a :group sti name and passing projects in a user namespace' do
+ let(:projects) { [private_developer_project] }
+ let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
+
+ it 'does not load a nil value for root_ancestor' do
+ preload_ancestors(:group)
+
+ expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
+ end
+ end
end
- def preload_ancestors
- described_class.new(pristine_projects, :namespace, additional_preloads).execute
+ private
+
+ def preload_ancestors(namespace_sti_name = :namespace)
+ described_class.new(pristine_projects, namespace_sti_name, additional_preloads).execute
end
end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 14220007966..55fe28ceb6f 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -92,20 +92,192 @@ RSpec.describe ProjectAuthorization do
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
- let(:per_batch_size) { 2 }
-
- it 'inserts the rows in batches, as per the `per_batch` size' do
- attributes = [
+ let(:attributes) do
+ [
{ user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER },
{ user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER },
{ user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER }
]
+ end
- expect(described_class).to receive(:insert_all).twice.and_call_original
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ stub_feature_flags(enable_minor_delay_during_project_authorizations_refresh: true)
+ end
- described_class.insert_all_in_batches(attributes, per_batch_size)
+ shared_examples_for 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch' do
+ specify do
+ expect(described_class).not_to receive(:sleep)
- expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
+ described_class.insert_all_in_batches(attributes, per_batch_size)
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
+ end
+ end
+
+ context 'when the total number of records to be inserted is greater than the batch size' do
+ let(:per_batch_size) { 2 }
+
+ it 'inserts the rows in batches, as per the `per_batch` size, with a delay between each batch' do
+ expect(described_class).to receive(:insert_all).twice.and_call_original
+ expect(described_class).to receive(:sleep).twice
+
+ described_class.insert_all_in_batches(attributes, per_batch_size)
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
+ end
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch'
+ end
+ end
+
+ context 'when the total number of records to be inserted is less than the batch size' do
+ let(:per_batch_size) { 5 }
+
+ it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch'
+ end
+ end
+
+ describe '.delete_all_in_batches_for_project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+ let_it_be(:user_4) { create(:user) }
+
+ let(:user_ids) { [user_1.id, user_2.id, user_3.id] }
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ stub_feature_flags(enable_minor_delay_during_project_authorizations_refresh: true)
+ end
+
+ before_all do
+ create(:project_authorization, user: user_1, project: project)
+ create(:project_authorization, user: user_2, project: project)
+ create(:project_authorization, user: user_3, project: project)
+ create(:project_authorization, user: user_4, project: project)
+ end
+
+ shared_examples_for 'removes the project authorizations of the specified users in the current project, without a delay between each batch' do
+ specify do
+ expect(described_class).not_to receive(:sleep)
+
+ described_class.delete_all_in_batches_for_project(
+ project: project,
+ user_ids: user_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
+ end
+ end
+
+ context 'when the total number of records to be removed is greater than the batch size' do
+ let(:per_batch_size) { 2 }
+
+ it 'removes the project authorizations of the specified users in the current project, with a delay between each batch' do
+ expect(described_class).to receive(:sleep).twice
+
+ described_class.delete_all_in_batches_for_project(
+ project: project,
+ user_ids: user_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
+ end
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'removes the project authorizations of the specified users in the current project, without a delay between each batch'
+ end
+ end
+
+ context 'when the total number of records to be removed is less than the batch size' do
+ let(:per_batch_size) { 5 }
+
+ it_behaves_like 'removes the project authorizations of the specified users in the current project, without a delay between each batch'
+ end
+ end
+
+ describe '.delete_all_in_batches_for_user' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
+
+ let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ stub_feature_flags(enable_minor_delay_during_project_authorizations_refresh: true)
+ end
+
+ before_all do
+ create(:project_authorization, user: user, project: project_1)
+ create(:project_authorization, user: user, project: project_2)
+ create(:project_authorization, user: user, project: project_3)
+ create(:project_authorization, user: user, project: project_4)
+ end
+
+ shared_examples_for 'removes the project authorizations of the specified projects from the current user, without a delay between each batch' do
+ specify do
+ expect(described_class).not_to receive(:sleep)
+
+ described_class.delete_all_in_batches_for_user(
+ user: user,
+ project_ids: project_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
+ end
+ end
+
+ context 'when the total number of records to be removed is greater than the batch size' do
+ let(:per_batch_size) { 2 }
+
+ it 'removes the project authorizations of the specified projects from the current user, with a delay between each batch' do
+ expect(described_class).to receive(:sleep).twice
+
+ described_class.delete_all_in_batches_for_user(
+ user: user,
+ project_ids: project_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
+ end
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'removes the project authorizations of the specified projects from the current user, without a delay between each batch'
+ end
+ end
+
+ context 'when the total number of records to be removed is less than the batch size' do
+ let(:per_batch_size) { 5 }
+
+ it_behaves_like 'removes the project authorizations of the specified projects from the current user, without a delay between each batch'
end
end
end
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index 8b95b86b14b..f141b8e83d6 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -47,9 +47,9 @@ RSpec.describe ProjectGroupLink do
it 'returns all records which are greater than Guests access' do
expect(described_class.non_guests).to match_array([
- project_group_link_reporter,
- project_group_link_developer,
- project_group_link_maintainer
+ project_group_link_reporter,
+ project_group_link_developer,
+ project_group_link_maintainer
])
end
end
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index a09ae7ec7ae..5730ca58e9e 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -21,6 +21,10 @@ RSpec.describe ProjectSetting, type: :model do
it { is_expected.not_to allow_value(nil).for(:target_platforms) }
it { is_expected.to allow_value([]).for(:target_platforms) }
+ it { is_expected.not_to allow_value(nil).for(:suggested_reviewers_enabled) }
+ it { is_expected.to allow_value(true).for(:suggested_reviewers_enabled) }
+ it { is_expected.to allow_value(false).for(:suggested_reviewers_enabled) }
+
it 'allows any combination of the allowed target platforms' do
valid_target_platform_combinations.each do |target_platforms|
expect(subject).to allow_value(target_platforms).for(:target_platforms)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 99b984ff547..75887e49dc9 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -28,7 +28,6 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:issues) }
it { is_expected.to have_many(:incident_management_issuable_escalation_statuses).through(:issues).inverse_of(:project).class_name('IncidentManagement::IssuableEscalationStatus') }
it { is_expected.to have_many(:milestones) }
- it { is_expected.to have_many(:iterations) }
it { is_expected.to have_many(:project_members).dependent(:delete_all) }
it { is_expected.to have_many(:users).through(:project_members) }
it { is_expected.to have_many(:requesters).dependent(:delete_all) }
@@ -149,6 +148,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
it { is_expected.to have_one(:build_artifacts_size_refresh).class_name('Projects::BuildArtifactsSizeRefresh') }
it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout').with_foreign_key(:project_id) }
+ it { is_expected.to have_many(:pipeline_metadata).class_name('Ci::PipelineMetadata') }
+ it { is_expected.to have_many(:incident_management_timeline_event_tags).class_name('IncidentManagement::TimelineEventTag') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -845,6 +846,8 @@ RSpec.describe Project, factory_default: :keep do
end
describe 'delegation' do
+ let_it_be(:project) { create(:project) }
+
[:add_guest, :add_reporter, :add_developer, :add_maintainer, :add_member, :add_members].each do |method|
it { is_expected.to delegate_method(method).to(:team) }
end
@@ -859,6 +862,9 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:namespace) }
+ it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:namespace) }
+ it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:namespace) }
describe 'read project settings' do
%i(
@@ -884,8 +890,24 @@ RSpec.describe Project, factory_default: :keep do
end
include_examples 'ci_cd_settings delegation' do
- # Skip attributes defined in EE code
+ let(:attributes_with_prefix) do
+ {
+ 'group_runners_enabled' => '',
+ 'default_git_depth' => 'ci_',
+ 'forward_deployment_enabled' => 'ci_',
+ 'keep_latest_artifact' => '',
+ 'restrict_user_defined_variables' => '',
+ 'runner_token_expiration_interval' => '',
+ 'separated_caches' => 'ci_',
+ 'opt_in_jwt' => 'ci_',
+ 'allow_fork_pipelines_to_run_in_parent_project' => 'ci_',
+ 'inbound_job_token_scope_enabled' => 'ci_',
+ 'job_token_scope_enabled' => 'ci_outbound_'
+ }
+ end
+
let(:exclude_attributes) do
+ # Skip attributes defined in EE code
%w(
merge_pipelines_enabled
merge_trains_enabled
@@ -906,12 +928,18 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#ci_job_token_scope_enabled?' do
- it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ describe '#ci_outbound_job_token_scope_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_outbound_' do
let(:delegated_method) { :job_token_scope_enabled? }
end
end
+ describe '#ci_inbound_job_token_scope_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :inbound_job_token_scope_enabled? }
+ end
+ end
+
describe '#restrict_user_defined_variables?' do
it_behaves_like 'a ci_cd_settings predicate method' do
let(:delegated_method) { :restrict_user_defined_variables? }
@@ -937,23 +965,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#remove_project_authorizations' do
- let_it_be(:project) { create(:project) }
- let_it_be(:user_1) { create(:user) }
- let_it_be(:user_2) { create(:user) }
- let_it_be(:user_3) { create(:user) }
-
- it 'removes the project authorizations of the specified users in the current project' do
- create(:project_authorization, user: user_1, project: project)
- create(:project_authorization, user: user_2, project: project)
- create(:project_authorization, user: user_3, project: project)
-
- project.remove_project_authorizations([user_1.id, user_2.id])
-
- expect(project.project_authorizations.pluck(:user_id)).not_to include(user_1.id, user_2.id)
- end
- end
-
describe '#merge_commit_template_or_default' do
let_it_be(:project) { create(:project) }
@@ -1426,7 +1437,7 @@ RSpec.describe Project, factory_default: :keep do
it "is false if used other tracker" do
# NOTE: The current nature of this factory requires persistence
- project = create(:redmine_project)
+ project = create(:project, :with_redmine_integration)
expect(project.default_issues_tracker?).to be_falsey
end
@@ -1471,7 +1482,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#external_issue_tracker' do
it 'sets Project#has_external_issue_tracker when it is nil' do
project_with_no_tracker = create(:project, has_external_issue_tracker: nil)
- project_with_tracker = create(:redmine_project, has_external_issue_tracker: nil)
+ project_with_tracker = create(:project, :with_redmine_integration, has_external_issue_tracker: nil)
expect do
project_with_no_tracker.external_issue_tracker
@@ -1490,7 +1501,7 @@ RSpec.describe Project, factory_default: :keep do
end
it 'retrieves external_issue_tracker querying services and cache it when there is external issue tracker' do
- project = create(:redmine_project)
+ project = create(:project, :with_redmine_integration)
expect(project).to receive(:integrations).once.and_call_original
2.times { expect(project.external_issue_tracker).to be_a_kind_of(Integrations::Redmine) }
@@ -4620,6 +4631,7 @@ RSpec.describe Project, factory_default: :keep do
describe '.filter_by_feature_visibility' do
include_context 'ProjectPolicyTable context'
include ProjectHelpers
+ include UserHelpers
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, namespace: group) }
@@ -5761,40 +5773,40 @@ RSpec.describe Project, factory_default: :keep do
describe '#has_active_hooks?' do
let_it_be_with_refind(:project) { create(:project) }
- it { expect(project.has_active_hooks?).to be_falsey }
+ it { expect(project.has_active_hooks?).to eq(false) }
it 'returns true when a matching push hook exists' do
create(:project_hook, push_events: true, project: project)
- expect(project.has_active_hooks?(:merge_request_events)).to be_falsey
- expect(project.has_active_hooks?).to be_truthy
+ expect(project.has_active_hooks?(:merge_request_hooks)).to eq(false)
+ expect(project.has_active_hooks?).to eq(true)
end
it 'returns true when a matching system hook exists' do
create(:system_hook, push_events: true)
- expect(project.has_active_hooks?(:merge_request_events)).to be_falsey
- expect(project.has_active_hooks?).to be_truthy
+ expect(project.has_active_hooks?(:merge_request_hooks)).to eq(false)
+ expect(project.has_active_hooks?).to eq(true)
end
it 'returns true when a plugin exists' do
expect(Gitlab::FileHook).to receive(:any?).twice.and_return(true)
- expect(project.has_active_hooks?(:merge_request_events)).to be_truthy
- expect(project.has_active_hooks?).to be_truthy
+ expect(project.has_active_hooks?(:merge_request_hooks)).to eq(true)
+ expect(project.has_active_hooks?).to eq(true)
end
end
describe '#has_active_integrations?' do
let_it_be(:project) { create(:project) }
- it { expect(project.has_active_integrations?).to be_falsey }
+ it { expect(project.has_active_integrations?).to eq(false) }
it 'returns true when a matching service exists' do
create(:custom_issue_tracker_integration, push_events: true, merge_requests_events: false, project: project)
- expect(project.has_active_integrations?(:merge_request_hooks)).to be_falsey
- expect(project.has_active_integrations?).to be_truthy
+ expect(project.has_active_integrations?(:merge_request_hooks)).to eq(false)
+ expect(project.has_active_integrations?).to eq(true)
end
end
@@ -8308,16 +8320,6 @@ RSpec.describe Project, factory_default: :keep do
expect(project.packages_policy_subject).to be_a(Packages::Policies::Project)
expect(project.packages_policy_subject.project).to eq(project)
end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(read_package_policy_rule: false)
- end
-
- it 'returns project' do
- expect(project.packages_policy_subject).to eq(project)
- end
- end
end
describe '#destroy_deployment_by_id' do
@@ -8356,6 +8358,22 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#can_suggest_reviewers?' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:can_suggest_reviewers) { project.can_suggest_reviewers? }
+
+ it { is_expected.to be(false) }
+ end
+
+ describe '#suggested_reviewers_available?' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:suggested_reviewers_available) { project.suggested_reviewers_available? }
+
+ it { is_expected.to be(false) }
+ end
+
private
def finish_job(export_job)
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index b2158baa670..9de31ea66e4 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -98,6 +98,8 @@ RSpec.describe ProjectStatistics do
end
describe '#refresh!' do
+ subject(:refresh_statistics) { statistics.refresh! }
+
before do
allow(statistics).to receive(:update_commit_count)
allow(statistics).to receive(:update_repository_size)
@@ -111,7 +113,7 @@ RSpec.describe ProjectStatistics do
context "without arguments" do
before do
- statistics.refresh!
+ refresh_statistics
end
it "sums all counters" do
@@ -146,7 +148,7 @@ RSpec.describe ProjectStatistics do
expect(project.repository.exists?).to be_falsey
expect(project.wiki.repository.exists?).to be_falsey
- statistics.refresh!
+ refresh_statistics
expect(statistics).to have_received(:update_commit_count)
expect(statistics).to have_received(:update_repository_size)
@@ -167,14 +169,12 @@ RSpec.describe ProjectStatistics do
let(:project) { create(:project, :repository, :wiki_repo) }
before do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.rm_rf(project.repository.path)
- FileUtils.rm_rf(project.wiki.repository.path)
- end
+ project.repository.remove
+ project.wiki.repository.remove
end
it 'does not crash' do
- statistics.refresh!
+ refresh_statistics
expect(statistics).to have_received(:update_commit_count)
expect(statistics).to have_received(:update_repository_size)
@@ -209,7 +209,7 @@ RSpec.describe ProjectStatistics do
expect(Namespaces::ScheduleAggregationWorker)
.to receive(:perform_async)
- statistics.refresh!
+ refresh_statistics
end
end
end
@@ -238,9 +238,13 @@ RSpec.describe ProjectStatistics do
expect(Namespaces::ScheduleAggregationWorker)
.not_to receive(:perform_async)
- statistics.refresh!
+ refresh_statistics
end
end
+
+ it_behaves_like 'obtaining lease to update database' do
+ let(:model) { statistics }
+ end
end
describe '#update_commit_count' do
@@ -408,6 +412,8 @@ RSpec.describe ProjectStatistics do
end
describe '#refresh_storage_size!' do
+ subject(:refresh_storage_size) { statistics.refresh_storage_size! }
+
it 'recalculates storage size from its components and save it' do
statistics.update_columns(
repository_size: 2,
@@ -422,7 +428,29 @@ RSpec.describe ProjectStatistics do
storage_size: 0
)
- expect { statistics.refresh_storage_size! }.to change { statistics.storage_size }.from(0).to(28)
+ expect { refresh_storage_size }.to change { statistics.reload.storage_size }.from(0).to(28)
+ end
+
+ context 'when nullable columns are nil' do
+ before do
+ statistics.update_columns(
+ repository_size: 2,
+ wiki_size: nil,
+ storage_size: 0
+ )
+ end
+
+ it 'does not raise any error' do
+ expect { refresh_storage_size }.not_to raise_error
+ end
+
+ it 'recalculates storage size from its components' do
+ expect { refresh_storage_size }.to change { statistics.reload.storage_size }.from(0).to(2)
+ end
+ end
+
+ it_behaves_like 'obtaining lease to update database' do
+ let(:model) { statistics }
end
end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 54a90ca6049..b88367b9ca2 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -435,4 +435,28 @@ RSpec.describe ProtectedBranch do
expect(described_class.downcase_humanized_name).to eq 'protected branch'
end
end
+
+ describe '.default_branch?' do
+ before do
+ allow(subject.project).to receive(:default_branch).and_return(branch)
+ end
+
+ context 'when the name matches the default branch' do
+ let(:branch) { subject.name }
+
+ it { is_expected.to be_default_branch }
+ end
+
+ context 'when the name does not match the default branch' do
+ let(:branch) { "#{subject.name}qwerty" }
+
+ it { is_expected.not_to be_default_branch }
+ end
+
+ context 'when a wildcard name matches the default branch' do
+ let(:branch) { "#{subject.name}*" }
+
+ it { is_expected.not_to be_default_branch }
+ end
+ end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 4e386bf584f..6fbf69ec23a 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -8,12 +8,12 @@ RSpec.describe Repository do
TestBlob = Struct.new(:path)
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:broken_repository) { create(:project, :broken_storage).repository }
- let(:user) { create(:user) }
let(:git_user) { Gitlab::Git::User.from_gitlab(user) }
-
let(:message) { 'Test message' }
let(:merge_commit) do
@@ -40,16 +40,20 @@ RSpec.describe Repository do
end
describe '#branch_names_contains' do
- let_it_be(:project) { create(:project, :repository) }
+ subject { repository.branch_names_contains(sample_commit.id, **opts) }
- let(:repository) { project.repository }
-
- subject { repository.branch_names_contains(sample_commit.id) }
+ let(:opts) { {} }
it { is_expected.to include('master') }
it { is_expected.not_to include('feature') }
it { is_expected.not_to include('fix') }
+ context 'when limit is provided' do
+ let(:opts) { { limit: 1 } }
+
+ it { is_expected.to match_array(["'test'"]) }
+ end
+
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
@@ -60,10 +64,18 @@ RSpec.describe Repository do
end
describe '#tag_names_contains' do
- subject { repository.tag_names_contains(sample_commit.id) }
+ subject { repository.tag_names_contains(sample_commit.id, **opts) }
+
+ let(:opts) { {} }
it { is_expected.to include('v1.1.0') }
it { is_expected.not_to include('v1.0.0') }
+
+ context 'when limit is provided' do
+ let(:opts) { { limit: 1 } }
+
+ it { is_expected.to match_array(['v1.1.0']) }
+ end
end
describe '#tags_sorted_by' do
@@ -359,6 +371,8 @@ RSpec.describe Repository do
end
describe '#commits' do
+ let_it_be(:project) { create(:project, :repository) }
+
context 'when neither the all flag nor a ref are specified' do
it 'returns every commit from default branch' do
expect(repository.commits(nil, limit: 60).size).to eq(37)
@@ -431,10 +445,6 @@ RSpec.describe Repository do
end
describe '#new_commits' do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:repository) { project.repository }
-
subject { repository.new_commits(rev) }
context 'when there are no new commits' do
@@ -498,6 +508,8 @@ RSpec.describe Repository do
end
describe '#commits_between' do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:commit) { project.commit }
it 'delegates to Gitlab::Git::Commit#between, returning decorated commits' do
@@ -614,6 +626,8 @@ RSpec.describe Repository do
end
describe '#merged_to_root_ref?' do
+ let_it_be(:project) { create(:project, :repository) }
+
context 'merged branch without ff' do
subject { repository.merged_to_root_ref?('branch-merged') }
@@ -843,14 +857,16 @@ RSpec.describe Repository do
end
describe "#create_dir" do
+ let_it_be(:project) { create(:project, :repository) }
+
it "commits a change that creates a new directory" do
expect do
- repository.create_dir(user, 'newdir',
+ repository.create_dir(user, 'newdir1',
message: 'Create newdir', branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
- newdir = repository.tree('master', 'newdir')
- expect(newdir.path).to eq('newdir')
+ newdir = repository.tree('master', 'newdir1')
+ expect(newdir.path).to eq('newdir1')
end
context "when committing to another project" do
@@ -858,7 +874,7 @@ RSpec.describe Repository do
it "creates a fork and commit to the forked project" do
expect do
- repository.create_dir(user, 'newdir',
+ repository.create_dir(user, 'newdir2',
message: 'Create newdir', branch_name: 'patch',
start_branch_name: 'master', start_project: forked_project)
end.to change { repository.count_commits(ref: 'master') }.by(0)
@@ -866,15 +882,15 @@ RSpec.describe Repository do
expect(repository.branch_exists?('patch')).to be_truthy
expect(forked_project.repository.branch_exists?('patch')).to be_falsy
- newdir = repository.tree('patch', 'newdir')
- expect(newdir.path).to eq('newdir')
+ newdir = repository.tree('patch', 'newdir2')
+ expect(newdir.path).to eq('newdir2')
end
end
context "when an author is specified" do
it "uses the given email/name to set the commit's author" do
expect do
- repository.create_dir(user, 'newdir',
+ repository.create_dir(user, 'newdir3',
message: 'Add newdir',
branch_name: 'master',
author_email: author_email, author_name: author_name)
@@ -987,6 +1003,8 @@ RSpec.describe Repository do
end
describe "#delete_file" do
+ let(:project) { create(:project, :repository) }
+
it 'removes file successfully' do
expect do
repository.delete_file(user, 'README',
@@ -1013,6 +1031,8 @@ RSpec.describe Repository do
end
describe "search_files_by_content" do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:results) { repository.search_files_by_content('feature', 'master') }
subject { results }
@@ -1248,6 +1268,8 @@ RSpec.describe Repository do
end
describe "#changelog", :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project, :repository) }
+
it 'accepts changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])
@@ -1280,6 +1302,8 @@ RSpec.describe Repository do
end
describe "#license_blob", :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project, :repository) }
+
before do
repository.delete_file(
user, 'LICENSE', message: 'Remove LICENSE', branch_name: 'master')
@@ -1323,7 +1347,9 @@ RSpec.describe Repository do
end
end
- describe '#license_key', :use_clean_rails_memory_store_caching do
+ describe '#license_key', :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
+
before do
repository.delete_file(user, 'LICENSE',
message: 'Remove LICENSE', branch_name: 'master')
@@ -1367,50 +1393,52 @@ RSpec.describe Repository do
end
end
- describe '#license' do
- before do
- repository.delete_file(user, 'LICENSE',
- message: 'Remove LICENSE', branch_name: 'master')
- end
+ [true, false].each do |ff|
+ context "with feature flag license_from_gitaly=#{ff}" do
+ before do
+ stub_feature_flags(license_from_gitaly: ff)
+ end
- it 'returns nil when no license is detected' do
- expect(repository.license).to be_nil
- end
+ describe '#license', :use_clean_rails_memory_store_caching, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
- it 'returns nil when the repository does not exist' do
- expect(repository).to receive(:exists?).and_return(false)
+ before do
+ repository.delete_file(user, 'LICENSE',
+ message: 'Remove LICENSE', branch_name: 'master')
+ end
- expect(repository.license).to be_nil
- end
+ it 'returns nil when no license is detected' do
+ expect(repository.license).to be_nil
+ end
- it 'returns nil when license_key is not recognized' do
- expect(repository).to receive(:license_key).twice.and_return('not-recognized')
- expect(Gitlab::ErrorTracking).to receive(:track_exception) do |ex|
- expect(ex).to be_a(Licensee::InvalidLicense)
- end
+ it 'returns nil when the repository does not exist' do
+ expect(repository).to receive(:exists?).and_return(false)
- expect(repository.license).to be_nil
- end
+ expect(repository.license).to be_nil
+ end
- it 'returns other when the content is not recognizable' do
- license = Licensee::License.new('other')
- repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
- message: 'Add LICENSE', branch_name: 'master')
+ it 'returns other when the content is not recognizable' do
+ repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
+ message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license).to eq(license)
- end
+ expect(repository.license_key).to eq('other')
+ end
- it 'returns the license' do
- license = Licensee::License.new('mit')
- repository.create_file(user, 'LICENSE',
- license.content,
- message: 'Add LICENSE', branch_name: 'master')
+ it 'returns the license' do
+ license = Licensee::License.new('mit')
+ repository.create_file(user, 'LICENSE',
+ license.content,
+ message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license).to eq(license)
+ expect(repository.license_key).to eq(license.key)
+ end
+ end
end
end
describe "#gitlab_ci_yml", :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project, :repository) }
+
it 'returns valid file' do
files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
@@ -1430,11 +1458,11 @@ RSpec.describe Repository do
end
describe '#ambiguous_ref?' do
- let(:ref) { 'ref' }
-
subject { repository.ambiguous_ref?(ref) }
context 'when ref is ambiguous' do
+ let(:ref) { 'ref' }
+
before do
repository.add_tag(project.creator, ref, 'master')
repository.add_branch(project.creator, ref, 'master')
@@ -1446,6 +1474,8 @@ RSpec.describe Repository do
end
context 'when ref is not ambiguous' do
+ let(:ref) { 'another_ref' }
+
before do
repository.add_tag(project.creator, ref, 'master')
end
@@ -1457,6 +1487,8 @@ RSpec.describe Repository do
end
describe '#has_ambiguous_refs?' do
+ let(:project) { create(:project, :repository) }
+
using RSpec::Parameterized::TableSyntax
where(:branch_names, :tag_names, :result) do
@@ -1484,6 +1516,7 @@ RSpec.describe Repository do
end
describe '#expand_ref' do
+ let(:project) { create(:project, :repository) }
let(:ref) { 'ref' }
subject { repository.expand_ref(ref) }
@@ -1520,6 +1553,7 @@ RSpec.describe Repository do
describe '#add_branch' do
let(:branch_name) { 'new_feature' }
let(:target) { 'master' }
+ let(:project) { create(:project, :repository) }
subject { repository.add_branch(user, branch_name, target) }
@@ -1604,6 +1638,8 @@ RSpec.describe Repository do
end
describe '#exists?' do
+ let(:project) { create(:project, :repository) }
+
it 'returns true when a repository exists' do
expect(repository.exists?).to be(true)
end
@@ -1624,6 +1660,8 @@ RSpec.describe Repository do
end
describe '#has_visible_content?' do
+ let(:project) { create(:project, :repository) }
+
it 'delegates to raw_repository when true' do
expect(repository.raw_repository).to receive(:has_visible_content?)
.and_return(true)
@@ -1690,6 +1728,8 @@ RSpec.describe Repository do
end
describe '#branch_names', :clean_gitlab_redis_cache do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:fake_branch_names) { ['foobar'] }
it 'gets cached across Repository instances' do
@@ -1706,6 +1746,7 @@ RSpec.describe Repository do
end
describe '#empty?' do
+ let(:project) { create(:project, :repository) }
let(:empty_repository) { create(:project_empty_repo).repository }
it 'returns true for an empty repository' do
@@ -1752,6 +1793,8 @@ RSpec.describe Repository do
end
describe '#root_ref' do
+ let(:project) { create(:project, :repository) }
+
it 'returns a branch name' do
expect(repository.root_ref).to be_an_instance_of(String)
end
@@ -1792,6 +1835,8 @@ RSpec.describe Repository do
describe '#expire_branch_cache' do
# This method is private but we need it for testing purposes. Sadly there's
# no other proper way of testing caching operations.
+ let_it_be(:project) { create(:project, :repository) }
+
let(:cache) { repository.send(:cache) }
it 'expires the cache for all branches' do
@@ -2003,6 +2048,7 @@ RSpec.describe Repository do
end
describe '#revert' do
+ let(:project) { create(:project, :repository) }
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
let(:message) { 'revert message' }
@@ -2039,6 +2085,7 @@ RSpec.describe Repository do
end
describe '#cherry_pick' do
+ let(:project) { create(:project, :repository) }
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
@@ -2174,7 +2221,8 @@ RSpec.describe Repository do
:contribution_guide,
:changelog,
:license_blob,
- :license_key,
+ :license_licensee,
+ :license_gitaly,
:gitignore,
:gitlab_ci_yml,
:branch_names,
@@ -2404,7 +2452,7 @@ RSpec.describe Repository do
end
it 'returns a Gitlab::Git::Tag object' do
- tag = repository.add_tag(user, '8.5', 'master', 'foo')
+ tag = repository.add_tag(user, '8.6', 'master', 'foo')
expect(tag).to be_a(Gitlab::Git::Tag)
end
@@ -2412,12 +2460,14 @@ RSpec.describe Repository do
context 'with an invalid target' do
it 'returns false' do
- expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
+ expect(repository.add_tag(user, '8.7', 'bar', 'foo')).to be false
end
end
end
describe '#rm_branch' do
+ let(:project) { create(:project, :repository) }
+
it 'removes a branch' do
expect(repository).to receive(:before_remove_branch)
expect(repository).to receive(:after_remove_branch)
@@ -2452,6 +2502,8 @@ RSpec.describe Repository do
end
describe '#find_tag' do
+ let_it_be(:project) { create(:project, :repository) }
+
before do
allow(Gitlab::GitalyClient).to receive(:call).and_call_original
end
@@ -2477,6 +2529,8 @@ RSpec.describe Repository do
end
describe '#avatar' do
+ let(:project) { create(:project, :repository) }
+
it 'returns nil if repo does not exist' do
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
@@ -2519,6 +2573,8 @@ RSpec.describe Repository do
end
describe '#xcode_project?' do
+ let(:project) { create(:project, :repository) }
+
before do
allow(repository).to receive(:tree).with(:head).and_return(double(:tree, trees: [tree]))
end
@@ -2654,7 +2710,7 @@ RSpec.describe Repository do
match[1].to_sym if match
end.compact
- expect(Repository::CACHED_METHODS + Repository::MEMOIZED_CACHED_METHODS).to include(*methods)
+ expect(Repository::CACHED_METHODS).to include(*methods)
end
end
@@ -2819,18 +2875,20 @@ RSpec.describe Repository do
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(readme_path license_blob license_key license))
+ .with(%i(readme_path license_blob license_licensee license_gitaly))
expect(repository).to receive(:readme_path)
expect(repository).to receive(:license_blob)
- expect(repository).to receive(:license_key)
- expect(repository).to receive(:license)
+ expect(repository).to receive(:license_licensee)
+ expect(repository).to receive(:license_gitaly)
repository.refresh_method_caches(%i(readme license))
end
end
describe '#gitlab_ci_yml_for' do
+ let(:project) { create(:project, :repository) }
+
before do
repository.create_file(User.last, '.gitlab-ci.yml', 'CONTENT', message: 'Add .gitlab-ci.yml', branch_name: 'master')
end
@@ -2849,7 +2907,7 @@ RSpec.describe Repository do
end
describe '#changelog_config' do
- let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
let(:changelog_config_path) { Gitlab::Changelog::Config::DEFAULT_FILE_PATH }
before do
@@ -2865,6 +2923,7 @@ RSpec.describe Repository do
context 'when there is a changelog_config_path at the commit' do
it 'returns the content' do
expect(repository.changelog_config(repository.commit.sha, changelog_config_path)).to eq('CONTENT')
+ expect(repository.changelog_config(repository.commit.parent.sha, changelog_config_path)).to be_nil
end
end
@@ -2876,6 +2935,8 @@ RSpec.describe Repository do
end
describe '#route_map_for' do
+ let(:project) { create(:project, :repository) }
+
before do
repository.create_file(User.last, '.gitlab/route-map.yml', 'CONTENT', message: 'Add .gitlab/route-map.yml', branch_name: 'master')
end
@@ -3148,7 +3209,6 @@ RSpec.describe Repository do
describe '#create_if_not_exists' do
let(:project) { create(:project) }
- let(:repository) { project.repository }
it 'creates the repository if it did not exist' do
expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true)
@@ -3204,7 +3264,6 @@ RSpec.describe Repository do
describe '#create_from_bundle' do
let(:project) { create(:project) }
- let(:repository) { project.repository }
let(:valid_bundle_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
let(:raw_repository) { repository.raw }
@@ -3244,8 +3303,6 @@ RSpec.describe Repository do
describe "#blobs_metadata" do
let_it_be(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
-
def expect_metadata_blob(thing)
expect(thing).to be_a(Blob)
expect(thing.data).to be_empty
@@ -3313,8 +3370,6 @@ RSpec.describe Repository do
subject { repository.lfs_enabled? }
context 'for a project repository' do
- let(:repository) { project.repository }
-
it 'returns true when LFS is enabled' do
stub_lfs_setting(enabled: true)
@@ -3425,6 +3480,8 @@ RSpec.describe Repository do
end
describe '#change_head' do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:branch) { repository.container.default_branch }
context 'when the branch exists' do
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index 44de83d9240..5087a8e8524 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -144,4 +144,14 @@ RSpec.describe ResourceLabelEvent, type: :model do
create(:resource_label_event, issue: issue, label: label)
end
end
+
+ describe '#discussion_id' do
+ it 'generates different discussion ID for events created milliseconds apart' do
+ now = Time.current
+ event_1 = create(:resource_label_event, issue: issue, label: label, user: issue.author, created_at: now)
+ event_2 = create(:resource_label_event, issue: issue, label: label, user: issue.author, created_at: now.advance(seconds: 0.001))
+
+ expect(event_1.discussion_id).not_to eq(event_2.discussion_id)
+ end
+ end
end
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
index 9189b9a1469..04964d36dcd 100644
--- a/spec/models/user_detail_spec.rb
+++ b/spec/models/user_detail_spec.rb
@@ -25,5 +25,140 @@ RSpec.describe UserDetail do
describe '#bio' do
it { is_expected.to validate_length_of(:bio).is_at_most(255) }
end
+
+ describe '#linkedin' do
+ it { is_expected.to validate_length_of(:linkedin).is_at_most(500) }
+ end
+
+ describe '#twitter' do
+ it { is_expected.to validate_length_of(:twitter).is_at_most(500) }
+ end
+
+ describe '#skype' do
+ it { is_expected.to validate_length_of(:skype).is_at_most(500) }
+ end
+
+ describe '#location' do
+ it { is_expected.to validate_length_of(:location).is_at_most(500) }
+ end
+
+ describe '#organization' do
+ it { is_expected.to validate_length_of(:organization).is_at_most(500) }
+ end
+
+ describe '#website_url' do
+ it { is_expected.to validate_length_of(:website_url).is_at_most(500) }
+ end
+ end
+
+ describe '.user_fields_changed?' do
+ let(:user) { create(:user) }
+
+ context 'when user detail fields unchanged' do
+ it 'returns false' do
+ expect(described_class.user_fields_changed?(user)).to be false
+ end
+
+ %i[linkedin location organization skype twitter website_url].each do |attr|
+ context "when #{attr} is changed" do
+ before do
+ user[attr] = 'new value'
+ end
+
+ it 'returns true' do
+ expect(described_class.user_fields_changed?(user)).to be true
+ end
+ end
+ end
+ end
+ end
+
+ describe '#sanitize_attrs' do
+ shared_examples 'sanitizes html' do |attr|
+ it 'sanitizes html tags' do
+ details = build_stubbed(:user_detail, attr => '<a href="//evil.com">https://example.com<a>')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('https://example.com')
+ end
+
+ it 'sanitizes iframe scripts' do
+ details = build_stubbed(:user_detail, attr => '<iframe src=javascript:alert()><iframe>')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('')
+ end
+
+ it 'sanitizes js scripts' do
+ details = build_stubbed(:user_detail, attr => '<script>alert("Test")</script>')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('')
+ end
+ end
+
+ %i[linkedin skype twitter website_url].each do |attr|
+ it_behaves_like 'sanitizes html', attr
+
+ it 'encodes HTML entities' do
+ details = build_stubbed(:user_detail, attr => 'test&attr')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('test&amp;attr')
+ end
+ end
+
+ %i[location organization].each do |attr|
+ it_behaves_like 'sanitizes html', attr
+
+ it 'does not encode HTML entities' do
+ details = build_stubbed(:user_detail, attr => 'test&attr')
+ expect { details.sanitize_attrs }.not_to change { details[attr] }
+ end
+ end
+
+ it 'sanitizes on validation' do
+ details = build(:user_detail)
+
+ expect(details)
+ .to receive(:sanitize_attrs)
+ .at_least(:once)
+ .and_call_original
+
+ details.save!
+ end
+ end
+
+ describe '#assign_changed_fields_from_user' do
+ let(:user_detail) { build(:user_detail) }
+
+ shared_examples 'syncs field with `user_details`' do |field|
+ it 'does not sync the field to `user_details` if unchanged' do
+ expect { user_detail.assign_changed_fields_from_user }
+ .to not_change { user_detail.public_send(field) }
+ end
+
+ it 'syncs the field to `user_details` if changed' do
+ user_detail.user[field] = "new_value"
+ expect { user_detail.assign_changed_fields_from_user }
+ .to change { user_detail.public_send(field) }
+ .to("new_value")
+ end
+
+ it 'truncates the field if too long' do
+ user_detail.user[field] = 'a' * (UserDetail::DEFAULT_FIELD_LENGTH + 1)
+ expect { user_detail.assign_changed_fields_from_user }
+ .to change { user_detail.public_send(field) }
+ .to('a' * UserDetail::DEFAULT_FIELD_LENGTH)
+ end
+
+ it 'properly syncs a nil field to `user_details`' do
+ user_detail.user[field] = 'Test'
+ user_detail.user.save!(validate: false)
+ user_detail.user[field] = nil
+ expect { user_detail.assign_changed_fields_from_user }
+ .to change { user_detail.public_send(field) }
+ .to('')
+ end
+ end
+
+ it_behaves_like 'syncs field with `user_details`', :linkedin
+ it_behaves_like 'syncs field with `user_details`', :location
+ it_behaves_like 'syncs field with `user_details`', :organization
+ it_behaves_like 'syncs field with `user_details`', :skype
+ it_behaves_like 'syncs field with `user_details`', :twitter
+ it_behaves_like 'syncs field with `user_details`', :website_url
end
end
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 2492521c634..d76334d7c9e 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -10,20 +10,20 @@ RSpec.describe UserPreference do
using RSpec::Parameterized::TableSyntax
where(color: [
- '#000000',
- '#123456',
- '#abcdef',
- '#AbCdEf',
- '#ffffff',
- '#fFfFfF',
- '#000',
- '#123',
- '#abc',
- '#AbC',
- '#fff',
- '#fFf',
- ''
- ])
+ '#000000',
+ '#123456',
+ '#abcdef',
+ '#AbCdEf',
+ '#ffffff',
+ '#fFfFfF',
+ '#000',
+ '#123',
+ '#abc',
+ '#AbC',
+ '#fff',
+ '#fFf',
+ ''
+ ])
with_them do
it { is_expected.to allow_value(color).for(:diffs_deletion_color) }
@@ -31,20 +31,27 @@ RSpec.describe UserPreference do
end
where(color: [
- '#1',
- '#12',
- '#1234',
- '#12345',
- '#1234567',
- '123456',
- '#12345x'
- ])
+ '#1',
+ '#12',
+ '#1234',
+ '#12345',
+ '#1234567',
+ '123456',
+ '#12345x'
+ ])
with_them do
it { is_expected.not_to allow_value(color).for(:diffs_deletion_color) }
it { is_expected.not_to allow_value(color).for(:diffs_addition_color) }
end
end
+
+ describe 'use_legacy_web_ide' do
+ it { is_expected.to allow_value(true).for(:use_legacy_web_ide) }
+ it { is_expected.to allow_value(false).for(:use_legacy_web_ide) }
+ it { is_expected.not_to allow_value(nil).for(:use_legacy_web_ide) }
+ it { is_expected.not_to allow_value("").for(:use_legacy_web_ide) }
+ end
end
describe 'notes filters global keys' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 3cc34681ad6..8ebf3d70165 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -69,12 +69,18 @@ RSpec.describe User do
it { is_expected.to delegate_method(:markdown_surround_selection).to(:user_preference) }
it { is_expected.to delegate_method(:markdown_surround_selection=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:markdown_automatic_lists).to(:user_preference) }
+ it { is_expected.to delegate_method(:markdown_automatic_lists=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:diffs_deletion_color).to(:user_preference) }
it { is_expected.to delegate_method(:diffs_deletion_color=).to(:user_preference).with_arguments(:args) }
it { is_expected.to delegate_method(:diffs_addition_color).to(:user_preference) }
it { is_expected.to delegate_method(:diffs_addition_color=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:use_legacy_web_ide).to(:user_preference) }
+ it { is_expected.to delegate_method(:use_legacy_web_ide=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:job_title).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:job_title=).to(:user_detail).with_arguments(:args).allow_nil }
@@ -101,6 +107,7 @@ RSpec.describe User do
it { is_expected.to have_one(:atlassian_identity) }
it { is_expected.to have_one(:user_highest_role) }
it { is_expected.to have_one(:credit_card_validation) }
+ it { is_expected.to have_one(:phone_number_validation) }
it { is_expected.to have_one(:banned_user) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
@@ -136,7 +143,6 @@ RSpec.describe User do
it { is_expected.to have_many(:timelogs) }
it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
- it { is_expected.to have_many(:namespace_callouts).class_name('Users::NamespaceCallout') }
it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout') }
describe '#user_detail' do
@@ -1163,6 +1169,20 @@ RSpec.describe User do
'ORDER BY "users"."last_activity_on" ASC NULLS FIRST, "users"."id" DESC')
end
end
+
+ describe '.order_recent_sign_in' do
+ it 'sorts users by current_sign_in_at in descending order' do
+ expect(described_class.order_recent_sign_in.to_sql).to include(
+ 'ORDER BY "users"."current_sign_in_at" DESC NULLS LAST')
+ end
+ end
+
+ describe '.order_oldest_sign_in' do
+ it 'sorts users by current_sign_in_at in ascending order' do
+ expect(described_class.order_oldest_sign_in.to_sql).to include(
+ 'ORDER BY "users"."current_sign_in_at" ASC NULLS LAST')
+ end
+ end
end
context 'strip attributes' do
@@ -1882,9 +1902,9 @@ RSpec.describe User do
end
it 'ensures correct rights and limits for user' do
- stub_config_setting(default_can_create_group: true)
+ stub_application_setting(can_create_group: true)
- expect { user.update!(external: false) }.to change { user.can_create_group }.to(true)
+ expect { user.update!(external: false) }.to change { user.can_create_group }.from(false).to(true)
.and change { user.projects_limit }.to(Gitlab::CurrentSettings.default_projects_limit)
end
end
@@ -2604,7 +2624,7 @@ RSpec.describe User do
it 'applies defaults to user' do
expect(user.projects_limit).to eq(Gitlab.config.gitlab.default_projects_limit)
- expect(user.can_create_group).to eq(Gitlab.config.gitlab.default_can_create_group)
+ expect(user.can_create_group).to eq(Gitlab::CurrentSettings.can_create_group)
expect(user.theme_id).to eq(Gitlab.config.gitlab.default_theme)
expect(user.external).to be_falsey
expect(user.private_profile).to eq(false)
@@ -3719,6 +3739,22 @@ RSpec.describe User do
expect(user.followees).to be_empty
end
+
+ it 'does not follow if max followee limit is reached' do
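+ # Stub the limit down to 2 so the spec does not need to create a large number of users.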
+ stub_const('Users::UserFollowUser::MAX_FOLLOWEE_LIMIT', 2)
+
+ user = create(:user)
+ Users::UserFollowUser::MAX_FOLLOWEE_LIMIT.times { user.follow(create(:user)) }
+
+ followee = create(:user)
+ user_follow_user = user.follow(followee)
+
+ expect(user_follow_user).not_to be_persisted
+ expected_message = format(_("You can't follow more than %{limit} users. To follow more users, unfollow some others."), limit: Users::UserFollowUser::MAX_FOLLOWEE_LIMIT)
+ expect(user_follow_user.errors.messages[:base].first).to eq(expected_message)
+
+ expect(user.following?(followee)).to be_falsey
+ end
end
describe '#unfollow' do
@@ -3747,6 +3783,18 @@ RSpec.describe User do
expect(user.followees).to be_empty
end
+
+ it 'unfollows when over followee limit' do
+ user = create(:user)
+
+ followees = create_list(:user, 4)
+ followees.each { |f| expect(user.follow(f)).to be_truthy }
+
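+ # Lower the limit below the current followee count so the user is already over it.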
+ stub_const('Users::UserFollowUser::MAX_FOLLOWEE_LIMIT', followees.length - 2)
+
+ expect(user.unfollow(followees.first)).to be_truthy
+ expect(user.following?(followees.first)).to be_falsey
+ end
end
describe '#notification_email_or_default' do
@@ -4838,23 +4886,6 @@ RSpec.describe User do
end
end
- describe '#remove_project_authorizations' do
- let_it_be(:project1) { create(:project) }
- let_it_be(:project2) { create(:project) }
- let_it_be(:project3) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- it 'removes the project authorizations of the user, in specified projects' do
- create(:project_authorization, user: user, project: project1)
- create(:project_authorization, user: user, project: project2)
- create(:project_authorization, user: user, project: project3)
-
- user.remove_project_authorizations([project1.id, project2.id])
-
- expect(user.project_authorizations.pluck(:project_id)).to match_array([project3.id])
- end
- end
-
describe '#access_level=' do
let(:user) { build(:user) }
@@ -5393,6 +5424,41 @@ RSpec.describe User do
end
end
+ describe '#ensure_user_detail_assigned' do
+ let(:user) { build(:user) }
+
+ context 'when no user detail field has been changed' do
+ before do
+ allow(UserDetail)
+ .to receive(:user_fields_changed?)
+ .and_return(false)
+ end
+
+ it 'does not assign user details before save' do
+ expect(user.user_detail)
+ .not_to receive(:assign_changed_fields_from_user)
+
+ user.save!
+ end
+ end
+
+ context 'when a user detail field has been changed' do
+ before do
+ allow(UserDetail)
+ .to receive(:user_fields_changed?)
+ .and_return(true)
+ end
+
+ it 'assigns user details before save' do
+ expect(user.user_detail)
+ .to receive(:assign_changed_fields_from_user)
+ .and_call_original
+
+ user.save!
+ end
+ end
+ end
+
describe '#username_changed_hook' do
context 'for a new user' do
let(:user) { build(:user) }
@@ -6251,6 +6317,64 @@ RSpec.describe User do
it { is_expected.to be_falsey }
end
end
+
+ context 'user with autogenerated_password' do
+ let(:user) { build_stubbed(:user, password_automatically_set: true) }
+ let(:password) { user.password }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#generate_otp_backup_codes!' do
+ let(:user) { create(:user) }
+
+ context 'with FIPS mode', :fips_mode do
+ it 'attempts to use #generate_otp_backup_codes_pbkdf2!' do
+ expect(user).to receive(:generate_otp_backup_codes_pbkdf2!).and_call_original
+
+ user.generate_otp_backup_codes!
+ end
+ end
+
+ context 'outside FIPS mode' do
+ it 'does not attempt to use #generate_otp_backup_codes_pbkdf2!' do
+ expect(user).not_to receive(:generate_otp_backup_codes_pbkdf2!)
+
+ user.generate_otp_backup_codes!
+ end
+ end
+ end
+
+ describe '#invalidate_otp_backup_code!' do
+ let(:user) { create(:user) }
+
+ context 'with FIPS mode', :fips_mode do
+ context 'with a PBKDF2-encrypted password' do
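+ # Example PBKDF2-SHA512 digest, stubbed below as the stored backup code.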
+ let(:encrypted_password) { '$pbkdf2-sha512$20000$boHGAw0hEyI$DBA67J7zNZebyzLtLk2X9wRDbmj1LNKVGnZLYyz6PGrIDGIl45fl/BPH0y1TPZnV90A20i.fD9C3G9Bp8jzzOA' }
+
+ it 'attempts to use #invalidate_otp_backup_code_pdkdf2!' do
+ expect(user).to receive(:otp_backup_codes).at_least(:once).and_return([encrypted_password])
+ expect(user).to receive(:invalidate_otp_backup_code_pdkdf2!).and_return(true)
+
+ user.invalidate_otp_backup_code!(user.password)
+ end
+ end
+
+ it 'does not attempt to use #invalidate_otp_backup_code_pdkdf2!' do
+ expect(user).not_to receive(:invalidate_otp_backup_code_pdkdf2!)
+
+ user.invalidate_otp_backup_code!(user.password)
+ end
+ end
+
+ context 'outside FIPS mode' do
+ it 'does not attempt to use #invalidate_otp_backup_code_pdkdf2!' do
+ expect(user).not_to receive(:invalidate_otp_backup_code_pdkdf2!)
+
+ user.invalidate_otp_backup_code!(user.password)
+ end
+ end
end
# This entire test section can be removed once the :pbkdf2_password_encryption feature flag is removed.
@@ -6593,96 +6717,6 @@ RSpec.describe User do
end
end
- describe 'Users::NamespaceCallout' do
- describe '#dismissed_callout_for_namespace?' do
- let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:namespace) { create(:namespace) }
- let_it_be(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
-
- let(:query) do
- { feature_name: feature_name, namespace: namespace }
- end
-
- def have_dismissed_callout
- be_dismissed_callout_for_namespace(**query)
- end
-
- context 'when no callout dismissal record exists' do
- it 'returns false when no ignore_dismissal_earlier_than provided' do
- expect(user).not_to have_dismissed_callout
- end
- end
-
- context 'when dismissed callout exists' do
- before_all do
- create(:namespace_callout,
- user: user,
- namespace_id: namespace.id,
- feature_name: feature_name,
- dismissed_at: 4.months.ago)
- end
-
- it 'returns true when no ignore_dismissal_earlier_than provided' do
- expect(user).to have_dismissed_callout
- end
-
- it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
- query[:ignore_dismissal_earlier_than] = 6.months.ago
-
- expect(user).to have_dismissed_callout
- end
-
- it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
- query[:ignore_dismissal_earlier_than] = 2.months.ago
-
- expect(user).not_to have_dismissed_callout
- end
- end
- end
-
- describe '#find_or_initialize_namespace_callout' do
- let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:namespace) { create(:namespace) }
- let_it_be(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
-
- subject(:callout_with_source) do
- user.find_or_initialize_namespace_callout(feature_name, namespace.id)
- end
-
- context 'when callout exists' do
- let!(:callout) do
- create(:namespace_callout, user: user, feature_name: feature_name, namespace_id: namespace.id)
- end
-
- it 'returns existing callout' do
- expect(callout_with_source).to eq(callout)
- end
- end
-
- context 'when callout does not exist' do
- context 'when feature name is valid' do
- it 'initializes a new callout' do
- expect(callout_with_source)
- .to be_a_new(Users::NamespaceCallout)
- .and be_valid
- end
- end
-
- context 'when feature name is not valid' do
- let(:feature_name) { 'notvalid' }
-
- it 'initializes a new callout' do
- expect(callout_with_source).to be_a_new(Users::NamespaceCallout)
- end
-
- it 'is not valid' do
- expect(callout_with_source).not_to be_valid
- end
- end
- end
- end
- end
-
describe '#dismissed_callout_for_group?' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -7432,9 +7466,10 @@ RSpec.describe User do
let_it_be(:internal_user) { User.alert_bot.tap { |u| u.confirm } }
it 'does not return blocked or banned users' do
- expect(described_class.without_forbidden_states).to match_array([
- normal_user, admin_user, external_user, unconfirmed_user, omniauth_user, internal_user
- ])
+ expect(described_class.without_forbidden_states).to match_array(
+ [
+ normal_user, admin_user, external_user, unconfirmed_user, omniauth_user, internal_user
+ ])
end
end
diff --git a/spec/models/users/namespace_callout_spec.rb b/spec/models/users/namespace_callout_spec.rb
deleted file mode 100644
index f8207f2abc8..00000000000
--- a/spec/models/users/namespace_callout_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::NamespaceCallout do
- let_it_be(:user) { create_default(:user) }
- let_it_be(:namespace) { create_default(:namespace) }
- let_it_be(:callout) { create(:namespace_callout) }
-
- it_behaves_like 'having unique enum values'
-
- describe 'relationships' do
- it { is_expected.to belong_to(:namespace) }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:namespace) }
- it { is_expected.to validate_presence_of(:user) }
- it { is_expected.to validate_presence_of(:feature_name) }
-
- specify do
- is_expected.to validate_uniqueness_of(:feature_name)
- .scoped_to(:user_id, :namespace_id)
- .ignoring_case_sensitivity
- end
-
- it { is_expected.to allow_value(:web_hook_disabled).for(:feature_name) }
-
- it 'rejects invalid feature names' do
- expect { callout.feature_name = :non_existent_feature }.to raise_error(ArgumentError)
- end
- end
-
- describe '#source_feature_name' do
- it 'provides string based off source and feature' do
- expect(callout.source_feature_name).to eq "#{callout.feature_name}_#{callout.namespace_id}"
- end
- end
-end
diff --git a/spec/models/users/phone_number_validation_spec.rb b/spec/models/users/phone_number_validation_spec.rb
new file mode 100644
index 00000000000..2f0fd1d3ac9
--- /dev/null
+++ b/spec/models/users/phone_number_validation_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::PhoneNumberValidation do
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:banned_user) }
+
+ it { is_expected.to validate_presence_of(:country) }
+ it { is_expected.to validate_length_of(:country).is_at_most(3) }
+
+ it { is_expected.to validate_presence_of(:international_dial_code) }
+
+ it {
+ is_expected.to validate_numericality_of(:international_dial_code)
+ .only_integer
+ .is_greater_than_or_equal_to(1)
+ .is_less_than_or_equal_to(999)
+ }
+
+ it { is_expected.to validate_presence_of(:phone_number) }
+ it { is_expected.to validate_length_of(:phone_number).is_at_most(12) }
+ it { is_expected.to allow_value('555555').for(:phone_number) }
+ it { is_expected.not_to allow_value('555-555').for(:phone_number) }
+ it { is_expected.not_to allow_value('+555555').for(:phone_number) }
+ it { is_expected.not_to allow_value('555 555').for(:phone_number) }
+
+ it { is_expected.to validate_length_of(:telesign_reference_xid).is_at_most(255) }
+
+ describe '.related_to_banned_user?' do
+ let_it_be(:international_dial_code) { 1 }
+ let_it_be(:phone_number) { '555' }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+
+ subject(:related_to_banned_user?) do
+ described_class.related_to_banned_user?(international_dial_code, phone_number)
+ end
+
+ context 'when banned user has the same international dial code and phone number' do
+ before do
+ create(:phone_number_validation, user: banned_user)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when banned user has the same international dial code and phone number, but different country code' do
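+ # The lookup matches on dial code and phone number only, so the differing country code does not matter.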
+ before do
+ create(:phone_number_validation, user: banned_user, country: 'CA')
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when banned user does not have the same international dial code' do
+ before do
+ create(:phone_number_validation, user: banned_user, international_dial_code: 61)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when banned user does not have the same phone number' do
+ before do
+ create(:phone_number_validation, user: banned_user, phone_number: '666')
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when a non-banned user has the same international dial code and phone number' do
+ before do
+ create(:phone_number_validation, user: user)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
diff --git a/spec/models/wiki_directory_spec.rb b/spec/models/wiki_directory_spec.rb
index 9b6cec99ddb..44c6f6c9c1a 100644
--- a/spec/models/wiki_directory_spec.rb
+++ b/spec/models/wiki_directory_spec.rb
@@ -24,31 +24,32 @@ RSpec.describe WikiDirectory do
[toplevel1, toplevel2, toplevel3, child1, child2, child3, grandchild1, grandchild2].sort_by(&:title)
)
- expect(entries).to match([
- toplevel1,
- a_kind_of(WikiDirectory).and(
- having_attributes(
- slug: 'parent1', entries: [
- child1,
- child2,
- a_kind_of(WikiDirectory).and(
- having_attributes(
- slug: 'parent1/subparent',
- entries: [grandchild1, grandchild2]
+ expect(entries).to match(
+ [
+ toplevel1,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1', entries: [
+ child1,
+ child2,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1/subparent',
+ entries: [grandchild1, grandchild2]
+ )
)
- )
- ]
- )
- ),
- a_kind_of(WikiDirectory).and(
- having_attributes(
- slug: 'parent2',
- entries: [child3]
- )
- ),
- toplevel2,
- toplevel3
- ])
+ ]
+ )
+ ),
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent2',
+ entries: [child3]
+ )
+ ),
+ toplevel2,
+ toplevel3
+ ])
end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 96c396f085c..fcb041aebe5 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -3,27 +3,24 @@
require "spec_helper"
RSpec.describe WikiPage do
- let_it_be(:user) { create(:user) }
- let_it_be(:container) { create(:project) }
+ let(:user) { create(:user) }
+ let(:container) { create(:project) }
+ let(:wiki) { container.wiki }
- def create_wiki_page(attrs = {})
- page = build_wiki_page(attrs)
+ def create_wiki_page(container, attrs = {})
+ page = build_wiki_page(container, attrs)
page.create(message: (attrs[:message] || 'test commit'))
container.wiki.find_page(page.slug)
end
- def build_wiki_page(attrs = {})
+ def build_wiki_page(container, attrs = {})
wiki_page_attrs = { container: container, content: 'test content' }.merge(attrs)
build(:wiki_page, wiki_page_attrs)
end
- def wiki
- container.wiki
- end
-
def disable_front_matter
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
end
@@ -32,11 +29,20 @@ RSpec.describe WikiPage do
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => thing)
end
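+ # Swaps the wiki's default branch: branches off the current default, deletes it,
+ # and clears the repository and container caches so the wiki picks up the new default.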
+ def force_wiki_change_branch
+ old_default_branch = wiki.default_branch
+ wiki.repository.add_branch(user, 'another_branch', old_default_branch)
+ wiki.repository.rm_branch(user, old_default_branch)
+ wiki.repository.expire_status_cache
+
+ wiki.container.clear_memoization(:wiki)
+ end
+
# Use for groups of tests that do not modify their `subject`.
#
# include_context 'subject is persisted page', title: 'my title'
shared_context 'subject is persisted page' do |attrs = {}|
- let_it_be(:persisted_page) { create_wiki_page(attrs) }
+ let(:persisted_page) { create_wiki_page(container, attrs) }
subject { persisted_page }
end
@@ -192,7 +198,7 @@ RSpec.describe WikiPage do
end
describe "validations" do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it "validates presence of title" do
subject.attributes.delete(:title)
@@ -357,7 +363,7 @@ RSpec.describe WikiPage do
let(:title) { attributes[:title] }
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
context "with valid attributes" do
it "saves the wiki page" do
@@ -394,7 +400,7 @@ RSpec.describe WikiPage do
let(:title) { 'Index v1.2.3' }
describe "#create" do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it "saves the wiki page and returns true", :aggregate_failures do
attributes = { title: title, content: "Home Page", format: "markdown" }
@@ -405,7 +411,7 @@ RSpec.describe WikiPage do
end
describe '#update' do
- subject { create_wiki_page(title: title) }
+ subject { create_wiki_page(container, title: title) }
it 'updates the content of the page and returns true', :aggregate_failures do
expect(subject.update(content: 'new content')).to be_truthy
@@ -420,7 +426,7 @@ RSpec.describe WikiPage do
describe "#update" do
let!(:original_title) { subject.title }
- subject { create_wiki_page }
+ subject { create_wiki_page(container) }
context "with valid attributes" do
it "updates the content of the page" do
@@ -527,7 +533,7 @@ RSpec.describe WikiPage do
describe 'in subdir' do
it 'keeps the page in the same dir when the content is updated' do
title = 'foo/Existing Page'
- page = create_wiki_page(title: title)
+ page = create_wiki_page(container, title: title)
expect(page.slug).to eq 'foo/Existing-Page'
expect(page.update(title: title, content: 'new_content')).to be_truthy
@@ -541,7 +547,7 @@ RSpec.describe WikiPage do
context 'when renaming a page' do
it 'raises an error if the page already exists' do
- existing_page = create_wiki_page
+ existing_page = create_wiki_page(container)
expect { subject.update(title: existing_page.title, content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
expect(subject.title).to eq original_title
@@ -584,7 +590,7 @@ RSpec.describe WikiPage do
describe 'in subdir' do
it 'moves the page to the root folder if the title is preceded by /' do
- page = create_wiki_page(title: 'foo/Existing Page')
+ page = create_wiki_page(container, title: 'foo/Existing Page')
expect(page.slug).to eq 'foo/Existing-Page'
expect(page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
@@ -592,7 +598,7 @@ RSpec.describe WikiPage do
end
it 'does nothing if it has the same title' do
- page = create_wiki_page(title: 'foo/Another Existing Page')
+ page = create_wiki_page(container, title: 'foo/Another Existing Page')
original_path = page.slug
@@ -625,7 +631,7 @@ RSpec.describe WikiPage do
describe "#delete" do
it "deletes the page and returns true", :aggregate_failures do
- page = create_wiki_page
+ page = create_wiki_page(container)
expect do
expect(page.delete).to eq(true)
@@ -634,22 +640,88 @@ RSpec.describe WikiPage do
end
describe "#versions" do
- let(:subject) { create_wiki_page }
+ let(:subject) { create_wiki_page(container) }
+
+ before do
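+ # The initial create records one version; three updates bring the total to four.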
+ 3.times { |i| subject.update(content: "content #{i}") }
+ end
+
+ context 'when the number of versions is less than the default per-page limit' do
+ it "returns an array of all commits for the page" do
+ expect(subject.versions).to be_a(::CommitCollection)
+ expect(subject.versions.length).to eq(4)
+ expect(subject.versions.first.id).to eql(subject.last_version.id)
+ end
+ end
+
+ context 'when the number of versions is more than the default per-page limit' do
+ before do
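+ # With four versions and a stubbed page size of three, the history spans two pages.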
+ allow(Kaminari.config).to receive(:default_per_page).and_return(3)
+ end
+
+ it "returns an arrary containing the first page of commits for the page" do
+ expect(subject.versions).to be_a(::CommitCollection)
+ expect(subject.versions.length).to eq(3)
+ expect(subject.versions.first.id).to eql(subject.last_version.id)
+ end
+
+ it "returns an arrary containing the second page of commits for the page with options[:page] = 2" do
+ versions = subject.versions(page: 2)
+ expect(versions).to be_a(::CommitCollection)
+ expect(versions.length).to eq(1)
+ end
+ end
+
+ context "wiki repository's default is updated" do
+ before do
+ force_wiki_change_branch
+ end
+
+ it "returns the correct versions in the default branch" do
+ page = container.wiki.find_page(subject.title)
- it "returns an array of all commits for the page" do
+ expect(page.versions).to be_a(::CommitCollection)
+ expect(page.versions.length).to eq(4)
+ expect(page.versions.first.id).to eql(page.last_version.id)
+
+ page.update(content: "final content")
+ expect(page.versions.length).to eq(5)
+ end
+ end
+ end
+
+ describe "#count_versions" do
+ let(:subject) { create_wiki_page(container) }
+
+ it "returns the total numbers of commits" do
expect do
3.times { |i| subject.update(content: "content #{i}") }
- end.to change { subject.versions.count }.by(3)
+ end.to change(subject, :count_versions).from(1).to(4)
+ end
+
+ context "wiki repository's default is updated" do
+ before do
+ subject
+ force_wiki_change_branch
+ end
+
+ it "returns the correct number of versions in the default branch" do
+ page = container.wiki.find_page(subject.title)
+ expect(page.count_versions).to eq(1)
+
+ page.update(content: "final content")
+ expect(page.count_versions).to eq(2)
+ end
end
end
describe '#title_changed?' do
using RSpec::Parameterized::TableSyntax
- let_it_be(:unsaved_page) { build_wiki_page(title: 'test page') }
- let_it_be(:existing_page) { create_wiki_page(title: 'test page') }
- let_it_be(:directory_page) { create_wiki_page(title: 'parent directory/child page') }
- let_it_be(:page_with_special_characters) { create_wiki_page(title: 'test+page') }
+ let(:unsaved_page) { build_wiki_page(container, title: 'test page') }
+ let(:existing_page) { create_wiki_page(container, title: 'test page') }
+ let(:directory_page) { create_wiki_page(container, title: 'parent directory/child page') }
+ let(:page_with_special_characters) { create_wiki_page(container, title: 'test+page') }
let(:untitled_page) { described_class.new(wiki) }
@@ -704,7 +776,7 @@ RSpec.describe WikiPage do
describe '#content_changed?' do
context 'with a new page' do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it 'returns true if content is set' do
subject.attributes[:content] = 'new'
@@ -756,13 +828,13 @@ RSpec.describe WikiPage do
describe '#path' do
it 'returns the path when persisted' do
- existing_page = create_wiki_page(title: 'path test')
+ existing_page = create_wiki_page(container, title: 'path test')
expect(existing_page.path).to eq('path-test.md')
end
it 'returns nil when not persisted' do
- unsaved_page = build_wiki_page(title: 'path test')
+ unsaved_page = build_wiki_page(container, title: 'path test')
expect(unsaved_page.path).to be_nil
end
@@ -789,7 +861,7 @@ RSpec.describe WikiPage do
describe '#historical?' do
let!(:container) { create(:project) }
- subject { create_wiki_page }
+ subject { create_wiki_page(container) }
let(:wiki) { subject.wiki }
let(:old_version) { subject.versions.last.id }
@@ -830,17 +902,17 @@ RSpec.describe WikiPage do
describe '#persisted?' do
it 'returns true for a persisted page' do
- expect(create_wiki_page).to be_persisted
+ expect(create_wiki_page(container)).to be_persisted
end
it 'returns false for an unpersisted page' do
- expect(build_wiki_page).not_to be_persisted
+ expect(build_wiki_page(container)).not_to be_persisted
end
end
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- expect(build_wiki_page.to_partial_path).to eq('../shared/wikis/wiki_page')
+ expect(build_wiki_page(container).to_partial_path).to eq('../shared/wikis/wiki_page')
end
end
@@ -868,7 +940,7 @@ RSpec.describe WikiPage do
end
it 'returns false for page with different slug on same container' do
- other_page = create_wiki_page
+ other_page = create_wiki_page(container)
expect(subject.slug).not_to eq(other_page.slug)
expect(subject.container).to eq(other_page.container)
@@ -902,7 +974,7 @@ RSpec.describe WikiPage do
end
describe '#hook_attrs' do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it 'adds absolute urls for images in the content' do
subject.attributes[:content] = 'test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)'
@@ -914,13 +986,13 @@ RSpec.describe WikiPage do
describe '#version_commit_timestamp' do
context 'for a new page' do
it 'returns nil' do
- expect(build_wiki_page.version_commit_timestamp).to be_nil
+ expect(build_wiki_page(container).version_commit_timestamp).to be_nil
end
end
context 'for page that exists' do
it 'returns the timestamp of the commit' do
- existing_page = create_wiki_page
+ existing_page = create_wiki_page(container)
expect(existing_page.version_commit_timestamp).to eq(existing_page.version.commit.committed_date)
end