gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/active_session_spec.rb | 2
-rw-r--r--  spec/models/application_setting_spec.rb | 7
-rw-r--r--  spec/models/aws/role_spec.rb | 2
-rw-r--r--  spec/models/board_spec.rb | 4
-rw-r--r--  spec/models/chat_name_spec.rb | 4
-rw-r--r--  spec/models/ci/bridge_spec.rb | 14
-rw-r--r--  spec/models/ci/build_dependencies_spec.rb | 4
-rw-r--r--  spec/models/ci/build_metadata_spec.rb | 13
-rw-r--r--  spec/models/ci/build_runner_session_spec.rb | 2
-rw-r--r--  spec/models/ci/build_spec.rb | 249
-rw-r--r--  spec/models/ci/daily_build_group_report_result_spec.rb | 2
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 14
-rw-r--r--  spec/models/ci/pipeline_schedule_spec.rb | 2
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 311
-rw-r--r--  spec/models/ci/processable_spec.rb | 2
-rw-r--r--  spec/models/ci/runner_spec.rb | 65
-rw-r--r--  spec/models/ci/runner_version_spec.rb | 9
-rw-r--r--  spec/models/ci/secure_file_spec.rb | 1
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 4
-rw-r--r--  spec/models/commit_signatures/ssh_signature_spec.rb | 2
-rw-r--r--  spec/models/commit_status_spec.rb | 2
-rw-r--r--  spec/models/concerns/bulk_insert_safe_spec.rb | 4
-rw-r--r--  spec/models/concerns/chronic_duration_attribute_spec.rb | 12
-rw-r--r--  spec/models/concerns/ci/artifactable_spec.rb | 24
-rw-r--r--  spec/models/concerns/counter_attribute_spec.rb | 2
-rw-r--r--  spec/models/concerns/cross_database_modification_spec.rb | 32
-rw-r--r--  spec/models/concerns/database_event_tracking_spec.rb | 69
-rw-r--r--  spec/models/concerns/expirable_spec.rb | 5
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 21
-rw-r--r--  spec/models/concerns/nullify_if_blank_spec.rb | 2
-rw-r--r--  spec/models/concerns/participable_spec.rb | 26
-rw-r--r--  spec/models/concerns/project_features_compatibility_spec.rb | 6
-rw-r--r--  spec/models/concerns/reactive_caching_spec.rb | 2
-rw-r--r--  spec/models/concerns/token_authenticatable_spec.rb | 12
-rw-r--r--  spec/models/container_expiration_policy_spec.rb | 2
-rw-r--r--  spec/models/container_repository_spec.rb | 184
-rw-r--r--  spec/models/customer_relations/contact_spec.rb | 53
-rw-r--r--  spec/models/customer_relations/contact_state_counts_spec.rb | 60
-rw-r--r--  spec/models/data_list_spec.rb | 2
-rw-r--r--  spec/models/deploy_key_spec.rb | 9
-rw-r--r--  spec/models/design_management/version_spec.rb | 8
-rw-r--r--  spec/models/environment_spec.rb | 231
-rw-r--r--  spec/models/error_tracking/project_error_tracking_setting_spec.rb | 46
-rw-r--r--  spec/models/event_spec.rb | 26
-rw-r--r--  spec/models/group_group_link_spec.rb | 52
-rw-r--r--  spec/models/group_spec.rb | 44
-rw-r--r--  spec/models/hooks/system_hook_spec.rb | 2
-rw-r--r--  spec/models/hooks/web_hook_spec.rb | 12
-rw-r--r--  spec/models/incident_management/issuable_escalation_status_spec.rb | 1
-rw-r--r--  spec/models/incident_management/timeline_event_spec.rb | 15
-rw-r--r--  spec/models/integration_spec.rb | 5
-rw-r--r--  spec/models/integrations/bamboo_spec.rb | 1
-rw-r--r--  spec/models/integrations/bugzilla_spec.rb | 1
-rw-r--r--  spec/models/integrations/buildkite_spec.rb | 1
-rw-r--r--  spec/models/integrations/campfire_spec.rb | 2
-rw-r--r--  spec/models/integrations/chat_message/issue_message_spec.rb | 4
-rw-r--r--  spec/models/integrations/chat_message/wiki_page_message_spec.rb | 2
-rw-r--r--  spec/models/integrations/custom_issue_tracker_spec.rb | 1
-rw-r--r--  spec/models/integrations/datadog_spec.rb | 16
-rw-r--r--  spec/models/integrations/drone_ci_spec.rb | 1
-rw-r--r--  spec/models/integrations/ewm_spec.rb | 1
-rw-r--r--  spec/models/integrations/external_wiki_spec.rb | 1
-rw-r--r--  spec/models/integrations/harbor_spec.rb | 4
-rw-r--r--  spec/models/integrations/jira_spec.rb | 22
-rw-r--r--  spec/models/integrations/microsoft_teams_spec.rb | 1
-rw-r--r--  spec/models/integrations/pumble_spec.rb | 14
-rw-r--r--  spec/models/integrations/slack_spec.rb | 17
-rw-r--r--  spec/models/integrations/teamcity_spec.rb | 1
-rw-r--r--  spec/models/issue_spec.rb | 179
-rw-r--r--  spec/models/jira_import_state_spec.rb | 30
-rw-r--r--  spec/models/lfs_object_spec.rb | 4
-rw-r--r--  spec/models/loose_foreign_keys/deleted_record_spec.rb | 16
-rw-r--r--  spec/models/member_spec.rb | 31
-rw-r--r--  spec/models/members/group_member_spec.rb | 90
-rw-r--r--  spec/models/members/member_role_spec.rb | 17
-rw-r--r--  spec/models/members/project_member_spec.rb | 9
-rw-r--r--  spec/models/merge_request/approval_removal_settings_spec.rb | 52
-rw-r--r--  spec/models/merge_request_spec.rb | 107
-rw-r--r--  spec/models/milestone_spec.rb | 4
-rw-r--r--  spec/models/ml/candidate_metric_spec.rb | 9
-rw-r--r--  spec/models/ml/candidate_param_spec.rb | 9
-rw-r--r--  spec/models/ml/candidate_spec.rb | 12
-rw-r--r--  spec/models/ml/experiment_spec.rb | 11
-rw-r--r--  spec/models/namespace/detail_spec.rb | 40
-rw-r--r--  spec/models/namespace/root_storage_statistics_spec.rb | 4
-rw-r--r--  spec/models/namespace/traversal_hierarchy_spec.rb | 6
-rw-r--r--  spec/models/namespace_spec.rb | 50
-rw-r--r--  spec/models/namespaces/project_namespace_spec.rb | 8
-rw-r--r--  spec/models/note_spec.rb | 74
-rw-r--r--  spec/models/oauth_access_token_spec.rb | 68
-rw-r--r--  spec/models/onboarding_progress_spec.rb | 2
-rw-r--r--  spec/models/packages/cleanup/policy_spec.rb | 1
-rw-r--r--  spec/models/packages/conan/metadatum_spec.rb | 2
-rw-r--r--  spec/models/packages/package_file_spec.rb | 6
-rw-r--r--  spec/models/packages/package_spec.rb | 6
-rw-r--r--  spec/models/personal_access_token_spec.rb | 24
-rw-r--r--  spec/models/postgresql/replication_slot_spec.rb | 2
-rw-r--r--  spec/models/preloaders/labels_preloader_spec.rb | 2
-rw-r--r--  spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb | 2
-rw-r--r--  spec/models/project_spec.rb | 115
-rw-r--r--  spec/models/project_statistics_spec.rb | 39
-rw-r--r--  spec/models/projects/import_export/relation_export_spec.rb | 32
-rw-r--r--  spec/models/projects/topic_spec.rb | 11
-rw-r--r--  spec/models/protected_branch_spec.rb | 124
-rw-r--r--  spec/models/release_highlight_spec.rb | 14
-rw-r--r--  spec/models/release_spec.rb | 2
-rw-r--r--  spec/models/releases/link_spec.rb | 2
-rw-r--r--  spec/models/remote_mirror_spec.rb | 16
-rw-r--r--  spec/models/repository_spec.rb | 40
-rw-r--r--  spec/models/snippet_input_action_collection_spec.rb | 6
-rw-r--r--  spec/models/snippet_spec.rb | 4
-rw-r--r--  spec/models/u2f_registration_spec.rb | 106
-rw-r--r--  spec/models/user_spec.rb | 444
-rw-r--r--  spec/models/user_status_spec.rb | 26
-rw-r--r--  spec/models/users/calloutable_spec.rb | 4
-rw-r--r--  spec/models/users/in_product_marketing_email_spec.rb | 3
-rw-r--r--  spec/models/users/project_callout_spec.rb | 24
-rw-r--r--  spec/models/webauthn_registration_spec.rb | 1
-rw-r--r--  spec/models/wiki_page/meta_spec.rb | 2
-rw-r--r--  spec/models/work_item_spec.rb | 67
-rw-r--r--  spec/models/work_items/parent_link_spec.rb | 64
-rw-r--r--  spec/models/work_items/type_spec.rb | 11
-rw-r--r--  spec/models/work_items/widgets/hierarchy_spec.rb | 6
-rw-r--r--  spec/models/work_items/widgets/labels_spec.rb | 31
-rw-r--r--  spec/models/work_items/widgets/start_and_due_date_spec.rb | 31
125 files changed, 2910 insertions, 966 deletions
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index 751d31ad95a..5d316f7cff2 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do
expect(Gitlab::Redis::Sessions).to receive(:with).and_yield(redis)
sessions = %w[session-a session-b]
- mget_responses = sessions.map { |session| [Marshal.dump(session)]}
+ mget_responses = sessions.map { |session| [Marshal.dump(session)] }
expect(redis).to receive(:mget).twice.times.and_return(*mget_responses)
expect(ActiveSession.sessions_from_ids([1, 2])).to eql(sessions)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 0b3521cdd0c..16e1d8fbc4d 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -116,6 +116,7 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_presence_of(:max_yaml_depth) }
it { is_expected.to validate_numericality_of(:max_yaml_depth).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_pages_size) }
+
it 'ensures max_pages_size is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than_or_equal_to(0)
.is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
@@ -1438,4 +1439,10 @@ RSpec.describe ApplicationSetting do
end
end
end
+
+ context 'personal access token prefix' do
+ it 'sets the correct default value' do
+ expect(setting.personal_access_token_prefix).to eql('glpat-')
+ end
+ end
end
diff --git a/spec/models/aws/role_spec.rb b/spec/models/aws/role_spec.rb
index ee93c9d6fad..f23f18ab44f 100644
--- a/spec/models/aws/role_spec.rb
+++ b/spec/models/aws/role_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Aws::Role do
end
context 'ARN is nil' do
- let(:role_arn) { }
+ let(:role_arn) {}
it { is_expected.to be_truthy }
end
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index 775cccd2aec..6017298e85b 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Board do
- let(:project) { create(:project) }
- let(:other_project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
describe 'relationships' do
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 1d2ad8b4dce..02c38479d1a 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe ChatName do
it { is_expected.to validate_presence_of(:team_id) }
it { is_expected.to validate_presence_of(:chat_id) }
- it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:service_id) }
- it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:service_id, :team_id) }
+ it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:integration_id) }
+ it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:integration_id, :team_id) }
it 'is removed when the project is deleted' do
expect { subject.reload.integration.project.delete }.to change { ChatName.count }.by(-1)
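Background for the matcher change above: scoping the uniqueness validations to integration_id instead of the legacy service_id corresponds, in model terms, to roughly the following (a sketch of the validation shape the matchers assert, not a copy of the ChatName model):

# Validation shape asserted by the updated shoulda-matchers above;
# column names follow the spec, the rest of the model is omitted.
validates :user_id, uniqueness: { scope: :integration_id }
validates :chat_id, uniqueness: { scope: [:integration_id, :team_id] }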
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index cb29cce554f..40c2d62c465 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -25,6 +25,8 @@ RSpec.describe Ci::Bridge do
expect(bridge).to have_many(:sourced_pipelines)
end
+ it_behaves_like 'has ID tokens', :ci_bridge
+
it 'has one downstream pipeline' do
expect(bridge).to have_one(:sourced_pipeline)
expect(bridge).to have_one(:downstream_pipeline)
@@ -401,6 +403,18 @@ RSpec.describe Ci::Bridge do
end
end
+ describe '#downstream_project_path' do
+ context 'when trigger is defined' do
+ context 'when using variable expansion' do
+ let(:options) { { trigger: { project: 'my/$BRIDGE/project' } } }
+
+ it 'correctly expands variables' do
+ expect(bridge.downstream_project_path).to eq('my/cross/project')
+ end
+ end
+ end
+ end
+
describe '#target_ref' do
context 'when trigger is defined' do
it 'returns a ref name' do
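The new #downstream_project_path example depends on CI variable expansion inside the trigger: project path. A minimal, self-contained sketch of that kind of expansion (hypothetical helper, not the Ci::Bridge implementation):

# Hypothetical helper: expand $VAR references from a variables hash.
def expand_project_path(path, variables)
  path.gsub(/\$([A-Za-z_][A-Za-z0-9_]*)/) { variables[Regexp.last_match(1)].to_s }
end

expand_project_path('my/$BRIDGE/project', 'BRIDGE' => 'cross')
# => "my/cross/project"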
diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb
index 91048cae064..737348765d9 100644
--- a/spec/models/ci/build_dependencies_spec.rb
+++ b/spec/models/ci/build_dependencies_spec.rb
@@ -76,8 +76,8 @@ RSpec.describe Ci::BuildDependencies do
end
describe 'jobs from specified dependencies' do
- let(:dependencies) { }
- let(:needs) { }
+ let(:dependencies) {}
+ let(:needs) {}
let!(:job) do
scheduling_type = needs.present? ? :dag : :stage
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index 5e30f9160cd..e904463a5ca 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -105,6 +105,13 @@ RSpec.describe Ci::BuildMetadata do
}
}
}
+ metadata.id_tokens = {
+ TEST_JWT_TOKEN: {
+ id_token: {
+ aud: 'https://gitlab.test'
+ }
+ }
+ }
expect(metadata).to be_valid
end
@@ -113,10 +120,14 @@ RSpec.describe Ci::BuildMetadata do
context 'when data is invalid' do
it 'returns errors' do
metadata.secrets = { DATABASE_PASSWORD: { vault: {} } }
+ metadata.id_tokens = { TEST_JWT_TOKEN: { id_token: { aud: nil } } }
aggregate_failures do
expect(metadata).to be_invalid
- expect(metadata.errors.full_messages).to eq(["Secrets must be a valid json schema"])
+ expect(metadata.errors.full_messages).to contain_exactly(
+ 'Secrets must be a valid json schema',
+ 'Id tokens must be a valid json schema'
+ )
end
end
end
diff --git a/spec/models/ci/build_runner_session_spec.rb b/spec/models/ci/build_runner_session_spec.rb
index 601c6ad26f9..ed5ed456d7b 100644
--- a/spec/models/ci/build_runner_session_spec.rb
+++ b/spec/models/ci/build_runner_session_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Ci::BuildRunnerSession, model: true do
end
describe '#service_specification' do
- let(:service) { 'foo'}
+ let(:service) { 'foo' }
let(:port) { 80 }
let(:path) { 'path' }
let(:subprotocols) { nil }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index e0166ba64a4..b865688d370 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3,6 +3,9 @@
require 'spec_helper'
RSpec.describe Ci::Build do
+ include Ci::TemplateHelpers
+ include AfterNextHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:group, reload: true) { create(:group) }
let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
@@ -59,11 +62,36 @@ RSpec.describe Ci::Build do
describe 'callbacks' do
context 'when running after_create callback' do
- it 'triggers asynchronous build hooks worker' do
- expect(BuildHooksWorker).to receive(:perform_async)
+ it 'executes hooks' do
+ expect_next(described_class).to receive(:execute_hooks)
create(:ci_build)
end
+
+ context 'when the execute_build_hooks_inline flag is disabled' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: false)
+ end
+
+ it 'uses the old job hooks worker' do
+ expect(::BuildHooksWorker).to receive(:perform_async).with(Ci::Build)
+
+ create(:ci_build)
+ end
+ end
+
+ context 'when the execute_build_hooks_inline flag is enabled for a project' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: project)
+ end
+
+ it 'executes hooks inline' do
+ expect(::BuildHooksWorker).not_to receive(:perform_async)
+ expect_next(described_class).to receive(:execute_hooks)
+
+ create(:ci_build, project: project)
+ end
+ end
end
end
@@ -81,6 +109,8 @@ RSpec.describe Ci::Build do
end
end
+ it_behaves_like 'has ID tokens', :ci_build
+
describe '.manual_actions' do
let!(:manual_but_created) { create(:ci_build, :manual, status: :created, pipeline: pipeline) }
let!(:manual_but_succeeded) { create(:ci_build, :manual, status: :success, pipeline: pipeline) }
@@ -1289,7 +1319,7 @@ RSpec.describe Ci::Build do
let(:subject) { build.hide_secrets(data) }
context 'hide runners token' do
- let(:data) { "new #{project.runners_token} data"}
+ let(:data) { "new #{project.runners_token} data" }
it { is_expected.to match(/^new x+ data$/) }
@@ -1303,7 +1333,7 @@ RSpec.describe Ci::Build do
end
context 'hide build token' do
- let(:data) { "new #{build.token} data"}
+ let(:data) { "new #{build.token} data" }
it { is_expected.to match(/^new x+ data$/) }
@@ -1335,6 +1365,43 @@ RSpec.describe Ci::Build do
end
end
+ describe 'state transition metrics' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { build.send(event) }
+
+ where(:ff_enabled, :state, :report_count, :trait) do
+ true | :success! | 1 | :sast
+ true | :cancel! | 1 | :sast
+ true | :drop! | 2 | :multiple_report_artifacts
+ true | :success! | 0 | :allowed_to_fail
+ true | :skip! | 0 | :pending
+ false | :success! | 0 | :sast
+ end
+
+ with_them do
+ let(:build) { create(:ci_build, trait, project: project, pipeline: pipeline) }
+ let(:event) { state }
+
+ context "when transitioning to #{params[:state]}" do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ stub_feature_flags(report_artifact_build_completed_metrics_on_build_completion: ff_enabled)
+ end
+
+ it 'increments build_completed_report_type metric' do
+ expect(
+ ::Gitlab::Ci::Artifacts::Metrics
+ ).to receive(
+ :build_completed_report_type_counter
+ ).exactly(report_count).times.and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+
describe 'state transition as a deployable' do
subject { build.send(event) }
@@ -1518,8 +1585,8 @@ RSpec.describe Ci::Build do
end
end
- describe '#environment_deployment_tier' do
- subject { build.environment_deployment_tier }
+ describe '#environment_tier_from_options' do
+ subject { build.environment_tier_from_options }
let(:build) { described_class.new(options: options) }
let(:options) { { environment: { deployment_tier: 'production' } } }
@@ -1533,6 +1600,30 @@ RSpec.describe Ci::Build do
end
end
+ describe '#environment_tier' do
+ subject { build.environment_tier }
+
+ let(:options) { { environment: { deployment_tier: 'production' } } }
+ let!(:environment) { create(:environment, name: 'production', tier: 'development', project: project) }
+ let(:build) { described_class.new(options: options, environment: 'production', project: project) }
+
+ it { is_expected.to eq('production') }
+
+ context 'when options does not include deployment_tier' do
+ let(:options) { { environment: { name: 'production' } } }
+
+ it 'uses tier from environment' do
+ is_expected.to eq('development')
+ end
+
+ context 'when persisted environment is absent' do
+ let(:environment) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
describe 'environment' do
describe '#has_environment?' do
subject { build.has_environment? }
@@ -1601,20 +1692,18 @@ RSpec.describe Ci::Build do
end
it 'returns an expanded environment name with a list of variables' do
- expect(build).to receive(:simple_variables).once.and_call_original
-
is_expected.to eq('review/host')
end
context 'when build metadata has already persisted the expanded environment name' do
before do
- build.metadata.expanded_environment_name = 'review/host'
+ build.metadata.expanded_environment_name = 'review/foo'
end
it 'returns a persisted expanded environment name without a list of variables' do
expect(build).not_to receive(:simple_variables)
- is_expected.to eq('review/host')
+ is_expected.to eq('review/foo')
end
end
end
@@ -1642,14 +1731,6 @@ RSpec.describe Ci::Build do
end
it { is_expected.to eq('review/master') }
-
- context 'when the FF ci_expand_environment_name_and_url is disabled' do
- before do
- stub_feature_flags(ci_expand_environment_name_and_url: false)
- end
-
- it { is_expected.to eq('review/${CI_COMMIT_REF_NAME}') }
- end
end
end
@@ -1693,7 +1774,7 @@ RSpec.describe Ci::Build do
end
context 'with a dynamic value' do
- let(:namespace) { 'deploy-$CI_COMMIT_REF_NAME'}
+ let(:namespace) { 'deploy-$CI_COMMIT_REF_NAME' }
it { is_expected.to eq 'deploy-master' }
end
@@ -1806,6 +1887,21 @@ RSpec.describe Ci::Build do
end
context 'build is erasable' do
+ context 'logging erase' do
+ let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
+
+ it 'logs erased artifacts' do
+ expect(Gitlab::Ci::Artifacts::Logger)
+ .to receive(:log_deleted)
+ .with(
+ match_array(build.job_artifacts.to_a),
+ 'Ci::Build#erase'
+ )
+
+ build.erase
+ end
+ end
+
context 'when project is undergoing stats refresh' do
let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
@@ -1908,7 +2004,14 @@ RSpec.describe Ci::Build do
end
end
- it "erases erasable artifacts" do
+ it "erases erasable artifacts and logs them" do
+ expect(Gitlab::Ci::Artifacts::Logger)
+ .to receive(:log_deleted)
+ .with(
+ match_array(build.job_artifacts.erasable.to_a),
+ 'Ci::Build#erase_erasable_artifacts!'
+ )
+
subject
expect(build.job_artifacts.erasable).to be_empty
@@ -2627,7 +2730,7 @@ RSpec.describe Ci::Build do
build.update_columns(token_encrypted: nil)
end
- it { is_expected.to be_nil}
+ it { is_expected.to be_nil }
end
end
@@ -2812,6 +2915,7 @@ RSpec.describe Ci::Build do
public: true,
masked: false },
{ key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false },
+ { key: 'CI_TEMPLATE_REGISTRY_HOST', value: template_registry_host, public: true, masked: false },
{ key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
{ key: 'CI_PIPELINE_CREATED_AT', value: pipeline.created_at.iso8601, public: true, masked: false },
@@ -2929,7 +3033,7 @@ RSpec.describe Ci::Build do
let(:expected_variables) do
predefined_variables.map { |variable| variable.fetch(:key) } +
%w[YAML_VARIABLE CI_ENVIRONMENT_NAME CI_ENVIRONMENT_SLUG
- CI_ENVIRONMENT_TIER CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_URL]
+ CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_TIER CI_ENVIRONMENT_URL]
end
before do
@@ -3096,6 +3200,16 @@ RSpec.describe Ci::Build do
end
end
+ context 'when environment_tier is updated in options' do
+ before do
+ build.update!(options: { environment: { name: 'production', deployment_tier: 'development' } })
+ end
+
+ it 'uses tier from options' do
+ is_expected.to include({ key: 'CI_ENVIRONMENT_TIER', value: 'development', public: true, masked: false })
+ end
+ end
+
context 'when project has an environment specific variable' do
let(:environment_specific_variable) do
{ key: 'MY_STAGING_ONLY_VARIABLE', value: 'environment_specific_variable', public: false, masked: false }
@@ -3508,8 +3622,8 @@ RSpec.describe Ci::Build do
context 'when gitlab-deploy-token does not exist for project' do
it 'does not include deploy token variables' do
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD' }).to be_nil
end
context 'when gitlab-deploy-token exists for group' do
@@ -3527,8 +3641,8 @@ RSpec.describe Ci::Build do
end
it 'does not include deploy token variables' do
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD' }).to be_nil
end
end
end
@@ -3559,10 +3673,10 @@ RSpec.describe Ci::Build do
context 'when harbor_integration does not exist' do
it 'does not include harbor variables' do
- expect(subject.find { |v| v[:key] == 'HARBOR_URL'}).to be_nil
- expect(subject.find { |v| v[:key] == 'HARBOR_PROJECT_NAME'}).to be_nil
- expect(subject.find { |v| v[:key] == 'HARBOR_USERNAME'}).to be_nil
- expect(subject.find { |v| v[:key] == 'HARBOR_PASSWORD'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_URL' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_PROJECT_NAME' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_USERNAME' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_PASSWORD' }).to be_nil
end
end
end
@@ -3807,8 +3921,20 @@ RSpec.describe Ci::Build do
build.enqueue
end
- it 'queues BuildHooksWorker' do
- expect(BuildHooksWorker).to receive(:perform_async).with(build)
+ context 'when the execute_build_hooks_inline flag is disabled' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: false)
+ end
+
+ it 'queues BuildHooksWorker' do
+ expect(BuildHooksWorker).to receive(:perform_async).with(build)
+
+ build.enqueue
+ end
+ end
+
+ it 'executes hooks' do
+ expect(build).to receive(:execute_hooks)
build.enqueue
end
@@ -4526,7 +4652,7 @@ RSpec.describe Ci::Build do
end
describe '#each_report' do
- let(:report_types) { Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES }
+ let(:report_types) { Ci::JobArtifact.file_types_for_report(:coverage) }
let!(:codequality) { create(:ci_job_artifact, :codequality, job: build) }
let!(:coverage) { create(:ci_job_artifact, :coverage_gocov_xml, job: build) }
@@ -4559,6 +4685,7 @@ RSpec.describe Ci::Build do
end
before do
+ allow(build).to receive(:execute_hooks)
stub_artifacts_object_storage
end
@@ -5499,7 +5626,7 @@ RSpec.describe Ci::Build do
build.cancel_gracefully?
end
- let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it 'cannot cancel gracefully' do
expect(subject).to be false
@@ -5520,4 +5647,58 @@ RSpec.describe Ci::Build do
let!(:model) { create(:ci_build, user: create(:user)) }
let!(:parent) { model.user }
end
+
+ describe '#clone' do
+ let_it_be(:user) { FactoryBot.build(:user) }
+
+ context 'when given new job variables' do
+ context 'when the cloned build has an action' do
+ it 'applies the new job variables' do
+ build = create(:ci_build, :actionable)
+ create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
+ create(:ci_job_variable, job: build, key: 'OLD_KEY', value: 'i will not live for long')
+
+ new_build = build.clone(current_user: user, new_job_variables_attributes: [
+ { key: 'TEST_KEY', value: 'new value' },
+ { key: 'NEW_KEY', value: 'exciting new value' }
+ ])
+ new_build.save!
+
+ expect(new_build.job_variables.count).to be(2)
+ expect(new_build.job_variables.pluck(:key)).to contain_exactly('TEST_KEY', 'NEW_KEY')
+ expect(new_build.job_variables.map(&:value)).to contain_exactly('new value', 'exciting new value')
+ end
+ end
+
+ context 'when the cloned build does not have an action' do
+ it 'applies the old job variables' do
+ build = create(:ci_build)
+ create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
+
+ new_build = build.clone(current_user: user, new_job_variables_attributes: [
+ { key: 'TEST_KEY', value: 'new value' }
+ ])
+ new_build.save!
+
+ expect(new_build.job_variables.count).to be(1)
+ expect(new_build.job_variables.pluck(:key)).to contain_exactly('TEST_KEY')
+ expect(new_build.job_variables.map(&:value)).to contain_exactly('old value')
+ end
+ end
+ end
+
+ context 'when not given new job variables' do
+ it 'applies the old job variables' do
+ build = create(:ci_build)
+ create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
+
+ new_build = build.clone(current_user: user)
+ new_build.save!
+
+ expect(new_build.job_variables.count).to be(1)
+ expect(new_build.job_variables.pluck(:key)).to contain_exactly('TEST_KEY')
+ expect(new_build.job_variables.map(&:value)).to contain_exactly('old value')
+ end
+ end
+ end
end
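The new #environment_tier examples above pin down a precedence: a deployment_tier set in the job options wins, otherwise the persisted environment's tier is used, and nil when neither is present. A rough sketch of that precedence (illustrative only, not the actual Ci::Build code):

# Precedence exercised by the specs above; names are illustrative.
def environment_tier(options, persisted_environment)
  options.dig(:environment, :deployment_tier) || persisted_environment&.tier
end

production_env = Struct.new(:tier).new('development')
environment_tier({ environment: { deployment_tier: 'production' } }, production_env) # => "production"
environment_tier({ environment: { name: 'production' } }, production_env)            # => "development"
environment_tier({ environment: { name: 'production' } }, nil)                       # => nil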
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index 43ba4c32477..d0141a1469e 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::DailyBuildGroupReportResult do
- let(:daily_build_group_report_result) { build(:ci_daily_build_group_report_result)}
+ let(:daily_build_group_report_result) { build(:ci_daily_build_group_report_result) }
describe 'associations' do
it { is_expected.to belong_to(:last_pipeline) }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index b9cac6c3f99..b996bf84529 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -128,6 +128,18 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.file_types_for_report' do
+ it 'returns the report file types for the report type' do
+ expect(described_class.file_types_for_report(:test)).to match_array(%w[junit])
+ end
+
+ context 'when given an unrecognized report type' do
+ it 'raises error' do
+ expect { described_class.file_types_for_report(:blah) }.to raise_error(KeyError, /blah/)
+ end
+ end
+ end
+
describe '.associated_file_types_for' do
using RSpec::Parameterized::TableSyntax
@@ -193,7 +205,7 @@ RSpec.describe Ci::JobArtifact do
it { is_expected.to be_truthy }
context 'when the job does have archived trace' do
- let!(:artifact) { }
+ let!(:artifact) {}
it { is_expected.to be_falsy }
end
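The .file_types_for_report spec added above expects a lookup that returns the file types registered for a report group and raises KeyError for unknown groups, which is exactly the behaviour of Hash#fetch over a report-type map (constant name and contents below are hypothetical):

# Hypothetical map; only the fetch/KeyError behaviour is pinned by the spec.
REPORT_FILE_TYPES = { test: %w[junit], coverage: %w[cobertura] }.freeze

def file_types_for_report(report_type)
  REPORT_FILE_TYPES.fetch(report_type) # KeyError for unrecognized report types
end

file_types_for_report(:test) # => ["junit"]
file_types_for_report(:blah) # raises KeyError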
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 3c295fb345b..b28b61e2b39 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Ci::PipelineSchedule do
end
context 'when there are no runnable schedules' do
- let!(:pipeline_schedule) { }
+ let!(:pipeline_schedule) {}
it 'returns an empty array' do
is_expected.to be_empty
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 6a71b2cfbed..0c28c99c113 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -844,6 +844,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'has 8 items' do
expect(subject.size).to eq(8)
end
+
it { expect(pipeline.sha).to start_with(subject) }
end
@@ -2162,6 +2163,60 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#modified_paths_since' do
+ let(:project) do
+ create(:project, :custom_repo,
+ files: { 'file1.txt' => 'file 1' })
+ end
+
+ let(:user) { project.owner }
+ let(:main_branch) { project.default_branch }
+ let(:new_branch) { 'feature_x' }
+ let(:pipeline) { build(:ci_pipeline, project: project, sha: new_branch) }
+
+ subject(:modified_paths_since) { pipeline.modified_paths_since(main_branch) }
+
+ before do
+ project.repository.add_branch(user, new_branch, main_branch)
+ end
+
+ context 'when no change in the new branch' do
+ it 'returns an empty array' do
+ expect(modified_paths_since).to be_empty
+ end
+ end
+
+ context 'when adding a new file' do
+ before do
+ project.repository.create_file(user, 'file2.txt', 'file 2', message: 'Create file2.txt', branch_name: new_branch)
+ end
+
+ it 'returns the new file path' do
+ expect(modified_paths_since).to eq(['file2.txt'])
+ end
+
+ context 'and when updating an existing file' do
+ before do
+ project.repository.update_file(user, 'file1.txt', 'file 1 updated', message: 'Update file1.txt', branch_name: new_branch)
+ end
+
+ it 'returns the new and updated file paths' do
+ expect(modified_paths_since).to eq(['file1.txt', 'file2.txt'])
+ end
+ end
+ end
+
+ context 'when updating an existing file' do
+ before do
+ project.repository.update_file(user, 'file1.txt', 'file 1 updated', message: 'Update file1.txt', branch_name: new_branch)
+ end
+
+ it 'returns the updated file path' do
+ expect(modified_paths_since).to eq(['file1.txt'])
+ end
+ end
+ end
+
describe '#all_worktree_paths' do
let(:files) { { 'main.go' => '', 'mocks/mocks.go' => '' } }
let(:project) { create(:project, :custom_repo, files: files) }
@@ -2866,7 +2921,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#cancel_running' do
- subject(:latest_status) { pipeline.statuses.pluck(:status) }
+ let(:latest_status) { pipeline.statuses.pluck(:status) }
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
@@ -2909,6 +2964,32 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ context 'with bridge jobs' do
+ before do
+ create(:ci_bridge, :created, pipeline: pipeline)
+
+ pipeline.cancel_running
+ end
+
+ it 'bridges are canceled' do
+ expect(pipeline.bridges.first.status).to eq 'canceled'
+ end
+ end
+
+ context 'when pipeline is not cancelable' do
+ before do
+ create(:ci_build, :canceled, stage_idx: 0, pipeline: pipeline)
+
+ pipeline.cancel_running
+ end
+
+ it 'does not send cancel signal to cancel self' do
+ expect(pipeline).not_to receive(:cancel_self_only)
+
+ pipeline.cancel_running
+ end
+ end
+
context 'preloading relations' do
let(:pipeline1) { create(:ci_empty_pipeline, :created) }
let(:pipeline2) { create(:ci_empty_pipeline, :created) }
@@ -2940,37 +3021,211 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- context 'when the first try cannot get an exclusive lock' do
- let(:retries) { 1 }
+ shared_examples 'retries' do
+ context 'when the first try cannot get an exclusive lock' do
+ let(:retries) { 1 }
- subject(:cancel_running) { pipeline.cancel_running(retries: retries) }
+ subject { pipeline.cancel_running(retries: retries) }
- before do
- build = create(:ci_build, :running, pipeline: pipeline)
+ before do
+ create(:ci_build, :running, pipeline: pipeline)
- allow(pipeline.cancelable_statuses).to receive(:find_in_batches).and_yield([build])
+ stub_first_cancel_call_fails
+ end
+
+ it 'retries again and cancels the build' do
+ subject
+
+ expect(latest_status).to contain_exactly('canceled')
+ end
+ context 'when the retries parameter is 0' do
+ let(:retries) { 0 }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(ActiveRecord::StaleObjectError)
+ end
+ end
+ end
+
+ def stub_first_cancel_call_fails
call_count = 0
- allow(build).to receive(:cancel).and_wrap_original do |original, *args|
- call_count >= retries ? raise(ActiveRecord::StaleObjectError) : original.call(*args)
- call_count += 1
+ allow_next_found_instance_of(Ci::Build) do |build|
+ allow(build).to receive(:cancel).and_wrap_original do |original, *args| # rubocop:disable RSpec/AnyInstanceOf
+ call_count >= retries ? raise(ActiveRecord::StaleObjectError) : original.call(*args)
+
+ call_count += 1
+ end
end
end
+ end
+
+ it_behaves_like 'retries'
- it 'retries again and cancels the build' do
- cancel_running
+ context 'when auto canceled' do
+ let!(:canceled_by) { create(:ci_empty_pipeline) }
- expect(latest_status).to contain_exactly('canceled')
+ before do
+ create(:ci_build, :running, pipeline: pipeline)
+
+ pipeline.cancel_running(auto_canceled_by_pipeline_id: canceled_by.id)
+ end
+
+ it 'sets auto cancel' do
+ jobs_canceled_by = pipeline.statuses.map { |s| s.auto_canceled_by.id }
+
+ expect(jobs_canceled_by).to contain_exactly(canceled_by.id)
+ expect(pipeline.auto_canceled_by.id).to eq(canceled_by.id)
end
+ end
- context 'when the retries parameter is 0' do
- let(:retries) { 0 }
+ context 'when there are child pipelines', :sidekiq_inline do
+ let_it_be(:child_pipeline) { create(:ci_empty_pipeline, :created, child_of: pipeline) }
- it 'raises error' do
- expect do
+ before do
+ project.clear_memoization(:cascade_cancel_pipelines_enabled)
+
+ pipeline.reload
+ end
+
+ context 'when cascade_to_children is true' do
+ let(:cascade_to_children) { true }
+ let(:canceled_by) { nil }
+ let(:execute_async) { true }
+
+ let(:params) do
+ {
+ cascade_to_children: cascade_to_children,
+ execute_async: execute_async
+ }.tap do |p|
+ p.merge!(auto_canceled_by_pipeline_id: canceled_by.id) if canceled_by
+ end
+ end
+
+ subject(:cancel_running) { pipeline.cancel_running(**params) }
+
+ context 'when cancelable child pipeline builds' do
+ before do
+ create(:ci_build, :created, pipeline: child_pipeline)
+ create(:ci_build, :running, pipeline: child_pipeline)
+ end
+
+ it 'cancels child builds' do
cancel_running
- end.to raise_error(ActiveRecord::StaleObjectError)
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(canceled canceled)
+ expect(latest_status).to eq %w(canceled)
+ end
+
+ it 'cancels bridges' do
+ create(:ci_bridge, :created, pipeline: pipeline)
+ create(:ci_bridge, :created, pipeline: child_pipeline)
+
+ cancel_running
+
+ expect(pipeline.bridges.reload.first.status).to eq 'canceled'
+ expect(child_pipeline.bridges.reload.first.status).to eq 'canceled'
+ end
+
+ context 'with nested child pipelines' do
+ let!(:nested_child_pipeline) { create(:ci_empty_pipeline, :created, child_of: child_pipeline) }
+ let!(:nested_child_pipeline_build) { create(:ci_build, :created, pipeline: nested_child_pipeline) }
+
+ it 'cancels them' do
+ cancel_running
+
+ expect(nested_child_pipeline.reload.status).to eq 'canceled'
+ expect(nested_child_pipeline_build.reload.status).to eq 'canceled'
+ end
+ end
+
+ context 'when auto canceled' do
+ let(:canceled_by) { create(:ci_empty_pipeline) }
+
+ it 'sets auto cancel' do
+ cancel_running
+
+ pipeline.reload
+
+ jobs_canceled_by_ids = pipeline.statuses.map(&:auto_canceled_by_id)
+ child_pipelines_canceled_by_ids = pipeline.child_pipelines.map(&:auto_canceled_by_id)
+ child_pipelines_jobs_canceled_by_ids = pipeline.child_pipelines.map(&:statuses).flatten.map(&:auto_canceled_by_id)
+
+ expect(jobs_canceled_by_ids).to contain_exactly(canceled_by.id)
+ expect(pipeline.auto_canceled_by_id).to eq(canceled_by.id)
+ expect(child_pipelines_canceled_by_ids).to contain_exactly(canceled_by.id)
+ expect(child_pipelines_jobs_canceled_by_ids).to contain_exactly(canceled_by.id, canceled_by.id)
+ end
+ end
+
+ context 'when execute_async is false' do
+ let(:execute_async) { false }
+
+ it 'runs sync' do
+ expect(::Ci::CancelPipelineWorker).not_to receive(:perform_async)
+
+ cancel_running
+ end
+
+ it 'cancels children' do
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(canceled canceled)
+ expect(latest_status).to eq %w(canceled)
+ end
+
+ context 'with nested child pipelines' do
+ let!(:nested_child_pipeline) { create(:ci_empty_pipeline, :created, child_of: child_pipeline) }
+ let!(:nested_child_pipeline_build) { create(:ci_build, :created, pipeline: nested_child_pipeline) }
+
+ it 'cancels them' do
+ cancel_running
+
+ expect(nested_child_pipeline.reload.status).to eq 'canceled'
+ expect(nested_child_pipeline_build.reload.status).to eq 'canceled'
+ end
+ end
+ end
+ end
+
+ it 'does not cancel uncancelable child pipeline builds' do
+ create(:ci_build, :failed, pipeline: child_pipeline)
+
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(failed)
+ expect(latest_status).to eq %w(canceled)
+ end
+ end
+
+ context 'when cascade_to_children is false' do
+ let(:cascade_to_children) { false }
+
+ subject(:cancel_running) { pipeline.cancel_running(cascade_to_children: cascade_to_children) }
+
+ it 'does not cancel cancelable child pipeline builds' do
+ create(:ci_build, :created, pipeline: child_pipeline)
+ create(:ci_build, :running, pipeline: child_pipeline)
+
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.order_id_desc.pluck(:status)
+ expect(latest_status_for_child).to eq %w(running created)
+ expect(latest_status).to eq %w(canceled)
+ end
+
+ it 'does not cancel uncancelable child pipeline builds' do
+ create(:ci_build, :failed, pipeline: child_pipeline)
+
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(failed)
+ expect(latest_status).to eq %w(canceled)
end
end
end
@@ -3352,7 +3607,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is a triggered pipeline' do
- let!(:upstream) { create(:ci_pipeline, project: create(:project), upstream_of: pipeline)}
+ let!(:upstream) { create(:ci_pipeline, project: create(:project), upstream_of: pipeline) }
it 'returns self id' do
expect(subject).to contain_exactly(pipeline.id)
@@ -4335,24 +4590,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
end
-
- describe '#find_stage_by_name' do
- subject { pipeline.find_stage_by_name!(stage_name) }
-
- context 'when stage exists' do
- it { is_expected.to eq(stage) }
- end
-
- context 'when stage does not exist' do
- let(:stage_name) { 'build' }
-
- it 'raises an ActiveRecord exception' do
- expect do
- subject
- end.to raise_exception(ActiveRecord::RecordNotFound)
- end
- end
- end
end
describe '#full_error_messages' do
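The new #modified_paths_since examples treat the method as "paths changed on the pipeline's SHA relative to a base ref", empty when the branch has no changes. As a rough mental model only (shelling out to git here; GitLab resolves this through its own repository layer):

# Conceptual sketch, not the Ci::Pipeline implementation.
def modified_paths_since(base_ref, sha, repo_path)
  Dir.chdir(repo_path) do
    `git diff --name-only #{base_ref}...#{sha}`.split("\n")
  end
end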
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 789ae3a2ccc..127a1417d9e 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe Ci::Processable do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
- job_artifacts_api_fuzzing terraform_state_versions].freeze
+ job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx].freeze
end
let(:ignore_accessors) do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 2fbfbbaf830..ae8748f8ae3 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -489,7 +489,7 @@ RSpec.describe Ci::Runner do
let!(:runner3) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 2.months.ago) }
let!(:runner4) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 3.months.ago) }
- it { is_expected.to eq([runner1, runner3, runner4])}
+ it { is_expected.to eq([runner1, runner3, runner4]) }
end
describe '.active' do
@@ -552,6 +552,10 @@ RSpec.describe Ci::Runner do
allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
allow_any_instance_of(described_class).to receive(:cached_attribute)
.with(:platform).and_return("darwin")
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:version).and_return("14.0.0")
+
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).once
end
context 'table tests' do
@@ -623,6 +627,10 @@ RSpec.describe Ci::Runner do
allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
allow_any_instance_of(described_class).to receive(:cached_attribute)
.with(:platform).and_return("darwin")
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:version).and_return("14.0.0")
+
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).once
end
context 'no cache value' do
@@ -693,19 +701,6 @@ RSpec.describe Ci::Runner do
it { is_expected.to eq([runner1]) }
end
- describe '#tick_runner_queue' do
- it 'sticks the runner to the primary and calls the original method' do
- runner = create(:ci_runner)
-
- expect(described_class.sticking).to receive(:stick)
- .with(:runner, runner.id)
-
- expect(Gitlab::Workhorse).to receive(:set_key_and_notify)
-
- runner.tick_runner_queue
- end
- end
-
describe '#matches_build?' do
using RSpec::Parameterized::TableSyntax
@@ -866,7 +861,7 @@ RSpec.describe Ci::Runner do
describe '#status' do
let(:runner) { build(:ci_runner, :instance, created_at: 4.months.ago) }
- let(:legacy_mode) { }
+ let(:legacy_mode) {}
subject { runner.status(legacy_mode) }
@@ -989,6 +984,16 @@ RSpec.describe Ci::Runner do
it 'returns a new last_update value' do
expect(runner.tick_runner_queue).not_to be_empty
end
+
+ it 'sticks the runner to the primary and calls the original method' do
+ runner = create(:ci_runner)
+
+ expect(described_class.sticking).to receive(:stick).with(:runner, runner.id)
+
+ expect(Gitlab::Workhorse).to receive(:set_key_and_notify)
+
+ runner.tick_runner_queue
+ end
end
describe '#ensure_runner_queue_value' do
@@ -1055,14 +1060,19 @@ RSpec.describe Ci::Runner do
it 'updates cache' do
expect_redis_update
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to receive(:perform_async)
heartbeat
+
+ expect(runner.runner_version).to be_nil
end
end
context 'when database was not updated recently' do
before do
runner.contacted_at = 2.hours.ago
+
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async)
end
context 'with invalid runner' do
@@ -1075,12 +1085,25 @@ RSpec.describe Ci::Runner do
expect_redis_update
does_db_update
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).once
+ end
+ end
+
+ context 'with unchanged runner version' do
+ let(:runner) { create(:ci_runner, version: version) }
+
+ it 'does not schedule ci_runner_versions update' do
+ heartbeat
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
end
end
it 'updates redis cache and database' do
expect_redis_update
does_db_update
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).once
end
%w(custom shell docker docker-windows docker-ssh ssh parallels virtualbox docker+machine docker-ssh+machine kubernetes some-unknown-type).each do |executor|
@@ -1795,11 +1818,21 @@ RSpec.describe Ci::Runner do
end
context ':recommended' do
- let(:upgrade_status) { :recommended}
+ let(:upgrade_status) { :recommended }
it 'returns runners whose version is assigned :recommended' do
is_expected.to contain_exactly(runner_14_1_0)
end
end
+
+ describe 'composed with other scopes' do
+ subject { described_class.active(false).with_upgrade_status(:available) }
+
+ let(:inactive_runner_14_0_0) { create(:ci_runner, version: '14.0.0', active: false) }
+
+ it 'returns runner matching the composed scope' do
+ is_expected.to contain_exactly(inactive_runner_14_0_0)
+ end
+ end
end
end
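Several of the heartbeat examples above hinge on scheduling Ci::Runners::ProcessRunnerVersionUpdateWorker only when the reported version differs from the stored one. The guard can be pictured like this (simplified sketch; the worker's argument shape is assumed, not taken from the patch):

# Simplified version-change guard exercised by the specs above.
def schedule_version_update(runner, reported_version)
  return if reported_version.nil? || reported_version == runner.runner_version

  # Argument assumed for illustration only.
  Ci::Runners::ProcessRunnerVersionUpdateWorker.perform_async(reported_version)
end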
diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb
index d3395942a39..7a4b2e8f21e 100644
--- a/spec/models/ci/runner_version_spec.rb
+++ b/spec/models/ci/runner_version_spec.rb
@@ -27,16 +27,11 @@ RSpec.describe Ci::RunnerVersion do
create(:ci_runner_version, version: 'abc456', status: :available)
end
- let_it_be(:runner_version_unknown) do
- create(:ci_runner_version, version: 'abc567', status: :unknown)
- end
-
- it 'contains any runner version that is not already recommended' do
+ it 'contains any valid or unprocessed runner version that is not already recommended' do
is_expected.to match_array([
runner_version_nil,
runner_version_not_available,
- runner_version_available,
- runner_version_unknown
+ runner_version_available
])
end
end
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index a3f1c7b7ef7..e47efff5dfd 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Ci::SecureFile do
it { is_expected.to validate_presence_of(:file_store) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:project_id) }
+
context 'unique filename' do
let_it_be(:project1) { create(:project) }
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 65ead01a2bd..73cd7bb9075 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -1278,14 +1278,14 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'generic timeout' do
let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
- let(:error_message) { 'Timed out connecting to server'}
+ let(:error_message) { 'Timed out connecting to server' }
it { is_expected.to eq(**connection_status, **expected_nodes) }
end
context 'gateway timeout' do
let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
- let(:error_message) { '504 Gateway Timeout for GET https://kubernetes.example.com/api/v1'}
+ let(:error_message) { '504 Gateway Timeout for GET https://kubernetes.example.com/api/v1' }
it { is_expected.to eq(**connection_status, **expected_nodes) }
end
diff --git a/spec/models/commit_signatures/ssh_signature_spec.rb b/spec/models/commit_signatures/ssh_signature_spec.rb
index ac4496e9d8c..64d95fe3a71 100644
--- a/spec/models/commit_signatures/ssh_signature_spec.rb
+++ b/spec/models/commit_signatures/ssh_signature_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe CommitSignatures::SshSignature do
it_behaves_like 'commit signature'
describe 'associations' do
- it { is_expected.to belong_to(:key).required }
+ it { is_expected.to belong_to(:key).optional }
end
describe '.by_commit_sha scope' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 3cccc41a892..78d4d9de84e 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -747,7 +747,7 @@ RSpec.describe CommitStatus do
end
context 'when failure_reason is nil' do
- let(:reason) { }
+ let(:reason) {}
let(:failure_reason) { 'unknown_failure' }
it { is_expected.to be_unknown_failure }
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index e6b197f34ca..569dc3a3a3e 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -170,7 +170,9 @@ RSpec.describe BulkInsertSafe do
all_items = bulk_insert_item_class.valid_list(10) + bulk_insert_item_class.invalid_list(10)
expect do
- bulk_insert_item_class.bulk_insert!(all_items, batch_size: 2) rescue nil
+ bulk_insert_item_class.bulk_insert!(all_items, batch_size: 2)
+ rescue StandardError
+ nil
end.not_to change { bulk_insert_item_class.count }
end
diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb
index 00e28e19bd5..61b86455840 100644
--- a/spec/models/concerns/chronic_duration_attribute_spec.rb
+++ b/spec/models/concerns/chronic_duration_attribute_spec.rb
@@ -95,8 +95,8 @@ end
RSpec.describe 'ChronicDurationAttribute' do
context 'when default value is not set' do
- let(:source_field) {:maximum_timeout}
- let(:virtual_field) {:maximum_timeout_human_readable}
+ let(:source_field) { :maximum_timeout }
+ let(:virtual_field) { :maximum_timeout_human_readable }
let(:default_value) { nil }
subject { create(:ci_runner) }
@@ -106,8 +106,8 @@ RSpec.describe 'ChronicDurationAttribute' do
end
context 'when default value is set' do
- let(:source_field) {:build_timeout}
- let(:virtual_field) {:build_timeout_human_readable}
+ let(:source_field) { :build_timeout }
+ let(:virtual_field) { :build_timeout_human_readable }
let(:default_value) { 3600 }
subject { create(:project) }
@@ -118,8 +118,8 @@ RSpec.describe 'ChronicDurationAttribute' do
end
RSpec.describe 'ChronicDurationAttribute - reader' do
- let(:source_field) {:timeout}
- let(:virtual_field) {:timeout_human_readable}
+ let(:source_field) { :timeout }
+ let(:virtual_field) { :timeout_human_readable }
subject { create(:ci_build).ensure_metadata }
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index 6af244a5a0f..64691165e21 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -46,8 +46,30 @@ RSpec.describe Ci::Artifactable do
end
end
+ context 'when file format is zip' do
+ context 'when artifact contains one file' do
+ let(:artifact) { build(:ci_job_artifact, :zip_with_single_file) }
+
+ it 'iterates blob once' do
+ expect { |b| artifact.each_blob(&b) }.to yield_control.once
+ end
+ end
+
+ context 'when artifact contains two files' do
+ let(:artifact) { build(:ci_job_artifact, :zip_with_multiple_files) }
+
+ it 'iterates blob two times' do
+ expect { |b| artifact.each_blob(&b) }.to yield_control.exactly(2).times
+ end
+ end
+ end
+
context 'when there are no adapters for the file format' do
- let(:artifact) { build(:ci_job_artifact, :junit, file_format: :zip) }
+ let(:artifact) { build(:ci_job_artifact, :junit) }
+
+ before do
+ allow(artifact).to receive(:file_format).and_return(:unknown)
+ end
it 'raises an error' do
expect { |b| artifact.each_blob(&b) }.to raise_error(described_class::NotSupportedAdapterError)
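The new zip cases for #each_blob expect one yield per file inside the zip artifact. A conceptual sketch with rubyzip (the real adapter lives in GitLab's artifact adapters, not shown here):

# One yield per regular file in the archive, as the zip examples expect.
require 'zip'

def each_zip_blob(path)
  Zip::File.open(path) do |zip|
    zip.each do |entry|
      yield entry.get_input_stream.read if entry.file?
    end
  end
end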
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index a19fbae3cfb..8d32ef14f47 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
end
describe 'after_flush callbacks' do
- let(:attribute) { model.class.counter_attributes.first}
+ let(:attribute) { model.class.counter_attributes.first }
subject { model.flush_increments_to_database!(attribute) }
diff --git a/spec/models/concerns/cross_database_modification_spec.rb b/spec/models/concerns/cross_database_modification_spec.rb
index 72544536953..c3831b654cf 100644
--- a/spec/models/concerns/cross_database_modification_spec.rb
+++ b/spec/models/concerns/cross_database_modification_spec.rb
@@ -4,38 +4,6 @@ require 'spec_helper'
RSpec.describe CrossDatabaseModification do
describe '.transaction' do
- context 'feature flag disabled' do
- before do
- stub_feature_flags(track_gitlab_schema_in_current_transaction: false)
- end
-
- it 'does not add to gitlab_transactions_stack' do
- ApplicationRecord.transaction do
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
-
- Project.first
- end
-
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
- end
- end
-
- context 'feature flag is not yet setup' do
- before do
- allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError)
- end
-
- it 'does not add to gitlab_transactions_stack' do
- ApplicationRecord.transaction do
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
-
- Project.first
- end
-
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
- end
- end
-
it 'adds the current gitlab schema to gitlab_transactions_stack', :aggregate_failures do
ApplicationRecord.transaction do
expect(ApplicationRecord.gitlab_transactions_stack).to contain_exactly(:gitlab_main)
diff --git a/spec/models/concerns/database_event_tracking_spec.rb b/spec/models/concerns/database_event_tracking_spec.rb
new file mode 100644
index 00000000000..976462b4174
--- /dev/null
+++ b/spec/models/concerns/database_event_tracking_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DatabaseEventTracking, :snowplow do
+ let(:test_class) do
+ Class.new(ActiveRecord::Base) do
+ include DatabaseEventTracking
+
+ self.table_name = 'application_setting_terms'
+
+ self::SNOWPLOW_ATTRIBUTES = %w[id].freeze # rubocop:disable RSpec/LeakyConstantDeclaration
+ end
+ end
+
+ subject(:create_test_class_record) { test_class.create!(id: 1, terms: "") }
+
+ context 'if event emitter failed' do
+ before do
+ allow(Gitlab::Tracking).to receive(:event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking
+ end
+
+ it 'tracks the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ create_test_class_record
+ end
+ end
+
+ context 'if product_intelligence_database_event_tracking FF is off' do
+ before do
+ stub_feature_flags(product_intelligence_database_event_tracking: false)
+ end
+
+ it 'does not track the event' do
+ create_test_class_record
+
+ expect_no_snowplow_event
+ end
+ end
+
+ describe 'event tracking' do
+ let(:category) { test_class.to_s }
+ let(:event) { 'database_event' }
+
+ it 'when created' do
+ create_test_class_record
+
+ expect_snowplow_event(category: category, action: "#{event}_create", label: 'application_setting_terms',
+ property: 'create', namespace: nil, "id" => 1)
+ end
+
+ it 'when updated' do
+ create_test_class_record
+ test_class.first.update!(id: 3)
+
+ expect_snowplow_event(category: category, action: "#{event}_update", label: 'application_setting_terms',
+ property: 'update', namespace: nil, "id" => 3)
+ end
+
+ it 'when destroyed' do
+ create_test_class_record
+ test_class.first.destroy!
+
+ expect_snowplow_event(category: category, action: "#{event}_destroy", label: 'application_setting_terms',
+ property: 'destroy', namespace: nil, "id" => 1)
+ end
+ end
+end
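Background for the new database_event_tracking_spec.rb: the concern under test emits a Snowplow event after create/update/destroy and reports, rather than raises, tracking failures. A hedged sketch of that callback shape, with names chosen to mirror the spec (this is not the real DatabaseEventTracking module; it omits the SNOWPLOW_ATTRIBUTES filtering and the feature-flag guard):

# Sketch of the create/update/destroy tracking callbacks the spec exercises.
module DatabaseEventTrackingSketch
  extend ActiveSupport::Concern

  included do
    after_create_commit  { publish_database_event('create') }
    after_update_commit  { publish_database_event('update') }
    after_destroy_commit { publish_database_event('destroy') }
  end

  private

  def publish_database_event(action)
    Gitlab::Tracking.event(self.class.to_s, "database_event_#{action}",
                           label: self.class.table_name, property: action)
  rescue StandardError => e
    Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
  end
end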
diff --git a/spec/models/concerns/expirable_spec.rb b/spec/models/concerns/expirable_spec.rb
index 5eb6530881e..50dfb138ac9 100644
--- a/spec/models/concerns/expirable_spec.rb
+++ b/spec/models/concerns/expirable_spec.rb
@@ -16,6 +16,11 @@ RSpec.describe Expirable do
it { expect(ProjectMember.expired).to match_array([expired]) }
end
+ describe '.not_expired' do
+ it { expect(ProjectMember.not_expired).to include(no_expire, expire_later) }
+ it { expect(ProjectMember.not_expired).not_to include(expired) }
+ end
+
describe '#expired?' do
it { expect(no_expire.expired?).to eq(false) }
it { expect(expire_later.expired?).to eq(false) }
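The new .not_expired scope, together with .expired, splits members on expires_at and treats a NULL expiry as "never expires". One plausible shape for the pair (column name assumed; the SQL in the real concern may differ):

# Plausible scope pair covered by the examples above.
scope :expired,     -> { where('expires_at <= ?', Time.current) }
scope :not_expired, -> { where('expires_at IS NULL OR expires_at > ?', Time.current) }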
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 87821de3cf5..6763cc904b4 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -569,6 +569,27 @@ RSpec.describe Issuable do
end
end
+ context 'merge_request update reviewers' do
+ let(:merge_request) { create(:merge_request) }
+ let(:user2) { create(:user) }
+
+ before do
+ merge_request.update!(reviewers: [user])
+ merge_request.update!(reviewers: [user, user2])
+ expect(Gitlab::DataBuilder::Issuable)
+ .to receive(:new).with(merge_request).and_return(builder)
+ end
+
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
+ expect(builder).to receive(:build).with(
+ user: user,
+ changes: hash_including(
+ 'reviewers' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]]
+ ))
+ merge_request.to_hook_data(user, old_associations: { reviewers: [user] })
+ end
+ end
+
context 'incident severity is updated' do
let(:issue) { create(:incident) }
diff --git a/spec/models/concerns/nullify_if_blank_spec.rb b/spec/models/concerns/nullify_if_blank_spec.rb
index 2d1bdba39dd..b0e229f4c91 100644
--- a/spec/models/concerns/nullify_if_blank_spec.rb
+++ b/spec/models/concerns/nullify_if_blank_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe NullifyIfBlank do
context 'attribute is nil' do
let(:name) { nil }
- it { is_expected.to be_nil}
+ it { is_expected.to be_nil }
end
context 'attribute is not blank' do
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index b92c7c52f0b..f7f68cb38d8 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -124,6 +124,7 @@ RSpec.describe Participable do
end
let(:readable) { true }
+ let(:project) { build(:project, :public) }
it 'returns the list of participants' do
model.participant(:foo)
@@ -132,7 +133,6 @@ RSpec.describe Participable do
user1 = build(:user)
user2 = build(:user)
user3 = build(:user)
- project = build(:project, :public)
instance = model.new
allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
@@ -155,7 +155,6 @@ RSpec.describe Participable do
instance = model.new
user1 = build(:user)
user2 = build(:user)
- project = build(:project, :public)
allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
allow(instance).to receive(:bar).and_return(user2)
@@ -164,6 +163,29 @@ RSpec.describe Participable do
expect(instance.visible_participants(user1)).to be_empty
end
end
+
+ context 'with multiple system notes from the same author and mentioned_users' do
+ let!(:user1) { create(:user) }
+ let!(:user2) { create(:user) }
+
+ it 'skips expensive checks if the author is already in the participants list' do
+ model.participant(:notes)
+
+ instance = model.new
+ note1 = create(:system_note, author: user1)
+ note2 = create(:system_note, author: user1) # only system notes with no mentioned users are skipped
+ note3 = create(:system_note, author: user1, note: "assigned to #{user2.to_reference}")
+ note4 = create(:note, author: user2)
+
+ allow(instance).to receive(:project).and_return(project)
+ allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
+ allow(instance).to receive(:notes).and_return([note1, note2, note3, note4])
+
+ allow(Ability).to receive(:allowed?).with(anything, :read_project, anything).and_return(true)
+ allow(Ability).to receive(:allowed?).with(anything, :read_note, anything).exactly(3).times.and_return(true)
+ expect(instance.visible_participants(user1)).to match_array [user1, user2]
+ end
+ end
end
describe '#participant?' do
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index f2dc8464e86..b49b9ce8a2a 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -5,7 +5,11 @@ require 'spec_helper'
RSpec.describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
let(:features_enabled) { %w(issues wiki builds merge_requests snippets security_and_compliance) }
- let(:features) { features_enabled + %w(repository pages operations container_registry package_registry) }
+ let(:features) do
+ features_enabled + %w(
+ repository pages operations container_registry package_registry environments feature_flags releases
+ )
+ end
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index 5468699f9dd..cb9bb676ede 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -320,7 +320,7 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
stub_reactive_cache(instance, "preexisting")
end
- let(:calculation) { -> { raise "foo"} }
+ let(:calculation) { -> { raise "foo" } }
it 'leaves the cache untouched' do
expect { go! }.to raise_error("foo")
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index a2ce02f4661..3f6bbe795cc 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe ApplicationSetting, 'TokenAuthenticatable' do
subject { described_class.send(:add_authentication_token_field, :runners_registration_token) }
it 'raises error' do
- expect {subject}.to raise_error(ArgumentError)
+ expect { subject }.to raise_error(ArgumentError)
end
end
end
@@ -126,7 +126,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
end
end
- let(:token_value) { 'token' }
+ let(:token_value) { Devise.friendly_token }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
let(:user) { create(:user) }
let(:personal_access_token) do
@@ -442,7 +442,7 @@ RSpec.shared_examples 'prefixed token rotation' do
context 'token is not set' do
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).not_to be_persisted
end
end
@@ -453,7 +453,7 @@ RSpec.shared_examples 'prefixed token rotation' do
end
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).not_to be_persisted
end
end
@@ -475,7 +475,7 @@ RSpec.shared_examples 'prefixed token rotation' do
context 'token is not set' do
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).to be_persisted
end
end
@@ -486,7 +486,7 @@ RSpec.shared_examples 'prefixed token rotation' do
end
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).to be_persisted
end
end
diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb
index 191913ed454..b88eddf19dc 100644
--- a/spec/models/container_expiration_policy_spec.rb
+++ b/spec/models/container_expiration_policy_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe ContainerExpirationPolicy, type: :model do
end
context 'when there are no runnable schedules' do
- let!(:policy) { }
+ let!(:policy) {}
it 'returns an empty array' do
is_expected.to be_empty
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index e35788b1848..a4329993e91 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -525,6 +525,162 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
+ describe '#each_tags_page' do
+ let(:page_size) { 100 }
+
+ shared_examples 'iterating through a page' do |expected_tags: true|
+ it 'iterates through one page' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: nil)
+ .and_return(client_response)
+ expect { |b| repository.each_tags_page(page_size: page_size, &b) }
+ .to yield_with_args(expected_tags ? expected_tags_from(client_response_tags) : [])
+ end
+ end
+
+ context 'with an empty page' do
+ let(:client_response) { { pagination: {}, response_body: [] } }
+
+ it_behaves_like 'iterating through a page', expected_tags: false
+ end
+
+ context 'with one page' do
+ let(:client_response) { { pagination: {}, response_body: client_response_tags } }
+ let(:client_response_tags) do
+ [
+ {
+ 'name' => '0.1.0',
+ 'created_at' => '2022-06-07T12:10:12.412+00:00'
+ },
+ {
+ 'name' => 'latest',
+ 'created_at' => '2022-06-07T12:11:13.633+00:00'
+ }
+ ]
+ end
+
+ context 'with a nil created_at' do
+ let(:client_response_tags) { ['name' => '0.1.0', 'created_at' => nil] }
+
+ it_behaves_like 'iterating through a page'
+ end
+
+ context 'with an invalid created_at' do
+ let(:client_response_tags) { ['name' => '0.1.0', 'created_at' => 'not_a_timestamp'] }
+
+ it_behaves_like 'iterating through a page'
+ end
+ end
+
+ context 'with two pages' do
+ let(:client_response1) { { pagination: { next: { uri: URI('http://localhost/next?last=latest') } }, response_body: client_response_tags1 } }
+ let(:client_response_tags1) do
+ [
+ {
+ 'name' => '0.1.0',
+ 'created_at' => '2022-06-07T12:10:12.412+00:00'
+ },
+ {
+ 'name' => 'latest',
+ 'created_at' => '2022-06-07T12:11:13.633+00:00'
+ }
+ ]
+ end
+
+ let(:client_response2) { { pagination: {}, response_body: client_response_tags2 } }
+ let(:client_response_tags2) do
+ [
+ {
+ 'name' => '1.2.3',
+ 'created_at' => '2022-06-10T12:10:15.412+00:00'
+ },
+ {
+ 'name' => '2.3.4',
+ 'created_at' => '2022-06-11T12:11:17.633+00:00'
+ }
+ ]
+ end
+
+ it 'iterates through two pages' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: nil)
+ .and_return(client_response1)
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: 'latest')
+ .and_return(client_response2)
+ expect { |b| repository.each_tags_page(page_size: page_size, &b) }
+ .to yield_successive_args(expected_tags_from(client_response_tags1), expected_tags_from(client_response_tags2))
+ end
+ end
+
+ context 'when max pages is reached' do
+ before do
+ stub_const('ContainerRepository::MAX_TAGS_PAGES', 0)
+ end
+
+ it 'raises an error' do
+ expect { repository.each_tags_page(page_size: page_size) {} }
+ .to raise_error(StandardError, 'too many pages requested')
+ end
+ end
+
+ context 'without a block set' do
+ it 'raises an Argument error' do
+ expect { repository.each_tags_page(page_size: page_size) }.to raise_error(ArgumentError, 'block not given')
+ end
+ end
+
+ context 'without a page size set' do
+ let(:client_response) { { pagination: {}, response_body: [] } }
+
+ it 'uses a default size' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: 100, last: nil)
+ .and_return(client_response)
+ expect { |b| repository.each_tags_page(&b) }.to yield_with_args([])
+ end
+ end
+
+ context 'with an empty client response' do
+ let(:client_response) { {} }
+
+ it 'breaks the loop' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: nil)
+ .and_return(client_response)
+ expect { |b| repository.each_tags_page(page_size: page_size, &b) }.not_to yield_control
+ end
+ end
+
+ context 'with a nil page' do
+ let(:client_response) { { pagination: {}, response_body: nil } }
+
+ it_behaves_like 'iterating through a page', expected_tags: false
+ end
+
+ context 'calling on a non-migrated repository' do
+ before do
+ repository.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.days)
+ end
+
+ it 'raises an Argument error' do
+ expect { repository.each_tags_page }.to raise_error(ArgumentError, 'not a migrated repository')
+ end
+ end
+
+ def expected_tags_from(client_tags)
+ client_tags.map do |tag|
+ created_at =
+ begin
+ DateTime.iso8601(tag['created_at'])
+ rescue ArgumentError
+ nil
+ end
+ an_object_having_attributes(name: tag['name'], created_at: created_at)
+ end
+ end
+ end
+
describe '#tags_count' do
it 'returns the count of tags' do
expect(repository.tags_count).to eq(1)
@@ -1195,7 +1351,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
described_class::MIGRATION_STATES.each do |state|
context "when in #{state} migration_state" do
- let(:container_repository) { create(:container_repository, state.to_sym)}
+ let(:container_repository) { create(:container_repository, state.to_sym) }
it { is_expected.to eq(state == 'importing' || state == 'pre_importing') }
end
@@ -1207,7 +1363,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
described_class::MIGRATION_STATES.each do |state|
context "when in #{state} migration_state" do
- let(:container_repository) { create(:container_repository, state.to_sym)}
+ let(:container_repository) { create(:container_repository, state.to_sym) }
it { is_expected.to eq(state == 'importing') }
end
@@ -1219,7 +1375,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
described_class::MIGRATION_STATES.each do |state|
context "when in #{state} migration_state" do
- let(:container_repository) { create(:container_repository, state.to_sym)}
+ let(:container_repository) { create(:container_repository, state.to_sym) }
it { is_expected.to eq(state == 'pre_importing') }
end
@@ -1348,6 +1504,28 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
+ describe '#migrated?' do
+ subject { repository.migrated? }
+
+ it { is_expected.to eq(true) }
+
+ context 'with a created_at older than phase 1 ends' do
+ before do
+ repository.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.days)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'with migration state set to import_done' do
+ before do
+ repository.update!(migration_state: 'import_done')
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
context 'with repositories' do
let_it_be_with_reload(:repository) { create(:container_repository, :cleanup_unscheduled) }
let_it_be(:other_repository) { create(:container_repository, :cleanup_unscheduled) }
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index f91546f5240..487af404a7c 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -226,15 +226,58 @@ RSpec.describe CustomerRelations::Contact, type: :model do
end
end
- describe '.sort_by_name' do
+ describe '.counts_by_state' do
+ before do
+ create_list(:contact, 3, group: group)
+ create_list(:contact, 2, group: group, state: 'inactive')
+ end
+
+ it 'returns correct contact counts' do
+ counts = group.contacts.counts_by_state
+
+ expect(counts['active']).to be(3)
+ expect(counts['inactive']).to be(2)
+ end
+ end
+
+ describe 'sorting' do
+ let_it_be(:organization_a) { create(:organization, name: 'a') }
+ let_it_be(:organization_b) { create(:organization, name: 'b') }
let_it_be(:contact_a) { create(:contact, group: group, first_name: "c", last_name: "d") }
- let_it_be(:contact_b) { create(:contact, group: group, first_name: "a", last_name: "b") }
- let_it_be(:contact_c) { create(:contact, group: group, first_name: "e", last_name: "d") }
+ let_it_be(:contact_b) do
+ create(:contact,
+ group: group,
+ first_name: "a",
+ last_name: "b",
+ phone: "123",
+ organization: organization_a)
+ end
- context 'when sorting the contacts' do
- it 'sorts them by last name then first name in ascendent order' do
+ let_it_be(:contact_c) do
+ create(:contact,
+ group: group,
+ first_name: "e",
+ last_name: "d",
+ phone: "456",
+ organization: organization_b)
+ end
+
+ describe '.sort_by_name' do
+ it 'sorts them by last name then first name in ascending order' do
expect(group.contacts.sort_by_name).to eq([contact_b, contact_a, contact_c])
end
end
+
+ describe '.sort_by_organization' do
+ it 'sorts them by organization in descending order' do
+ expect(group.contacts.sort_by_organization(:desc)).to eq([contact_c, contact_b, contact_a])
+ end
+ end
+
+ describe '.sort_by_field' do
+ it 'sorts them by phone in ascending order' do
+ expect(group.contacts.sort_by_field('phone', :asc)).to eq([contact_b, contact_c, contact_a])
+ end
+ end
end
end
diff --git a/spec/models/customer_relations/contact_state_counts_spec.rb b/spec/models/customer_relations/contact_state_counts_spec.rb
new file mode 100644
index 00000000000..a19f6f08489
--- /dev/null
+++ b/spec/models/customer_relations/contact_state_counts_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::ContactStateCounts do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+
+ let(:counter) { described_class.new(user, group, params) }
+ let(:params) { {} }
+
+ before_all do
+ group.add_reporter(user)
+ create(:contact, group: group, first_name: 'filter')
+ create(:contact, group: group, last_name: 'filter')
+ create(:contact, group: group)
+ create(:contact, group: group, state: 'inactive', email: 'filter@example.com')
+ create(:contact, group: group, state: 'inactive')
+ end
+
+ describe '.declarative_policy_class' do
+ subject { described_class.declarative_policy_class }
+
+ it { is_expected.to eq('CustomerRelations::ContactPolicy') }
+ end
+
+ describe '#all' do
+ it 'returns the total number of contacts' do
+ expect(counter.all).to be(5)
+ end
+ end
+
+ describe '#active' do
+ it 'returns the number of active contacts' do
+ expect(counter.active).to be(3)
+ end
+ end
+
+ describe '#inactive' do
+ it 'returns the number of inactive contacts' do
+ expect(counter.inactive).to be(2)
+ end
+ end
+
+ describe 'when filtered' do
+ let(:params) { { search: 'filter' } }
+
+ it '#all returns the number of contacts with a filter' do
+ expect(counter.all).to be(3)
+ end
+
+ it '#active returns the number of active contacts with a filter' do
+ expect(counter.active).to be(2)
+ end
+
+ it '#inactive returns the number of inactive contacts with a filter' do
+ expect(counter.inactive).to be(1)
+ end
+ end
+end
diff --git a/spec/models/data_list_spec.rb b/spec/models/data_list_spec.rb
index 67db2730a78..6e01f4786ba 100644
--- a/spec/models/data_list_spec.rb
+++ b/spec/models/data_list_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe DataList do
let(:zentao_integration) { create(:zentao_integration) }
let(:cases) do
[
- [jira_integration, 'Integrations::JiraTrackerData', 'service_id'],
+ [jira_integration, 'Integrations::JiraTrackerData', 'integration_id'],
[zentao_integration, 'Integrations::ZentaoTrackerData', 'integration_id']
]
end
diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb
index c22bad0e062..3272d5236d3 100644
--- a/spec/models/deploy_key_spec.rb
+++ b/spec/models/deploy_key_spec.rb
@@ -5,17 +5,20 @@ require 'spec_helper'
RSpec.describe DeployKey, :mailer do
describe "Associations" do
it { is_expected.to have_many(:deploy_keys_projects) }
+
it do
is_expected.to have_many(:deploy_keys_projects_with_write_access)
.conditions(can_push: true)
.class_name('DeployKeysProject')
end
+
it do
is_expected.to have_many(:projects_with_write_access)
.class_name('Project')
.through(:deploy_keys_projects_with_write_access)
.source(:project)
end
+
it { is_expected.to have_many(:projects) }
it { is_expected.to have_many(:protected_branch_push_access_levels) }
end
@@ -146,4 +149,10 @@ RSpec.describe DeployKey, :mailer do
end
end
end
+
+ describe '#audit_details' do
+ it "equals to the key's title" do
+ expect(subject.audit_details).to eq(subject.title)
+ end
+ end
end
diff --git a/spec/models/design_management/version_spec.rb b/spec/models/design_management/version_spec.rb
index 303bac61e1e..519ba3c67b4 100644
--- a/spec/models/design_management/version_spec.rb
+++ b/spec/models/design_management/version_spec.rb
@@ -142,14 +142,18 @@ RSpec.describe DesignManagement::Version do
it 'does not leave invalid versions around if creation fails' do
expect do
- described_class.create_for_designs([], 'abcdef', author) rescue nil
+ described_class.create_for_designs([], 'abcdef', author)
+ rescue StandardError
+ nil
end.not_to change { described_class.count }
end
it 'does not leave orphaned design-versions around if creation fails' do
actions = as_actions(designs)
expect do
- described_class.create_for_designs(actions, '', author) rescue nil
+ described_class.create_for_designs(actions, '', author)
+ rescue StandardError
+ nil
end.not_to change { DesignManagement::Action.count }
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index e3207636bdc..3f4372dafd0 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -42,6 +42,48 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe 'validate and sanitize external url' do
+ let_it_be_with_refind(:environment) { create(:environment) }
+
+ where(:source_external_url, :expected_error_message) do
+ nil | nil
+ 'http://example.com' | nil
+ 'example.com' | nil
+ 'www.example.io' | nil
+ 'http://$URL' | nil
+ 'http://$(URL)' | nil
+ 'custom://example.com' | nil
+ '1.1.1.1' | nil
+ '$BASE_URL/${CI_COMMIT_REF_NAME}' | nil
+ '$ENVIRONMENT_URL' | nil
+ 'https://$SUB.$MAIN' | nil
+ 'https://$SUB-$REGION.$MAIN' | nil
+ 'https://example.com?param={()}' | nil
+ 'http://XSS?x=<script>alert(1)</script>' | nil
+ 'https://user:${VARIABLE}@example.io' | nil
+ 'https://example.com/test?param={data}' | nil
+ 'http://${URL}' | 'URI is invalid'
+ 'https://${URL}.example/test' | 'URI is invalid'
+ 'http://test${CI_MERGE_REQUEST_IID}.example.com' | 'URI is invalid'
+ 'javascript:alert("hello")' | 'javascript scheme is not allowed'
+ end
+ with_them do
+ it 'sets an external URL or an error' do
+ environment.external_url = source_external_url
+
+ environment.valid?
+
+ if expected_error_message
+ expect(environment.errors[:external_url].first).to eq(expected_error_message)
+ else
+ expect(environment.errors[:external_url]).to be_empty,
+ "There were unexpected errors: #{environment.errors.full_messages}"
+ expect(environment.external_url).to eq(source_external_url)
+ end
+ end
+ end
+ end
+
describe '.before_save' do
it 'ensures environment tier when a new object is created' do
environment = build(:environment, name: 'gprd', tier: nil)
@@ -194,7 +236,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when query is nil' do
- let(:query) { }
+ let(:query) {}
it 'raises an error' do
expect { subject }.to raise_error(NoMethodError)
@@ -770,16 +812,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it 'returns the successful deployment jobs for the last deployment pipeline' do
expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(batch_load_environment_last_deployment_group: false)
- end
-
- it 'returns the successful deployment jobs for the last deployment pipeline' do
- expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
- end
- end
end
end
@@ -817,8 +849,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
describe '#actions_for' do
let(:deployment) { create(:deployment, :success, environment: environment) }
let(:pipeline) { deployment.deployable.pipeline }
- let!(:review_action) { create(:ci_build, :manual, name: 'review-apps', pipeline: pipeline, environment: 'review/$CI_COMMIT_REF_NAME' )}
- let!(:production_action) { create(:ci_build, :manual, name: 'production', pipeline: pipeline, environment: 'production' )}
+ let!(:review_action) { create(:ci_build, :manual, name: 'review-apps', pipeline: pipeline, environment: 'review/$CI_COMMIT_REF_NAME' ) }
+ let!(:production_action) { create(:ci_build, :manual, name: 'production', pipeline: pipeline, environment: 'production' ) }
it 'returns a list of actions with matching environment' do
expect(environment.actions_for('review/master')).to contain_exactly(review_action)
@@ -993,178 +1025,29 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
describe '#last_visible_deployable' do
subject { environment.last_visible_deployable }
- context 'does not join across databases' do
- let(:pipeline_a) { create(:ci_pipeline, project: project) }
- let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }
-
- before do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
- end
-
- it 'for direct call' do
- with_cross_joins_prevented do
- expect(subject.id).to eq(ci_build_b.id)
- end
- end
-
- it 'for preload' do
- environment.reload
-
- with_cross_joins_prevented do
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_deployable: []])
- expect(subject.id).to eq(ci_build_b.id)
- end
- end
+ let!(:deployment) do
+ create(:deployment, :success, project: project, environment: environment, deployable: deployable)
end
- context 'call after preload' do
- it 'fetches from association cache' do
- pipeline = create(:ci_pipeline, project: project)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build)
-
- environment.reload
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_deployable: []])
-
- query_count = ActiveRecord::QueryRecorder.new do
- expect(subject.id).to eq(ci_build.id)
- end.count
+ let!(:deployable) { create(:ci_build, :success, project: project) }
- expect(query_count).to eq(0)
- end
+ it 'fetches the deployable through the last visible deployment' do
+ is_expected.to eq(deployable)
end
end
describe '#last_visible_pipeline' do
- let(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
-
- let(:environment) { create(:environment, project: project) }
- let(:commit) { project.commit }
-
- let(:success_pipeline) do
- create(:ci_pipeline, :success, project: project, user: user, sha: commit.sha)
- end
-
- let(:failed_pipeline) do
- create(:ci_pipeline, :failed, project: project, user: user, sha: commit.sha)
- end
-
- it 'uses the last deployment even if it failed' do
- pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
+ subject { environment.last_visible_pipeline }
- expect(last_pipeline).to eq(pipeline)
+ let!(:deployment) do
+ create(:deployment, :success, project: project, environment: environment, deployable: deployable)
end
- it 'returns nil if there is no deployment' do
- create(:ci_build, project: project, pipeline: success_pipeline)
+ let!(:deployable) { create(:ci_build, :success, project: project, pipeline: pipeline) }
+ let!(:pipeline) { create(:ci_pipeline, :success, project: project) }
- expect(environment.last_visible_pipeline).to be_nil
- end
-
- it 'does not return an invisible pipeline' do
- failed_pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build_a = create(:ci_build, project: project, pipeline: failed_pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_a, sha: commit.sha)
- pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build_b = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :created, project: project, environment: environment, deployable: ci_build_b, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(failed_pipeline)
- end
-
- context 'does not join across databases' do
- let(:pipeline_a) { create(:ci_pipeline, project: project) }
- let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }
-
- before do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
- end
-
- subject { environment.last_visible_pipeline }
-
- it 'for direct call' do
- with_cross_joins_prevented do
- expect(subject.id).to eq(pipeline_b.id)
- end
- end
-
- it 'for preload' do
- environment.reload
-
- with_cross_joins_prevented do
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_pipeline: []])
- expect(subject.id).to eq(pipeline_b.id)
- end
- end
- end
-
- context 'for the environment' do
- it 'returns the last pipeline' do
- pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(pipeline)
- end
-
- context 'with multiple deployments' do
- it 'returns the last pipeline' do
- pipeline_a = create(:ci_pipeline, project: project, user: user)
- pipeline_b = create(:ci_pipeline, project: project, user: user)
- ci_build_a = create(:ci_build, project: project, pipeline: pipeline_a)
- ci_build_b = create(:ci_build, project: project, pipeline: pipeline_b)
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_b)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(pipeline_b)
- end
- end
-
- context 'with multiple pipelines' do
- it 'returns the last pipeline' do
- create(:ci_build, project: project, pipeline: success_pipeline)
- ci_build_b = create(:ci_build, project: project, pipeline: failed_pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(failed_pipeline)
- end
- end
- end
-
- context 'call after preload' do
- it 'fetches from association cache' do
- pipeline = create(:ci_pipeline, project: project)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build)
-
- environment.reload
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_pipeline: []])
-
- query_count = ActiveRecord::QueryRecorder.new do
- expect(environment.last_visible_pipeline.id).to eq(pipeline.id)
- end.count
-
- expect(query_count).to eq(0)
- end
+ it 'fetches the pipeline through the last visible deployment' do
+ is_expected.to eq(pipeline)
end
end
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index ebfd9f04f6a..0685144dea6 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -121,36 +121,36 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
end
- end
- describe 'before_validation :reset_token' do
- context 'when a token was previously set' do
- subject { create(:project_error_tracking_setting, project: project) }
+ describe 'before_validation :reset_token' do
+ context 'when a token was previously set' do
+ subject { create(:project_error_tracking_setting, project: project) }
- it 'resets token if url changed' do
- subject.api_url = 'http://sentry.com/api/0/projects/org-slug/proj-slug/'
+ it 'resets token if url changed' do
+ subject.api_url = 'http://sentry.com/api/0/projects/org-slug/proj-slug/'
- expect(subject).not_to be_valid
- expect(subject.token).to be_nil
- end
+ expect(subject).not_to be_valid
+ expect(subject.token).to be_nil
+ end
- it "does not reset token if new url is set together with the same token" do
- subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
- current_token = subject.token
- subject.token = current_token
+ it "does not reset token if new url is set together with the same token" do
+ subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
+ current_token = subject.token
+ subject.token = current_token
- expect(subject).to be_valid
- expect(subject.token).to eq(current_token)
- expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
- end
+ expect(subject).to be_valid
+ expect(subject.token).to eq(current_token)
+ expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
+ end
- it 'does not reset token if new url is set together with a new token' do
- subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
- subject.token = 'token'
+ it 'does not reset token if new url is set together with a new token' do
+ subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
+ subject.token = 'token'
- expect(subject).to be_valid
- expect(subject.token).to eq('token')
- expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
+ expect(subject).to be_valid
+ expect(subject.token).to eq('token')
+ expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
+ end
end
end
end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 2c1bbfcb35f..9700852e567 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -264,6 +264,8 @@ RSpec.describe Event do
let(:project) { public_project }
let(:issue) { create(:issue, project: project, author: author, assignees: [assignee]) }
let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) }
+ let(:work_item) { create(:work_item, project: project, author: author) }
+ let(:confidential_work_item) { create(:work_item, :confidential, project: project, author: author) }
let(:project_snippet) { create(:project_snippet, :public, project: project, author: author) }
let(:personal_snippet) { create(:personal_snippet, :public, author: author) }
let(:design) { create(:design, issue: issue, project: project) }
@@ -380,6 +382,28 @@ RSpec.describe Event do
end
end
+ context 'work item event' do
+ context 'for non confidential work item' do
+ let(:target) { work_item }
+
+ include_examples 'visibility examples' do
+ let(:visibility) { visible_to_all }
+ end
+
+ include_examples 'visible to assignee and author', true
+ end
+
+ context 'for confidential work item' do
+ let(:target) { confidential_work_item }
+
+ include_examples 'visibility examples' do
+ let(:visibility) { visible_to_none_except(:member, :admin) }
+ end
+
+ include_examples 'visible to author', true
+ end
+ end
+
context 'issue note event' do
context 'on non confidential issues' do
let(:target) { note_on_issue }
@@ -947,7 +971,7 @@ RSpec.describe Event do
let_it_be(:user) { create(:user) }
let_it_be(:note_on_project_snippet) { create(:note_on_project_snippet, author: user) }
let_it_be(:note_on_personal_snippet) { create(:note_on_personal_snippet, author: user) }
- let_it_be(:other_note) { create(:note_on_issue, author: user)}
+ let_it_be(:other_note) { create(:note_on_issue, author: user) }
let_it_be(:personal_snippet_event) { create(:event, :commented, project: nil, target: note_on_personal_snippet, author: user) }
let_it_be(:project_snippet_event) { create(:event, :commented, project: note_on_project_snippet.project, target: note_on_project_snippet, author: user) }
let_it_be(:other_event) { create(:event, :commented, project: other_note.project, target: other_note, author: user) }
diff --git a/spec/models/group_group_link_spec.rb b/spec/models/group_group_link_spec.rb
index 72c700e7981..969987c7e64 100644
--- a/spec/models/group_group_link_spec.rb
+++ b/spec/models/group_group_link_spec.rb
@@ -24,12 +24,60 @@ RSpec.describe GroupGroupLink do
it 'returns all records which are greater than Guests access' do
expect(described_class.non_guests).to match_array([
- group_group_link_reporter, group_group_link,
- group_group_link_maintainer, group_group_link_owner
+ group_group_link_reporter, group_group_link,
+ group_group_link_maintainer, group_group_link_owner
])
end
end
+ describe '.with_owner_or_maintainer_access' do
+ let_it_be(:group_group_link_maintainer) { create :group_group_link, :maintainer }
+ let_it_be(:group_group_link_owner) { create :group_group_link, :owner }
+ let_it_be(:group_group_link_reporter) { create :group_group_link, :reporter }
+ let_it_be(:group_group_link_guest) { create :group_group_link, :guest }
+
+ it 'returns all records which have OWNER or MAINTAINER access' do
+ expect(described_class.with_owner_or_maintainer_access).to match_array([
+ group_group_link_maintainer,
+ group_group_link_owner
+ ])
+ end
+ end
+
+ context 'for access via group shares' do
+ let_it_be(:shared_with_group_1) { create(:group) }
+ let_it_be(:shared_with_group_2) { create(:group) }
+ let_it_be(:shared_with_group_3) { create(:group) }
+ let_it_be(:shared_group_1) { create(:group) }
+ let_it_be(:shared_group_2) { create(:group) }
+ let_it_be(:shared_group_3) { create(:group) }
+ let_it_be(:shared_group_1_subgroup) { create(:group, parent: shared_group_1) }
+
+ before do
+ create :group_group_link, shared_with_group: shared_with_group_1, shared_group: shared_group_1
+ create :group_group_link, shared_with_group: shared_with_group_2, shared_group: shared_group_2
+ create :group_group_link, shared_with_group: shared_with_group_3, shared_group: shared_group_3
+ end
+
+ describe '.groups_accessible_via' do
+ it 'returns the other groups that can be accessed via the group shares of the specified groups' do
+ group_ids = [shared_with_group_1.id, shared_with_group_2.id]
+ expected_result = Group.id_in([shared_group_1.id, shared_group_1_subgroup.id, shared_group_2.id])
+
+ expect(described_class.groups_accessible_via(group_ids)).to match_array(expected_result)
+ end
+ end
+
+ describe '.groups_having_access_to' do
+ it 'returns all other groups that have access to the specified groups via group shares' do
+ group_ids = [shared_group_1.id, shared_group_2.id]
+ expected_result = Group.id_in([shared_with_group_1.id, shared_with_group_2.id])
+
+ expect(described_class.groups_having_access_to(group_ids)).to match_array(expected_result)
+ end
+ end
+ end
+
describe '.distinct_on_shared_with_group_id_with_group_access' do
let_it_be(:sub_shared_group) { create(:group, parent: shared_group) }
let_it_be(:other_group) { create(:group) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index e8e805b2678..61662411ac8 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -359,7 +359,7 @@ RSpec.describe Group do
context 'parent is updated' do
let(:new_parent) { create(:group) }
- subject {group.update!(parent: new_parent, name: 'new name') }
+ subject { group.update!(parent: new_parent, name: 'new name') }
it_behaves_like 'update on column', :traversal_ids
end
@@ -806,6 +806,20 @@ RSpec.describe Group do
end
end
+ describe '.project_creation_allowed' do
+ let_it_be(:group_1) { create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS) }
+ let_it_be(:group_2) { create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
+ let_it_be(:group_3) { create(:group, project_creation_level: Gitlab::Access::MAINTAINER_PROJECT_ACCESS) }
+ let_it_be(:group_4) { create(:group, project_creation_level: nil) }
+
+ it 'only includes groups where project creation is allowed' do
+ result = described_class.project_creation_allowed
+
+ expect(result).to include(group_2, group_3, group_4)
+ expect(result).not_to include(group_1)
+ end
+ end
+
describe 'by_ids_or_paths' do
let(:group_path) { 'group_path' }
let!(:group) { create(:group, path: group_path) }
@@ -2603,7 +2617,11 @@ RSpec.describe Group do
it 'does not enable shared runners' do
expect do
- subject rescue nil
+ begin
+ subject
+ rescue StandardError
+ nil
+ end
parent.reload
group.reload
@@ -2704,7 +2722,11 @@ RSpec.describe Group do
it 'does not allow descendants to override' do
expect do
- subject rescue nil
+ begin
+ subject
+ rescue StandardError
+ nil
+ end
parent.reload
group.reload
@@ -2848,7 +2870,7 @@ RSpec.describe Group do
end
context 'for subgroup project' do
- let_it_be(:subgroup) { create(:group, :private, parent: group)}
+ let_it_be(:subgroup) { create(:group, :private, parent: group) }
let_it_be(:project) { create(:project, group: subgroup, service_desk_enabled: true) }
it { is_expected.to eq(true) }
@@ -3383,6 +3405,20 @@ RSpec.describe Group do
end
end
+ describe '#work_items_mvc_2_feature_flag_enabled?' do
+ it_behaves_like 'checks self and root ancestor feature flag' do
+ let(:feature_flag) { :work_items_mvc_2 }
+ let(:feature_flag_method) { :work_items_mvc_2_feature_flag_enabled? }
+ end
+ end
+
+ describe '#work_items_create_from_markdown_feature_flag_enabled?' do
+ it_behaves_like 'checks self and root ancestor feature flag' do
+ let(:feature_flag) { :work_items_create_from_markdown }
+ let(:feature_flag_method) { :work_items_create_from_markdown_feature_flag_enabled? }
+ end
+ end
+
describe 'group shares' do
let!(:sub_group) { create(:group, parent: group) }
let!(:sub_sub_group) { create(:group, parent: sub_group) }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index 9f5f81dd6c0..f4786083b75 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe SystemHook do
let(:project) { create(:project, namespace: user.namespace) }
let(:group) { create(:group) }
let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jg@example.com', password: 'mydummypass' }
+ { name: 'John Doe', username: 'jduser', email: 'jg@example.com', password: User.random_password }
end
before do
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 9faa5e1567c..036d2effc0f 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -482,12 +482,6 @@ RSpec.describe WebHook do
expect(hook).not_to be_temporarily_disabled
end
-
- it 'can ignore the feature flag' do
- stub_feature_flags(web_hooks_disable_failed: false)
-
- expect(hook).to be_temporarily_disabled(ignore_flag: true)
- end
end
end
@@ -510,12 +504,6 @@ RSpec.describe WebHook do
expect(hook).not_to be_permanently_disabled
end
-
- it 'can ignore the feature flag' do
- stub_feature_flags(web_hooks_disable_failed: false)
-
- expect(hook).to be_permanently_disabled(ignore_flag: true)
- end
end
end
diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb
index 39d1fb325f5..f87e6e8327a 100644
--- a/spec/models/incident_management/issuable_escalation_status_spec.rb
+++ b/spec/models/incident_management/issuable_escalation_status_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe IncidentManagement::IssuableEscalationStatus do
describe 'associations' do
it { is_expected.to belong_to(:issue) }
+
it do
is_expected.to have_one(:project).through(:issue).inverse_of(:incident_management_issuable_escalation_statuses)
end
diff --git a/spec/models/incident_management/timeline_event_spec.rb b/spec/models/incident_management/timeline_event_spec.rb
index 17150fc9266..9f4011fe6a7 100644
--- a/spec/models/incident_management/timeline_event_spec.rb
+++ b/spec/models/incident_management/timeline_event_spec.rb
@@ -47,11 +47,20 @@ RSpec.describe IncidentManagement::TimelineEvent do
describe '#cache_markdown_field' do
let(:note) { 'note **bold** _italic_ `code` ![image](/path/img.png) :+1:👍' }
+
+ let(:expected_image_html) do
+ '<a class="with-attachment-icon" href="/path/img.png" target="_blank" rel="noopener noreferrer">image</a>'
+ end
+
+ # rubocop:disable Layout/LineLength
+ let(:expected_emoji_html) do
+ '<gl-emoji title="thumbs up sign" data-name="thumbsup" data-unicode-version="6.0">👍</gl-emoji><gl-emoji title="thumbs up sign" data-name="thumbsup" data-unicode-version="6.0">👍</gl-emoji>'
+ end
+
let(:expected_note_html) do
- # rubocop:disable Layout/LineLength
- '<p>note <strong>bold</strong> <em>italic</em> <code>code</code> <a class="with-attachment-icon" href="/path/img.png" target="_blank" rel="noopener noreferrer">image</a> 👍👍</p>'
- # rubocop:enable Layout/LineLength
+ %Q(<p>note <strong>bold</strong> <em>italic</em> <code>code</code> #{expected_image_html} #{expected_emoji_html}</p>)
end
+ # rubocop:enable Layout/LineLength
before do
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_call_original
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 86074765c7b..950f2c639fb 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -11,9 +11,8 @@ RSpec.describe Integration do
describe "Associations" do
it { is_expected.to belong_to(:project).inverse_of(:integrations) }
it { is_expected.to belong_to(:group).inverse_of(:integrations) }
- it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:service_id) }
- it { is_expected.to have_one(:issue_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::IssueTrackerData') }
- it { is_expected.to have_one(:jira_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::JiraTrackerData') }
+ it { is_expected.to have_one(:issue_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:integration_id).class_name('Integrations::IssueTrackerData') }
+ it { is_expected.to have_one(:jira_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:integration_id).class_name('Integrations::JiraTrackerData') }
end
describe 'validations' do
diff --git a/spec/models/integrations/bamboo_spec.rb b/spec/models/integrations/bamboo_spec.rb
index 574b87d6c60..e92226d109f 100644
--- a/spec/models/integrations/bamboo_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:build_key) }
it { is_expected.to validate_presence_of(:bamboo_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :bamboo_url
describe '#username' do
diff --git a/spec/models/integrations/bugzilla_spec.rb b/spec/models/integrations/bugzilla_spec.rb
index 432306c8fa8..f05bc26d066 100644
--- a/spec/models/integrations/bugzilla_spec.rb
+++ b/spec/models/integrations/bugzilla_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::Bugzilla do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
it_behaves_like 'issue tracker integration URL attribute', :issues_url
it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
diff --git a/spec/models/integrations/buildkite_spec.rb b/spec/models/integrations/buildkite_spec.rb
index af2e587dc7b..38ceb5db49c 100644
--- a/spec/models/integrations/buildkite_spec.rb
+++ b/spec/models/integrations/buildkite_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:token) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
end
diff --git a/spec/models/integrations/campfire_spec.rb b/spec/models/integrations/campfire_spec.rb
index 48e24299bbd..a6bcd22b6f6 100644
--- a/spec/models/integrations/campfire_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Integrations::Campfire do
describe 'Validations' do
it { is_expected.to validate_numericality_of(:room).is_greater_than(0).only_integer }
- it { is_expected.to validate_length_of(:subdomain).is_at_most(63) }
+ it { is_expected.to validate_length_of(:subdomain).is_at_least(1).is_at_most(63).allow_blank }
it { is_expected.to allow_value("foo").for(:subdomain) }
it { is_expected.not_to allow_value("foo.bar").for(:subdomain) }
it { is_expected.not_to allow_value("foo.bar/#").for(:subdomain) }
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index 7026a314b78..4a86322cdaf 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
end
it 'returns a message regarding closing of issues' do
- expect(subject.pretext). to eq(
+ expect(subject.pretext).to eq(
'[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> closed by Test User (test.user)')
expect(subject.attachments).to be_empty
end
@@ -111,7 +111,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
end
it 'returns a message regarding closing of issues' do
- expect(subject.pretext). to eq(
+ expect(subject.pretext).to eq(
'[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) closed by Test User (test.user)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
diff --git a/spec/models/integrations/chat_message/wiki_page_message_spec.rb b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
index 4aa96c7e031..16659311c52 100644
--- a/spec/models/integrations/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
let(:username) { 'test.user' }
let(:avatar_url) { 'http://someavatar.com' }
let(:project_name) { 'project_name' }
- let(:project_url) {'http://somewhere.com' }
+ let(:project_url) { 'http://somewhere.com' }
let(:url) { 'http://url.com' }
let(:diff_url) { 'http://url.com/diff?version_id=1234' }
let(:wiki_page_title) { 'Wiki page title' }
diff --git a/spec/models/integrations/custom_issue_tracker_spec.rb b/spec/models/integrations/custom_issue_tracker_spec.rb
index e1ffe7a74f0..11f98b99bbe 100644
--- a/spec/models/integrations/custom_issue_tracker_spec.rb
+++ b/spec/models/integrations/custom_issue_tracker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::CustomIssueTracker do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
it_behaves_like 'issue tracker integration URL attribute', :issues_url
it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index 47f916e8457..cfc44b22a84 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -240,20 +240,4 @@ RSpec.describe Integrations::Datadog do
end
end
end
-
- describe '#fields' do
- it 'includes the archive_trace_events field' do
- expect(instance.fields).to include(have_attributes(name: 'archive_trace_events'))
- end
-
- context 'when the FF :datadog_integration_logs_collection is disabled' do
- before do
- stub_feature_flags(datadog_integration_logs_collection: false)
- end
-
- it 'does not include the archive_trace_events field' do
- expect(instance.fields).not_to include(have_attributes(name: 'archive_trace_events'))
- end
- end
- end
end
diff --git a/spec/models/integrations/drone_ci_spec.rb b/spec/models/integrations/drone_ci_spec.rb
index 5ae4af1a665..8a51f8a0705 100644
--- a/spec/models/integrations/drone_ci_spec.rb
+++ b/spec/models/integrations/drone_ci_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:token) }
it { is_expected.to validate_presence_of(:drone_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :drone_url
end
diff --git a/spec/models/integrations/ewm_spec.rb b/spec/models/integrations/ewm_spec.rb
index 49681fefe55..dc48a2c982f 100644
--- a/spec/models/integrations/ewm_spec.rb
+++ b/spec/models/integrations/ewm_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::Ewm do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
it_behaves_like 'issue tracker integration URL attribute', :issues_url
it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
diff --git a/spec/models/integrations/external_wiki_spec.rb b/spec/models/integrations/external_wiki_spec.rb
index 1621605d39f..8644e20690c 100644
--- a/spec/models/integrations/external_wiki_spec.rb
+++ b/spec/models/integrations/external_wiki_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Integrations::ExternalWiki do
end
it { is_expected.to validate_presence_of(:external_wiki_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :external_wiki_url
end
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 5d8597969a1..3952495119a 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Integrations::Harbor do
it { is_expected.not_to allow_value('https://192.168.1.1').for(:url) }
it { is_expected.not_to allow_value('https://127.0.0.1').for(:url) }
- it { is_expected.to allow_value('https://demo.goharbor.io').for(:url)}
+ it { is_expected.to allow_value('https://demo.goharbor.io').for(:url) }
end
describe '#fields' do
@@ -63,6 +63,8 @@ RSpec.describe Integrations::Harbor do
it 'returns vars when harbor_integration is activated' do
ci_vars = [
{ key: 'HARBOR_URL', value: url },
+ { key: 'HARBOR_HOST', value: 'demo.goharbor.io' },
+ { key: 'HARBOR_OCI', value: 'oci://demo.goharbor.io' },
{ key: 'HARBOR_PROJECT', value: project_name },
{ key: 'HARBOR_USERNAME', value: username },
{ key: 'HARBOR_PASSWORD', value: password, public: false, masked: true }
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 01c08a0948f..a52a4514ebe 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -619,6 +619,18 @@ RSpec.describe Integrations::Jira do
close_issue
end
+ it_behaves_like 'Snowplow event tracking' do
+ subject { close_issue }
+
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { 'Integrations::Jira' }
+ let(:action) { 'perform_integrations_action' }
+ let(:namespace) { project.namespace }
+ let(:user) { current_user }
+ let(:label) { 'redis_hll_counters.ecosystem.ecosystem_total_unique_counts_monthly' }
+ let(:property) { 'i_ecosystem_jira_service_close_issue' }
+ end
+
it 'does not fail if remote_link.all on issue returns nil' do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return(nil)
@@ -962,6 +974,16 @@ RSpec.describe Integrations::Jira do
subject
end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { 'Integrations::Jira' }
+ let(:action) { 'perform_integrations_action' }
+ let(:namespace) { project.namespace }
+ let(:user) { current_user }
+ let(:label) { 'redis_hll_counters.ecosystem.ecosystem_total_unique_counts_monthly' }
+ let(:property) { 'i_ecosystem_jira_service_cross_reference' }
+ end
end
context 'for commits' do
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index af6c142525c..b1b3e42b5e9 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe Integrations::MicrosoftTeams do
end
it { is_expected.to validate_presence_of(:webhook) }
+
it_behaves_like 'issue tracker integration URL attribute', :webhook
end
diff --git a/spec/models/integrations/pumble_spec.rb b/spec/models/integrations/pumble_spec.rb
new file mode 100644
index 00000000000..8b9b5d214c6
--- /dev/null
+++ b/spec/models/integrations/pumble_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Integrations::Pumble do
+ it_behaves_like "chat integration", "Pumble" do
+ let(:client_arguments) { webhook_url }
+ let(:payload) do
+ {
+ text: be_present
+ }
+ end
+ end
+end
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index 5801a4c3749..ed282f1d39d 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Integrations::Slack do
it_behaves_like Integrations::SlackMattermostNotifier, "Slack"
describe '#execute' do
- let_it_be(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all') }
+ let(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all', project_id: project.id) }
+ let(:project) { create_default(:project, :repository, :wiki_repo) }
before do
stub_request(:post, slack_integration.webhook)
@@ -20,13 +21,23 @@ RSpec.describe Integrations::Slack do
context 'hook data includes a user object' do
let_it_be(:user) { create_default(:user) }
- let_it_be(:project) { create_default(:project, :repository, :wiki_repo) }
shared_examples 'increases the usage data counter' do |event_name|
+ subject(:execute) { slack_integration.execute(data) }
+
it 'increases the usage data counter' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: user.id).and_call_original
- slack_integration.execute(data)
+ execute
+ end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { 'Integrations::Slack' }
+ let(:action) { 'perform_integrations_action' }
+ let(:namespace) { project.namespace }
+ let(:label) { 'redis_hll_counters.ecosystem.ecosystem_total_unique_counts_monthly' }
+ let(:property) { event_name }
end
end
diff --git a/spec/models/integrations/teamcity_spec.rb b/spec/models/integrations/teamcity_spec.rb
index 046476225a6..da559264c1e 100644
--- a/spec/models/integrations/teamcity_spec.rb
+++ b/spec/models/integrations/teamcity_spec.rb
@@ -76,6 +76,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:build_type) }
it { is_expected.to validate_presence_of(:teamcity_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :teamcity_url
describe '#username' do
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 89c440dc49c..15fe6d7625a 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -69,7 +69,57 @@ RSpec.describe Issue do
end
describe 'validations' do
- subject { issue.valid? }
+ subject(:valid?) { issue.valid? }
+
+ describe 'due_date_after_start_date' do
+ let(:today) { Date.today }
+
+ context 'when both values are not present' do
+ let(:issue) { build(:issue) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when start date is present and due date is not' do
+ let(:issue) { build(:work_item, start_date: today) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when due date is present and start date is not' do
+ let(:issue) { build(:work_item, due_date: today) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when both date values are present' do
+ context 'when due date is greater than start date' do
+ let(:issue) { build(:work_item, start_date: today, due_date: 1.week.from_now) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when due date is equal to start date' do
+ let(:issue) { build(:work_item, start_date: today, due_date: today) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when due date is before start date' do
+ let(:issue) { build(:work_item, due_date: today, start_date: 1.week.from_now) }
+
+ it { is_expected.to be_falsey }
+
+ it 'adds an error message' do
+ valid?
+
+ expect(issue.errors.full_messages).to contain_exactly(
+ 'Due date must be greater than or equal to start date'
+ )
+ end
+ end
+ end
+ end
describe 'issue_type' do
let(:issue) { build(:issue, issue_type: issue_type) }
@@ -86,6 +136,54 @@ RSpec.describe Issue do
it { is_expected.to eq(false) }
end
end
+
+ describe 'confidentiality' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when parent and child are confidential' do
+ let_it_be(:parent) { create(:work_item, confidential: true, project: project) }
+ let_it_be(:child) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow to make child not-confidential' do
+ issue = Issue.find(child.id)
+ issue.confidential = false
+
+ expect(issue).not_to be_valid
+ expect(issue.errors[:confidential])
+ .to include('associated parent is confidential and can not have non-confidential children.')
+ end
+
+ it 'allows to make parent not-confidential' do
+ issue = Issue.find(parent.id)
+ issue.confidential = false
+
+ expect(issue).to be_valid
+ end
+ end
+
+ context 'when parent and child are not-confidential' do
+ let_it_be(:parent) { create(:work_item, project: project) }
+ let_it_be(:child) { create(:work_item, :task, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow to make parent confidential' do
+ issue = Issue.find(parent.id)
+ issue.confidential = true
+
+ expect(issue).not_to be_valid
+ expect(issue.errors[:confidential])
+ .to include('confidential parent can not be used if there are non-confidential children.')
+ end
+
+ it 'allows to make child confidential' do
+ issue = Issue.find(child.id)
+ issue.confidential = true
+
+ expect(issue).to be_valid
+ end
+ end
+ end
end
subject { create(:issue, project: reusable_project) }
@@ -124,6 +222,61 @@ RSpec.describe Issue do
end
end
+ describe '#ensure_work_item_type' do
+ let_it_be(:issue_type) { create(:work_item_type, :issue, :default) }
+ let_it_be(:task_type) { create(:work_item_type, :task) }
+ let_it_be(:project) { create(:project) }
+
+ context 'when a type was already set' do
+ let_it_be(:issue, refind: true) { create(:issue, project: project) }
+
+ it 'does not fetch a work item type from the DB' do
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+ expect(WorkItems::Type).not_to receive(:default_by_type)
+
+ expect(issue).to be_valid
+ end
+
+ it 'does not fetch a work item type from the DB when updating the type' do
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+ expect(WorkItems::Type).not_to receive(:default_by_type)
+
+ issue.update!(work_item_type: task_type, issue_type: 'task')
+
+ expect(issue.work_item_type_id).to eq(task_type.id)
+ end
+
+ it 'ensures a work item type if updated to nil' do
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+
+ expect do
+ issue.update!(work_item_type: nil)
+ end.to not_change(issue, :work_item_type).from(issue_type)
+ end
+ end
+
+ context 'when no type was set' do
+ let_it_be(:issue, refind: true) { build(:issue, project: project, work_item_type: nil).tap { |issue| issue.save!(validate: false) } }
+
+ it 'sets a work item type before validation' do
+ expect(issue.work_item_type_id).to be_nil
+
+ issue.save!
+
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+ end
+
+ it 'does not fetch type from DB if provided during update' do
+ expect(issue.work_item_type_id).to be_nil
+ expect(WorkItems::Type).not_to receive(:default_by_type)
+
+ issue.update!(work_item_type: task_type, issue_type: 'task')
+
+ expect(issue.work_item_type_id).to eq(task_type.id)
+ end
+ end
+ end
+
describe '#record_create_action' do
it 'records the creation action after saving' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
@@ -289,7 +442,7 @@ RSpec.describe Issue do
# TODO: Remove when NOT NULL constraint is added to the relationship
describe '#work_item_type' do
- let(:issue) { create(:issue, :incident, project: reusable_project, work_item_type: nil) }
+ let(:issue) { build(:issue, :incident, project: reusable_project, work_item_type: nil).tap { |issue| issue.save!(validate: false) } }
it 'returns a default type if the legacy issue does not have a work item type associated yet' do
expect(issue.work_item_type_id).to be_nil
@@ -493,7 +646,7 @@ RSpec.describe Issue do
let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
- let_it_be(:authorized_incident_a) { create(:incident, project: authorized_project )}
+ let_it_be(:authorized_incident_a) { create(:incident, project: authorized_project ) }
let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
@@ -550,7 +703,7 @@ RSpec.describe Issue do
subject { issue.can_move?(user) }
context 'user is not a member of project issue belongs to' do
- it { is_expected.to eq false}
+ it { is_expected.to eq false }
end
context 'user is reporter in project issue belongs to' do
@@ -1074,7 +1227,7 @@ RSpec.describe Issue do
end
context 'when issue is moved to a private project' do
- let(:private_project) { build(:project, :private)}
+ let(:private_project) { build(:project, :private) }
before do
issue.update!(project: private_project) # move issue to private project
@@ -1621,4 +1774,20 @@ RSpec.describe Issue do
end
end
end
+
+ describe '#full_search' do
+ context 'when searching non-english terms' do
+ [
+ 'abc 中文語',
+ '中文語cn',
+ '中文語'
+ ].each do |term|
+ it 'adds extra where clause to match partial index' do
+ expect(described_class.full_search(term).to_sql).to include(
+ "AND (issues.title NOT SIMILAR TO '[\\u0000-\\u218F]*' OR issues.description NOT SIMILAR TO '[\\u0000-\\u218F]*')"
+ )
+ end
+ end
+ end
+ end
end
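Note: the 'due_date_after_start_date' examples added above expect the full error message 'Due date must be greater than or equal to start date'. A minimal sketch of a custom validation consistent with that behaviour; the method name is illustrative, not the exact model code.

validate :due_date_after_start_date

def due_date_after_start_date
  return unless start_date && due_date

  if due_date < start_date
    # Produces the full message 'Due date must be greater than or equal to start date'.
    errors.add(:due_date, 'must be greater than or equal to start date')
  end
end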
diff --git a/spec/models/jira_import_state_spec.rb b/spec/models/jira_import_state_spec.rb
index a272d001429..95e9594f885 100644
--- a/spec/models/jira_import_state_spec.rb
+++ b/spec/models/jira_import_state_spec.rb
@@ -25,25 +25,25 @@ RSpec.describe JiraImportState do
let(:project) { create(:project) }
context 'when project has an initial jira_import' do
- let!(:jira_import) { create(:jira_import_state, project: project)}
+ let!(:jira_import) { create(:jira_import_state, project: project) }
it_behaves_like 'multiple running imports not allowed'
end
context 'when project has a scheduled jira_import' do
- let!(:jira_import) { create(:jira_import_state, :scheduled, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :scheduled, project: project) }
it_behaves_like 'multiple running imports not allowed'
end
context 'when project has a started jira_import' do
- let!(:jira_import) { create(:jira_import_state, :started, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :started, project: project) }
it_behaves_like 'multiple running imports not allowed'
end
context 'when project has a failed jira_import' do
- let!(:jira_import) { create(:jira_import_state, :failed, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :failed, project: project) }
it 'returns valid' do
new_import = build(:jira_import_state, project: project)
@@ -54,7 +54,7 @@ RSpec.describe JiraImportState do
end
context 'when project has a finished jira_import' do
- let!(:jira_import) { create(:jira_import_state, :finished, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :finished, project: project) }
it 'returns valid' do
new_import = build(:jira_import_state, project: project)
@@ -83,40 +83,40 @@ RSpec.describe JiraImportState do
let(:project) { create(:project) }
context 'when jira import is in initial state' do
- let!(:jira_import) { build(:jira_import_state, project: project)}
+ let!(:jira_import) { build(:jira_import_state, project: project) }
it_behaves_like 'can transition', [:schedule, :do_fail]
it_behaves_like 'cannot transition', [:start, :finish]
end
context 'when jira import is in scheduled state' do
- let!(:jira_import) { build(:jira_import_state, :scheduled, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :scheduled, project: project) }
it_behaves_like 'can transition', [:start, :do_fail]
it_behaves_like 'cannot transition', [:finish]
end
context 'when jira import is in started state' do
- let!(:jira_import) { build(:jira_import_state, :started, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :started, project: project) }
it_behaves_like 'can transition', [:finish, :do_fail]
it_behaves_like 'cannot transition', [:schedule]
end
context 'when jira import is in failed state' do
- let!(:jira_import) { build(:jira_import_state, :failed, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :failed, project: project) }
it_behaves_like 'cannot transition', [:schedule, :finish, :do_fail]
end
context 'when jira import is in finished state' do
- let!(:jira_import) { build(:jira_import_state, :finished, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :finished, project: project) }
it_behaves_like 'cannot transition', [:schedule, :do_fail, :start]
end
context 'after transition to scheduled' do
- let!(:jira_import) { build(:jira_import_state, project: project)}
+ let!(:jira_import) { build(:jira_import_state, project: project) }
it 'triggers the import job' do
expect(Gitlab::JiraImport::Stage::StartImportWorker).to receive(:perform_async).and_return('some-job-id')
@@ -129,7 +129,7 @@ RSpec.describe JiraImportState do
end
context 'after transition to finished' do
- let!(:jira_import) { build(:jira_import_state, :started, jid: 'some-other-jid', project: project)}
+ let!(:jira_import) { build(:jira_import_state, :started, jid: 'some-other-jid', project: project) }
subject { jira_import.finish }
@@ -172,7 +172,7 @@ RSpec.describe JiraImportState do
end
context 'when jira import has no error_message' do
- let(:jira_import) { build(:jira_import_state, project: project)}
+ let(:jira_import) { build(:jira_import_state, project: project) }
it 'does not run the callback', :aggregate_failures do
expect { jira_import.save! }.to change { JiraImportState.count }.by(1)
@@ -181,7 +181,7 @@ RSpec.describe JiraImportState do
end
context 'when jira import error_message does not exceed the limit' do
- let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error')}
+ let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error') }
it 'does not run the callback', :aggregate_failures do
expect { jira_import.save! }.to change { JiraImportState.count }.by(1)
@@ -190,7 +190,7 @@ RSpec.describe JiraImportState do
end
context 'when error_message exceeds limit' do
- let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error message longer than the limit')}
+ let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error message longer than the limit') }
it 'truncates error_message to the limit', :aggregate_failures do
expect { jira_import.save! }.to change { JiraImportState.count }.by(1)
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index 5210709a468..c25d0451f18 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -193,9 +193,9 @@ RSpec.describe LfsObject do
end
describe '.unreferenced_in_batches' do
- let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1') }
+ let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1' * 64) }
let!(:referenced_lfs_object) { create(:lfs_objects_project).lfs_object }
- let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2') }
+ let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2' * 64) }
it 'returns lfs objects in batches' do
stub_const('LfsObject::BATCH_SIZE', 1)
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index 23e0ed1f39d..9ee5b7340f3 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -94,14 +94,6 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
end
it { is_expected.to eq(true) }
-
- context 'when the lfk_automatic_partition_creation FF is off' do
- before do
- stub_feature_flags(lfk_automatic_partition_creation: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
@@ -126,14 +118,6 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
end
it { is_expected.to eq(true) }
-
- context 'when the lfk_automatic_partition_dropping FF is off' do
- before do
- stub_feature_flags(lfk_automatic_partition_dropping: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 286167c918f..2716244b7f3 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Member do
describe 'Associations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:member_namespace) }
+ it { is_expected.to belong_to(:member_role) }
it { is_expected.to have_one(:member_task) }
end
@@ -166,6 +167,36 @@ RSpec.describe Member do
end
end
end
+
+ context 'member role access level' do
+ let_it_be(:member) { create(:group_member, access_level: Gitlab::Access::DEVELOPER) }
+
+ context 'no member role is associated' do
+ it 'is valid' do
+ expect(member).to be_valid
+ end
+ end
+
+ context 'member role is associated' do
+ let_it_be(:member_role) do
+ create(:member_role, members: [member])
+ end
+
+ context 'member role matches access level' do
+ it 'is valid' do
+ expect(member).to be_valid
+ end
+ end
+
+ context 'member role does not match access level' do
+ it 'is invalid' do
+ member_role.base_access_level = Gitlab::Access::MAINTAINER
+
+ expect(member).not_to be_valid
+ end
+ end
+ end
+ end
end
describe 'Scopes & finders' do
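Note: the new 'member role access level' examples above imply a validation that a member's access_level must match the base_access_level of any associated member role. A sketch of such a validation; the method name and error wording are assumptions.

validate :access_level_matches_member_role, if: :member_role

def access_level_matches_member_role
  return if member_role.base_access_level == access_level

  errors.add(:access_level, 'must match the base access level of the member role')
end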
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 94032146f51..c6266f15340 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -165,78 +165,28 @@ RSpec.describe GroupMember do
let_it_be(:project_b) { create(:project, group: group) }
let_it_be(:project_c) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
- let_it_be(:affected_project_ids) { Project.id_in([project_a, project_b, project_c]).ids }
- before do
- stub_const(
- "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
- affected_project_ids.size - 1)
- end
-
- shared_examples_for 'calls UserProjectAccessChangedService to recalculate authorizations' do
- it 'calls UserProjectAccessChangedService to recalculate authorizations' do
- expect_next_instance_of(UserProjectAccessChangedService, user.id) do |service|
- expect(service).to receive(:execute).with(blocking: blocking)
- end
+ shared_examples_for 'calls AuthorizedProjectsWorker inline to recalculate authorizations' do
+ # this is in line with the overridden behaviour in stubbed_member.rb
+ it 'calls AuthorizedProjectsWorker inline to recalculate authorizations' do
+ worker_instance = AuthorizedProjectsWorker.new
+ expect(AuthorizedProjectsWorker).to receive(:new).and_return(worker_instance)
+ expect(worker_instance).to receive(:perform).with(user.id)
action
end
end
- shared_examples_for 'tries to update permissions via refreshing authorizations for the affected projects' do
- context 'when the number of affected projects exceeds the set threshold' do
- it 'updates permissions via refreshing authorizations for the affected projects asynchronously' do
- expect_next_instance_of(
- AuthorizedProjectUpdate::ProjectAccessChangedService, affected_project_ids
- ) do |service|
- expect(service).to receive(:execute).with(blocking: false)
- end
-
- action
- end
-
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay as a safety net' do
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- action
- end
- end
-
- context 'when the number of affected projects does not exceed the set threshold' do
- before do
- stub_const(
- "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
- affected_project_ids.size + 1)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
- end
- end
-
context 'on create' do
let(:action) { group.add_member(user, Gitlab::Access::GUEST) }
- let(:blocking) { true }
- it 'changes access level', :sidekiq_inline do
+ it 'changes access level' do
expect { action }.to change { user.can?(:guest_access, project_a) }.from(false).to(true)
.and change { user.can?(:guest_access, project_b) }.from(false).to(true)
.and change { user.can?(:guest_access, project_c) }.from(false).to(true)
end
- it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
-
- context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
- before do
- stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
- end
+ it_behaves_like 'calls AuthorizedProjectsWorker inline to recalculate authorizations'
end
context 'on update' do
@@ -245,23 +195,14 @@ RSpec.describe GroupMember do
end
let(:action) { group.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
- let(:blocking) { true }
- it 'changes access level', :sidekiq_inline do
+ it 'changes access level' do
expect { action }.to change { user.can?(:developer_access, project_a) }.from(false).to(true)
.and change { user.can?(:developer_access, project_b) }.from(false).to(true)
.and change { user.can?(:developer_access, project_c) }.from(false).to(true)
end
- it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
-
- context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
- before do
- stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
- end
+ it_behaves_like 'calls AuthorizedProjectsWorker inline to recalculate authorizations'
end
context 'on destroy' do
@@ -270,7 +211,6 @@ RSpec.describe GroupMember do
end
let(:action) { group.members.find_by(user: user).destroy! }
- let(:blocking) { false }
it 'changes access level', :sidekiq_inline do
expect { action }.to change { user.can?(:guest_access, project_a) }.from(true).to(false)
@@ -278,14 +218,10 @@ RSpec.describe GroupMember do
.and change { user.can?(:guest_access, project_c) }.from(true).to(false)
end
- it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+ it 'schedules an AuthorizedProjectsWorker job to recalculate authorizations' do
+ expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async).with([[user.id]])
- context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
- before do
- stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ action
end
end
end
diff --git a/spec/models/members/member_role_spec.rb b/spec/models/members/member_role_spec.rb
new file mode 100644
index 00000000000..e8993491918
--- /dev/null
+++ b/spec/models/members/member_role_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberRole do
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace) }
+ it { is_expected.to have_many(:members) }
+ end
+
+ describe 'validation' do
+ subject { described_class.new }
+
+ it { is_expected.to validate_presence_of(:namespace_id) }
+ it { is_expected.to validate_presence_of(:base_access_level) }
+ end
+end
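Note: a model shape consistent with the new spec/models/members/member_role_spec.rb above; the association and column names follow the expectations in the examples, and this is a sketch rather than the actual model.

class MemberRole < ApplicationRecord
  belongs_to :namespace
  has_many :members

  validates :namespace_id, presence: true
  validates :base_access_level, presence: true
end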
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 39d9d25a98c..99fc5dc14df 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -213,10 +213,11 @@ RSpec.describe ProjectMember do
let_it_be(:user) { create(:user) }
shared_examples_for 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline to recalculate authorizations' do
- it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker' do
- expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:bulk_perform_and_wait).with(
- [[project.id, user.id]]
- )
+ # this is in line with the overridden behaviour in stubbed_member.rb
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline' do
+ worker_instance = AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker.new
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:new).and_return(worker_instance)
+ expect(worker_instance).to receive(:perform).with(project.id, user.id)
action
end
diff --git a/spec/models/merge_request/approval_removal_settings_spec.rb b/spec/models/merge_request/approval_removal_settings_spec.rb
new file mode 100644
index 00000000000..5f879207a72
--- /dev/null
+++ b/spec/models/merge_request/approval_removal_settings_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequest::ApprovalRemovalSettings do
+ describe 'validations' do
+ let(:reset_approvals_on_push) {}
+ let(:selective_code_owner_removals) {}
+
+ subject { described_class.new(project, reset_approvals_on_push, selective_code_owner_removals) }
+
+ context 'when enabling selective_code_owner_removals and reset_approvals_on_push is disabled' do
+ let(:project) { create(:project, reset_approvals_on_push: false) }
+ let(:selective_code_owner_removals) { true }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when enabling selective_code_owner_removals and reset_approvals_on_push is enabled' do
+ let(:project) { create(:project) }
+ let(:selective_code_owner_removals) { true }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when enabling reset_approvals_on_push and selective_code_owner_removals is disabled' do
+ let(:project) { create(:project) }
+ let(:reset_approvals_on_push) { true }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when enabling reset_approvals_on_push and selective_code_owner_removals is enabled' do
+ let(:project) { create(:project) }
+ let(:reset_approvals_on_push) { true }
+
+ before do
+ project.project_setting.update!(selective_code_owner_removals: true)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when enabling reset_approvals_on_push and selective_code_owner_removals' do
+ let(:project) { create(:project) }
+ let(:reset_approvals_on_push) { true }
+ let(:selective_code_owner_removals) { true }
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+end
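Note: the new spec above treats reset_approvals_on_push and selective_code_owner_removals as mutually exclusive, falling back to the project's stored settings when an argument is nil. A hedged sketch of a PORO consistent with those examples; the validation wording is an assumption.

class MergeRequest::ApprovalRemovalSettings
  include ActiveModel::Validations

  validate :mutually_exclusive_settings

  def initialize(project, reset_approvals_on_push, selective_code_owner_removals)
    @project = project
    @reset_approvals_on_push = reset_approvals_on_push
    @selective_code_owner_removals = selective_code_owner_removals
  end

  private

  def mutually_exclusive_settings
    reset = @reset_approvals_on_push.nil? ? @project.reset_approvals_on_push : @reset_approvals_on_push
    selective =
      if @selective_code_owner_removals.nil?
        @project.project_setting.selective_code_owner_removals
      else
        @selective_code_owner_removals
      end

    return unless reset && selective

    errors.add(:base, 'cannot enable both reset_approvals_on_push and selective_code_owner_removals')
  end
end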
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index c3e325c4e6c..19026a4772d 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -114,10 +114,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
- let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1])}
- let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2])}
- let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: [])}
- let_it_be(:merge_request4) { create(:merge_request, :draft_merge_request)}
+ let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1]) }
+ let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2]) }
+ let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: []) }
+ let_it_be(:merge_request4) { create(:merge_request, :draft_merge_request) }
describe '.review_requested' do
it 'returns MRs that have any review requests' do
@@ -145,8 +145,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
describe '.attention' do
- let_it_be(:merge_request5) { create(:merge_request, :unique_branches, assignees: [user2])}
- let_it_be(:merge_request6) { create(:merge_request, :unique_branches, assignees: [user2])}
+ let_it_be(:merge_request5) { create(:merge_request, :unique_branches, assignees: [user2]) }
+ let_it_be(:merge_request6) { create(:merge_request, :unique_branches, assignees: [user2]) }
before do
assignee = merge_request6.find_assignee(user2)
@@ -2056,7 +2056,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when failed to find an actual head pipeline' do
before do
- allow(merge_request).to receive(:find_actual_head_pipeline) { }
+ allow(merge_request).to receive(:find_actual_head_pipeline) {}
end
it 'does not update the current head pipeline' do
@@ -3232,6 +3232,62 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#detailed_merge_status' do
+ subject(:detailed_merge_status) { merge_request.detailed_merge_status }
+
+ context 'when merge status is cannot_be_merged_rechecking' do
+ let(:merge_request) { create(:merge_request, merge_status: :cannot_be_merged_rechecking) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is preparing' do
+ let(:merge_request) { create(:merge_request, merge_status: :preparing) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is checking' do
+ let(:merge_request) { create(:merge_request, merge_status: :checking) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is unchecked' do
+ let(:merge_request) { create(:merge_request, merge_status: :unchecked) }
+
+ it 'returns :unchecked' do
+ expect(detailed_merge_status).to eq(:unchecked)
+ end
+ end
+
+ context 'when merge checks are a success' do
+ let(:merge_request) { create(:merge_request) }
+
+ it 'returns :mergeable' do
+ expect(detailed_merge_status).to eq(:mergeable)
+ end
+ end
+
+ context 'when merge status have a failure' do
+ let(:merge_request) { create(:merge_request) }
+
+ before do
+ merge_request.close!
+ end
+
+ it 'returns the failure reason' do
+ expect(detailed_merge_status).to eq(:not_open)
+ end
+ end
+ end
+
describe '#mergeable_state?' do
it_behaves_like 'for mergeable_state'
@@ -4660,6 +4716,37 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#in_locked_state' do
+ let(:merge_request) { create(:merge_request, :opened) }
+
+ context 'when the merge request does not change state' do
+ it 'returns to previous state and has no errors on the object' do
+ expect(merge_request.opened?).to eq(true)
+
+ merge_request.in_locked_state do
+ expect(merge_request.locked?).to eq(true)
+ end
+
+ expect(merge_request.opened?).to eq(true)
+ expect(merge_request.errors).to be_empty
+ end
+ end
+
+ context 'when the merge request is merged while locked' do
+ it 'becomes merged and has no errors on the object' do
+ expect(merge_request.opened?).to eq(true)
+
+ merge_request.in_locked_state do
+ expect(merge_request.locked?).to eq(true)
+ merge_request.mark_as_merged!
+ end
+
+ expect(merge_request.merged?).to eq(true)
+ expect(merge_request.errors).to be_empty
+ end
+ end
+ end
+
describe '#cleanup_refs' do
subject { merge_request.cleanup_refs(only: only) }
@@ -5047,6 +5134,12 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#merge_blocked_by_other_mrs?' do
+ it 'returns false when there is no blocking merge requests' do
+ expect(subject.merge_blocked_by_other_mrs?).to be_falsy
+ end
+ end
+
describe '#merge_request_reviewers_with' do
let_it_be(:reviewer1) { create(:user) }
let_it_be(:reviewer2) { create(:user) }
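Note: the '#detailed_merge_status' examples above map several merge_status values onto :checking and otherwise surface either :mergeable or a failure reason such as :not_open. A sketch of that mapping; the delegation to mergeability checks is an assumption, not the actual MergeRequest implementation.

def detailed_merge_status
  return :checking if preparing? || checking? || cannot_be_merged_rechecking?
  return :unchecked if unchecked?

  # Assumed delegation: run the mergeability checks and return :mergeable on
  # success, or the identifier of the first failed check (e.g. :not_open).
  first_failed_mergeability_check || :mergeable
end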
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 72a57b6076a..af1383b68bf 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -257,7 +257,7 @@ RSpec.describe Milestone do
let(:milestone) { create(:milestone, title: 'foo', description: 'bar') }
it 'returns milestones with a matching title' do
- expect(described_class.search_title(milestone.title)) .to eq([milestone])
+ expect(described_class.search_title(milestone.title)).to eq([milestone])
end
it 'returns milestones with a partially matching title' do
@@ -272,7 +272,7 @@ RSpec.describe Milestone do
it 'searches only on the title and ignores milestones with a matching description' do
create(:milestone, title: 'bar', description: 'foo')
- expect(described_class.search_title(milestone.title)) .to eq([milestone])
+ expect(described_class.search_title(milestone.title)).to eq([milestone])
end
end
diff --git a/spec/models/ml/candidate_metric_spec.rb b/spec/models/ml/candidate_metric_spec.rb
new file mode 100644
index 00000000000..5ee6030fb8e
--- /dev/null
+++ b/spec/models/ml/candidate_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::CandidateMetric do
+ describe 'associations' do
+ it { is_expected.to belong_to(:candidate) }
+ end
+end
diff --git a/spec/models/ml/candidate_param_spec.rb b/spec/models/ml/candidate_param_spec.rb
new file mode 100644
index 00000000000..ff38e471219
--- /dev/null
+++ b/spec/models/ml/candidate_param_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::CandidateParam do
+ describe 'associations' do
+ it { is_expected.to belong_to(:candidate) }
+ end
+end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
new file mode 100644
index 00000000000..a48e291fa55
--- /dev/null
+++ b/spec/models/ml/candidate_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::Candidate do
+ describe 'associations' do
+ it { is_expected.to belong_to(:experiment) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:params) }
+ it { is_expected.to have_many(:metrics) }
+ end
+end
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
new file mode 100644
index 00000000000..dca5280a8fe
--- /dev/null
+++ b/spec/models/ml/experiment_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::Experiment do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:candidates) }
+ end
+end
diff --git a/spec/models/namespace/detail_spec.rb b/spec/models/namespace/detail_spec.rb
new file mode 100644
index 00000000000..1bb756c441b
--- /dev/null
+++ b/spec/models/namespace/detail_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespace::Detail, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to :namespace }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:namespace) }
+ end
+
+ context 'when namespace description changes' do
+ let(:namespace) { create(:namespace, description: "old") }
+
+ it 'changes namespace details description' do
+ expect { namespace.update!(description: "new") }
+ .to change { namespace.namespace_details.description }.from("old").to("new")
+ end
+ end
+
+ context 'when project description changes' do
+ let(:project) { create(:project, description: "old") }
+
+ it 'changes project namespace details description' do
+ expect { project.update!(description: "new") }
+ .to change { project.project_namespace.namespace_details.description }.from("old").to("new")
+ end
+ end
+
+ context 'when group description changes' do
+ let(:group) { create(:group, description: "old") }
+
+ it 'changes group namespace details description' do
+ expect { group.update!(description: "new") }
+ .to change { group.namespace_details.description }.from("old").to("new")
+ end
+ end
+end
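Note: the new Namespace::Detail spec above expects description changes on a namespace or group to propagate to namespace_details (the project case would need a similar hook on Project). A sketch of one way to keep the two in sync on the namespace side; the callback and method names are assumptions.

# In Namespace (sketch):
has_one :namespace_details, class_name: 'Namespace::Detail', autosave: true

before_save :sync_namespace_details, if: :description_changed?

def sync_namespace_details
  (namespace_details || build_namespace_details).description = description
end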
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index d2ee0b40ed6..14ac08b545a 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -100,8 +100,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
it_behaves_like 'does not include personal snippets'
context 'with subgroups' do
- let(:subgroup1) { create(:group, parent: namespace)}
- let(:subgroup2) { create(:group, parent: subgroup1)}
+ let(:subgroup1) { create(:group, parent: namespace) }
+ let(:subgroup2) { create(:group, parent: subgroup1) }
let(:project1) { create(:project, namespace: subgroup1) }
let(:project2) { create(:project, namespace: subgroup2) }
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index 51932ab943c..918ff6aa154 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -85,7 +85,11 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
it { expect { subject }.to raise_error(ActiveRecord::Deadlocked) }
it 'increment db_deadlock counter' do
- expect { subject rescue nil }.to change { db_deadlock_total('Namespace#sync_traversal_ids!') }.by(1)
+ expect do
+ subject
+ rescue StandardError
+ nil
+ end.to change { db_deadlock_total('Namespace#sync_traversal_ids!') }.by(1)
end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 664cdb27290..71ce3afda44 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Namespace do
include ProjectForksHelper
- include GitHelpers
include ReloadHelpers
let_it_be(:group_sti_name) { Group.sti_name }
@@ -23,6 +22,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :root_storage_statistics }
it { is_expected.to have_one :aggregation_schedule }
it { is_expected.to have_one :namespace_settings }
+ it { is_expected.to have_one :namespace_details }
it { is_expected.to have_one(:namespace_statistics) }
it { is_expected.to have_many :custom_emoji }
it { is_expected.to have_one :package_setting_relation }
@@ -31,6 +31,7 @@ RSpec.describe Namespace do
it { is_expected.to have_many :pending_builds }
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
+ it { is_expected.to have_many :member_roles }
it { is_expected.to have_one :cluster_enabled_grant }
it { is_expected.to have_many(:work_items) }
@@ -373,14 +374,6 @@ RSpec.describe Namespace do
context 'linear' do
it_behaves_like 'namespace traversal scopes'
-
- context 'without inner join ancestors query' do
- before do
- stub_feature_flags(use_traversal_ids_for_ancestor_scopes_with_inner_join: false)
- end
-
- it_behaves_like 'namespace traversal scopes'
- end
end
shared_examples 'makes recursive queries' do
@@ -1075,9 +1068,9 @@ RSpec.describe Namespace do
it 'updates project full path in .git/config' do
parent.update!(path: 'mygroup_new')
- expect(project_rugged(project_in_parent_group).config['gitlab.fullpath']).to eq "mygroup_new/#{project_in_parent_group.path}"
- expect(project_rugged(hashed_project_in_subgroup).config['gitlab.fullpath']).to eq "mygroup_new/mysubgroup/#{hashed_project_in_subgroup.path}"
- expect(project_rugged(legacy_project_in_subgroup).config['gitlab.fullpath']).to eq "mygroup_new/mysubgroup/#{legacy_project_in_subgroup.path}"
+ expect(project_in_parent_group.reload.repository.full_path).to eq "mygroup_new/#{project_in_parent_group.path}"
+ expect(hashed_project_in_subgroup.reload.repository.full_path).to eq "mygroup_new/mysubgroup/#{hashed_project_in_subgroup.path}"
+ expect(legacy_project_in_subgroup.reload.repository.full_path).to eq "mygroup_new/mysubgroup/#{legacy_project_in_subgroup.path}"
end
it 'updates the project storage location' do
@@ -1091,14 +1084,6 @@ RSpec.describe Namespace do
expect(repository_hashed_project_in_subgroup.reload.disk_path).to eq hashed_project_in_subgroup.disk_path
expect(repository_legacy_project_in_subgroup.reload.disk_path).to eq "mygroup_moved/mysubgroup/#{legacy_project_in_subgroup.path}"
end
-
- def project_rugged(project)
- # Routes are loaded when creating the projects, so we need to manually
- # reload them for the below code to be aware of the above UPDATE.
- project.route.reload
-
- rugged_repo(project.repository)
- end
end
end
@@ -1556,7 +1541,7 @@ RSpec.describe Namespace do
describe '#share_with_group_lock with subgroups' do
context 'when creating a subgroup' do
- let(:subgroup) { create(:group, parent: root_group )}
+ let(:subgroup) { create(:group, parent: root_group ) }
context 'under a parent with "Share with group lock" enabled' do
let(:root_group) { create(:group, share_with_group_lock: true) }
@@ -1577,7 +1562,7 @@ RSpec.describe Namespace do
context 'when enabling the parent group "Share with group lock"' do
let(:root_group) { create(:group) }
- let!(:subgroup) { create(:group, parent: root_group )}
+ let!(:subgroup) { create(:group, parent: root_group ) }
it 'the subgroup "Share with group lock" becomes enabled' do
root_group.update!(share_with_group_lock: true)
@@ -1590,7 +1575,7 @@ RSpec.describe Namespace do
let(:root_group) { create(:group, share_with_group_lock: true) }
context 'and the subgroup "Share with group lock" is enabled' do
- let(:subgroup) { create(:group, parent: root_group, share_with_group_lock: true )}
+ let(:subgroup) { create(:group, parent: root_group, share_with_group_lock: true ) }
it 'the subgroup "Share with group lock" does not change' do
root_group.update!(share_with_group_lock: false)
@@ -1600,7 +1585,7 @@ RSpec.describe Namespace do
end
context 'but the subgroup "Share with group lock" is disabled' do
- let(:subgroup) { create(:group, parent: root_group )}
+ let(:subgroup) { create(:group, parent: root_group ) }
it 'the subgroup "Share with group lock" does not change' do
root_group.update!(share_with_group_lock: false)
@@ -1615,7 +1600,7 @@ RSpec.describe Namespace do
let(:root_group) { create(:group, share_with_group_lock: true) }
context 'when the subgroup "Share with group lock" is enabled' do
- let(:subgroup) { create(:group, share_with_group_lock: true )}
+ let(:subgroup) { create(:group, share_with_group_lock: true ) }
it 'the subgroup "Share with group lock" does not change' do
subgroup.parent = root_group
@@ -1626,7 +1611,7 @@ RSpec.describe Namespace do
end
context 'when the subgroup "Share with group lock" is disabled' do
- let(:subgroup) { create(:group)}
+ let(:subgroup) { create(:group) }
it 'the subgroup "Share with group lock" becomes enabled' do
subgroup.parent = root_group
@@ -1641,7 +1626,7 @@ RSpec.describe Namespace do
let(:root_group) { create(:group) }
context 'when the subgroup "Share with group lock" is enabled' do
- let(:subgroup) { create(:group, share_with_group_lock: true )}
+ let(:subgroup) { create(:group, share_with_group_lock: true ) }
it 'the subgroup "Share with group lock" does not change' do
subgroup.parent = root_group
@@ -1652,7 +1637,7 @@ RSpec.describe Namespace do
end
context 'when the subgroup "Share with group lock" is disabled' do
- let(:subgroup) { create(:group)}
+ let(:subgroup) { create(:group) }
it 'the subgroup "Share with group lock" does not change' do
subgroup.parent = root_group
@@ -2027,7 +2012,7 @@ RSpec.describe Namespace do
end
with_them do
- let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners) }
it 'returns the result' do
expect(namespace.shared_runners_setting).to eq(shared_runners_setting)
@@ -2051,7 +2036,7 @@ RSpec.describe Namespace do
end
with_them do
- let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners) }
it 'returns the result' do
expect(namespace.shared_runners_setting_higher_than?(other_setting)).to eq(result)
@@ -2282,9 +2267,8 @@ RSpec.describe Namespace do
stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
end
- # Date TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
- it 'returns nil' do
- expect(namespace.storage_enforcement_date).to be(nil)
+ it 'returns correct date' do
+ expect(namespace.storage_enforcement_date).to eql(Date.new(2022, 10, 19))
end
context 'when :storage_banner_bypass_date_check is enabled' do
diff --git a/spec/models/namespaces/project_namespace_spec.rb b/spec/models/namespaces/project_namespace_spec.rb
index c995571c3c9..78403db7fa8 100644
--- a/spec/models/namespaces/project_namespace_spec.rb
+++ b/spec/models/namespaces/project_namespace_spec.rb
@@ -5,6 +5,14 @@ require 'spec_helper'
RSpec.describe Namespaces::ProjectNamespace, type: :model do
describe 'relationships' do
it { is_expected.to have_one(:project).with_foreign_key(:project_namespace_id).inverse_of(:project_namespace) }
+
+ specify do
+ project = create(:project)
+ namespace = project.project_namespace
+ namespace.reload_project
+
+ expect(namespace.project).to eq project
+ end
end
describe 'validations' do
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index fc6f7832c2c..ca558848cb0 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -823,14 +823,14 @@ RSpec.describe Note do
end
context 'with :label action' do
- let!(:metadata) {create(:system_note_metadata, note: note, action: :label)}
+ let!(:metadata) { create(:system_note_metadata, note: note, action: :label) }
it_behaves_like 'system_note_metadata includes note action'
it { expect(note.system_note_with_references?).to be_falsy }
context 'with cross reference label note' do
- let(:label) { create(:label, project: issue.project)}
+ let(:label) { create(:label, project: issue.project) }
let(:note) { create(:system_note, note: "added #{label.to_reference} label", noteable: issue, project: issue.project) }
it { expect(note.system_note_with_references?).to be_truthy }
@@ -838,14 +838,14 @@ RSpec.describe Note do
end
context 'with :milestone action' do
- let!(:metadata) {create(:system_note_metadata, note: note, action: :milestone)}
+ let!(:metadata) { create(:system_note_metadata, note: note, action: :milestone) }
it_behaves_like 'system_note_metadata includes note action'
it { expect(note.system_note_with_references?).to be_falsy }
context 'with cross reference milestone note' do
- let(:milestone) { create(:milestone, project: issue.project)}
+ let(:milestone) { create(:milestone, project: issue.project) }
let(:note) { create(:system_note, note: "added #{milestone.to_reference} milestone", noteable: issue, project: issue.project) }
it { expect(note.system_note_with_references?).to be_truthy }
@@ -1130,7 +1130,7 @@ RSpec.describe Note do
end
describe '#cache_markdown_field' do
- let(:html) { '<p>some html</p>'}
+ let(:html) { '<p>some html</p>' }
before do
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_call_original
@@ -1792,4 +1792,68 @@ RSpec.describe Note do
end
end
end
+
+ shared_examples 'note that replaces task for checklist item in body text' do
+ subject { note.public_send(field_name) }
+
+ context 'when note is not a system note' do
+ let(:note) { create(:note, note: original_note_body) }
+
+ it { is_expected.to eq(unchanged_note_body) }
+ end
+
+ context 'when note is a system note' do
+ context 'when note noteable_type is not Issue' do
+ let(:note) { create(:note, :system, :on_merge_request, note: original_note_body) }
+
+ it { is_expected.to eq(unchanged_note_body) }
+ end
+
+ context 'when note noteable_type is Issue' do
+ let(:note) { create(:note, :system, :on_issue, note: original_note_body) }
+
+ it { is_expected.to eq(expected_text_replacement) }
+ end
+ end
+ end
+
+ describe '#note' do
+ let(:field_name) { :note }
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as completed' }
+ let(:unchanged_note_body) { original_note_body }
+ let(:expected_text_replacement) { 'marked the checklist item **task 1** as completed' }
+ end
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as incomplete' }
+ let(:unchanged_note_body) { original_note_body }
+ let(:expected_text_replacement) { 'marked the checklist item **task 1** as incomplete' }
+ end
+ end
+
+ describe '#note_html' do
+ let(:field_name) { :note_html }
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as completed' }
+ let(:unchanged_note_body) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the task <strong>task 1</strong> as completed</p>' }
+ let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as completed</p>' }
+
+ before do
+ note.update_columns(note_html: unchanged_note_body)
+ end
+ end
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as incomplete' }
+ let(:unchanged_note_body) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the task <strong>task 1</strong> as incomplete</p>' }
+ let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as incomplete</p>' }
+
+ before do
+ note.update_columns(note_html: unchanged_note_body)
+ end
+ end
+ end
end
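Note: the shared examples added to note_spec.rb above expect system notes on issues to read back 'marked the checklist item …' where the stored text says 'marked the task …', for both #note and #note_html. A sketch of a reader-side substitution consistent with that; the method name and exact hook in Note are assumptions.

def note_with_checklist_item_wording(text)
  return text unless system? && for_issue?

  text.sub('marked the task', 'marked the checklist item')
end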
diff --git a/spec/models/oauth_access_token_spec.rb b/spec/models/oauth_access_token_spec.rb
index 2b47da1ebe1..544f6643712 100644
--- a/spec/models/oauth_access_token_spec.rb
+++ b/spec/models/oauth_access_token_spec.rb
@@ -10,27 +10,6 @@ RSpec.describe OauthAccessToken do
let(:token) { create(:oauth_access_token, application_id: app_one.id) }
describe 'scopes' do
- describe '.distinct_resource_owner_counts' do
- let(:tokens) { described_class.all }
-
- before do
- token
- create_list(:oauth_access_token, 2, resource_owner: user, application_id: app_two.id)
- end
-
- it 'returns unique owners' do
- expect(tokens.count).to eq(3)
- expect(tokens.distinct_resource_owner_counts([app_one])).to eq({ app_one.id => 1 })
- expect(tokens.distinct_resource_owner_counts([app_two])).to eq({ app_two.id => 1 })
- expect(tokens.distinct_resource_owner_counts([app_three])).to eq({})
- expect(tokens.distinct_resource_owner_counts([app_one, app_two]))
- .to eq({
- app_one.id => 1,
- app_two.id => 1
- })
- end
- end
-
describe '.latest_per_application' do
let!(:app_two_token1) { create(:oauth_access_token, application: app_two) }
let!(:app_two_token2) { create(:oauth_access_token, application: app_two) }
@@ -43,4 +22,51 @@ RSpec.describe OauthAccessToken do
end
end
end
+
+ describe 'Doorkeeper secret storing' do
+ it 'stores the token in hashed format' do
+ expect(token.token).not_to eq(token.plaintext_token)
+ end
+
+ it 'does not allow falling back to plaintext token comparison' do
+ expect(described_class.by_token(token.token)).to be_nil
+ end
+
+ it 'finds a token by plaintext token' do
+ expect(described_class.by_token(token.plaintext_token)).to be_a(OauthAccessToken)
+ end
+
+ context 'when the token is stored in plaintext' do
+ let(:plaintext_token) { Devise.friendly_token(20) }
+
+ before do
+ token.update_column(:token, plaintext_token)
+ end
+
+ it 'falls back to plaintext token comparison' do
+ expect(described_class.by_token(plaintext_token)).to be_a(OauthAccessToken)
+ end
+ end
+
+ context 'when hash_oauth_secrets is disabled' do
+ let(:hashed_token) { create(:oauth_access_token, application_id: app_one.id) }
+
+ before do
+ hashed_token
+ stub_feature_flags(hash_oauth_tokens: false)
+ end
+
+ it 'stores the token in plaintext' do
+ expect(token.token).to eq(token.plaintext_token)
+ end
+
+ it 'finds a token by plaintext token' do
+ expect(described_class.by_token(token.plaintext_token)).to be_a(OauthAccessToken)
+ end
+
+ it 'does not find a token that was previously stored as hashed' do
+ expect(described_class.by_token(hashed_token.plaintext_token)).to be_nil
+ end
+ end
+ end
end
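Note: the new Doorkeeper examples above exercise hashed token storage behind the hash_oauth_tokens feature flag, with a plaintext fallback for tokens stored before hashing was enabled. A sketch of the lookup behaviour using a plain SHA-256 digest for illustration; the real hashing strategy (Doorkeeper secret storing) may differ.

require 'digest'

def self.by_token(plaintext_token)
  # Compare against the hashed column first, then fall back to a record that
  # still stores the token in plaintext.
  hashed = Digest::SHA256.hexdigest(plaintext_token)
  find_by(token: hashed) || find_by(token: plaintext_token)
end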
diff --git a/spec/models/onboarding_progress_spec.rb b/spec/models/onboarding_progress_spec.rb
index 80a39404d10..9688dd01c71 100644
--- a/spec/models/onboarding_progress_spec.rb
+++ b/spec/models/onboarding_progress_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe OnboardingProgress do
describe 'validations' do
describe 'namespace_is_root_namespace' do
- subject(:onboarding_progress) { build(:onboarding_progress, namespace: namespace)}
+ subject(:onboarding_progress) { build(:onboarding_progress, namespace: namespace) }
context 'when associated namespace is root' do
it { is_expected.to be_valid }
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
index a37042520e7..0b6dff472c1 100644
--- a/spec/models/packages/cleanup/policy_spec.rb
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
+
it do
is_expected
.to validate_inclusion_of(:keep_n_duplicated_package_files)
diff --git a/spec/models/packages/conan/metadatum_spec.rb b/spec/models/packages/conan/metadatum_spec.rb
index d00723e8e43..92c8b126639 100644
--- a/spec/models/packages/conan/metadatum_spec.rb
+++ b/spec/models/packages/conan/metadatum_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Packages::Conan::Metadatum, type: :model do
end
describe 'validations' do
- let(:fifty_one_characters) { 'f_a' * 17}
+ let(:fifty_one_characters) { 'f_a' * 17 }
it { is_expected.to validate_presence_of(:package) }
it { is_expected.to validate_presence_of(:package_username) }
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 82f5b44f38f..9554fc3bb1b 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe Packages::PackageFile, type: :model do
describe '.with_conan_package_reference' do
let_it_be(:non_matching_package_file) { create(:package_file, :nuget) }
let_it_be(:metadatum) { create(:conan_file_metadatum, :package_file) }
- let_it_be(:reference) { metadatum.conan_package_reference}
+ let_it_be(:reference) { metadatum.conan_package_reference }
it 'returns matching packages' do
expect(described_class.with_conan_package_reference(reference))
@@ -150,8 +150,8 @@ RSpec.describe Packages::PackageFile, type: :model do
context 'Debian scopes' do
let_it_be(:debian_changes) { debian_package.package_files.last }
- let_it_be(:debian_deb) { create(:debian_package_file, package: debian_package)}
- let_it_be(:debian_udeb) { create(:debian_package_file, :udeb, package: debian_package)}
+ let_it_be(:debian_deb) { create(:debian_package_file, package: debian_package) }
+ let_it_be(:debian_udeb) { create(:debian_package_file, :udeb, package: debian_package) }
let_it_be(:debian_contrib) do
create(:debian_package_file, package: debian_package).tap do |pf|
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 06f02f021cf..526c57d08b0 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe Packages::Package, type: :model do
context 'conan package' do
subject { build_stubbed(:conan_package) }
- let(:fifty_one_characters) {'f_b' * 17}
+ let(:fifty_one_characters) { 'f_b' * 17 }
it { is_expected.to allow_value('foo+bar').for(:name) }
it { is_expected.to allow_value('foo_bar').for(:name) }
@@ -243,7 +243,7 @@ RSpec.describe Packages::Package, type: :model do
context 'conan package' do
subject { build_stubbed(:conan_package) }
- let(:fifty_one_characters) {'1.2' * 17}
+ let(:fifty_one_characters) { '1.2' * 17 }
it { is_expected.to allow_value('1.2').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
@@ -441,7 +441,7 @@ RSpec.describe Packages::Package, type: :model do
context 'npm package' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
- let_it_be(:second_project) { create(:project, namespace: group)}
+ let_it_be(:second_project) { create(:project, namespace: group) }
let(:package) { build(:npm_package, project: project, name: name) }
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 69866d497a1..f3ef347121e 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -193,6 +193,20 @@ RSpec.describe PersonalAccessToken do
end
describe 'scopes' do
+ describe '.active' do
+ let_it_be(:revoked_token) { create(:personal_access_token, :revoked) }
+ let_it_be(:not_revoked_false_token) { create(:personal_access_token, revoked: false) }
+ let_it_be(:not_revoked_nil_token) { create(:personal_access_token, revoked: nil) }
+ let_it_be(:expired_token) { create(:personal_access_token, :expired) }
+ let_it_be(:not_expired_token) { create(:personal_access_token) }
+ let_it_be(:never_expires_token) { create(:personal_access_token, expires_at: nil) }
+
+ it 'includes non-revoked and non-expired tokens' do
+ expect(described_class.active)
+ .to match_array([not_revoked_false_token, not_revoked_nil_token, not_expired_token, never_expires_token])
+ end
+ end
+
describe '.expiring_and_not_notified' do
let_it_be(:expired_token) { create(:personal_access_token, expires_at: 2.days.ago) }
let_it_be(:revoked_token) { create(:personal_access_token, revoked: true) }
@@ -251,7 +265,7 @@ RSpec.describe PersonalAccessToken do
describe '.simple_sorts' do
it 'includes overridden keys' do
- expect(described_class.simple_sorts.keys).to include(*%w(expires_at_asc expires_at_desc))
+ expect(described_class.simple_sorts.keys).to include(*%w(expires_at_asc expires_at_desc expires_at_asc_id_desc))
end
end
@@ -270,5 +284,13 @@ RSpec.describe PersonalAccessToken do
expect(described_class.order_expires_at_desc).to match [later_token, earlier_token]
end
end
+
+ describe '.order_expires_at_asc_id_desc' do
+ let_it_be(:earlier_token_2) { create(:personal_access_token, expires_at: 2.days.ago) }
+
+ it 'returns ordered list in combination of expires_at ascending and id descending' do
+ expect(described_class.order_expires_at_asc_id_desc).to eq [earlier_token_2, earlier_token, later_token]
+ end
+ end
end
end
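The `.active` scope and `.order_expires_at_asc_id_desc` ordering are only exercised by these new examples, not defined here. A minimal model-side sketch that would satisfy the expectations above (both bodies are assumptions; the actual PersonalAccessToken implementation may differ):

  # Hypothetical sketch, not taken from this diff:
  scope :active, -> { where(revoked: [false, nil]).where('expires_at >= ? OR expires_at IS NULL', Date.current) }
  scope :order_expires_at_asc_id_desc, -> { reorder(expires_at: :asc, id: :desc) }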
diff --git a/spec/models/postgresql/replication_slot_spec.rb b/spec/models/postgresql/replication_slot_spec.rb
index 63a19541ab5..35c166ab064 100644
--- a/spec/models/postgresql/replication_slot_spec.rb
+++ b/spec/models/postgresql/replication_slot_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Postgresql::ReplicationSlot do
describe '#slots_retained_bytes' do
it 'returns the number of retained bytes' do
- slot = described_class.slots_retained_bytes.find {|x| x['slot_name'] == 'test_slot' }
+ slot = described_class.slots_retained_bytes.find { |x| x['slot_name'] == 'test_slot' }
expect(slot).not_to be_nil
expect(slot['retained_bytes']).to be_nil
diff --git a/spec/models/preloaders/labels_preloader_spec.rb b/spec/models/preloaders/labels_preloader_spec.rb
index 94de00bb94c..86e64d114c7 100644
--- a/spec/models/preloaders/labels_preloader_spec.rb
+++ b/spec/models/preloaders/labels_preloader_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Preloaders::LabelsPreloader do
let_it_be(:user) { create(:user) }
shared_examples 'an efficient database query' do
- let(:subscriptions) { labels.each { |l| create(:subscription, subscribable: l, project: l.project, user: user) }}
+ let(:subscriptions) { labels.each { |l| create(:subscription, subscribable: l, project: l.project, user: user) } }
it 'does not make n+1 queries' do
first_label = labels_with_preloaded_data.first
diff --git a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
index 2060e6cd44a..5e2aaa8b456 100644
--- a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Preloaders::UserMaxAccessLevelInGroupsPreloader do
context 'when the preloader is used', :request_store do
context 'when user has indirect access to groups' do
- let_it_be(:child_maintainer) { create(:group, :private, parent: group1).tap {|g| g.add_maintainer(user)} }
+ let_it_be(:child_maintainer) { create(:group, :private, parent: group1).tap { |g| g.add_maintainer(user) } }
let_it_be(:child_indirect_access) { create(:group, :private, parent: group1) }
let(:groups) { [group1, group2, group3, child_maintainer, child_indirect_access] }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index f46a1646554..98b202299a8 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Project, factory_default: :keep do
include ProjectForksHelper
- include GitHelpers
include ExternalAuthorizationServiceHelpers
include ReloadHelpers
include StubGitlabCalls
@@ -45,6 +44,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
it { is_expected.to have_one(:unify_circuit_integration) }
+ it { is_expected.to have_one(:pumble_integration) }
it { is_expected.to have_one(:webex_teams_integration) }
it { is_expected.to have_one(:packagist_integration) }
it { is_expected.to have_one(:pushover_integration) }
@@ -148,6 +148,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:build_trace_chunks).through(:builds).dependent(:restrict_with_error) }
it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
it { is_expected.to have_one(:build_artifacts_size_refresh).class_name('Projects::BuildArtifactsSizeRefresh') }
+ it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout').with_foreign_key(:project_id) }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -832,6 +833,9 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:last_pipeline).to(:commit).allow_nil }
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
describe 'read project settings' do
%i(
@@ -873,6 +877,12 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#ci_allow_fork_pipelines_to_run_in_parent_project?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :allow_fork_pipelines_to_run_in_parent_project? }
+ end
+ end
+
describe '#ci_job_token_scope_enabled?' do
it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
let(:delegated_method) { :job_token_scope_enabled? }
@@ -5741,16 +5751,18 @@ RSpec.describe Project, factory_default: :keep do
describe '#set_full_path' do
let_it_be(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
+
it 'writes full path in .git/config when key is missing' do
project.set_full_path
- expect(rugged_config['gitlab.fullpath']).to eq project.full_path
+ expect(repository.full_path).to eq project.full_path
end
it 'updates full path in .git/config when key is present' do
project.set_full_path(gl_full_path: 'old/path')
- expect { project.set_full_path }.to change { rugged_config['gitlab.fullpath'] }.from('old/path').to(project.full_path)
+ expect { project.set_full_path }.to change { repository.full_path }.from('old/path').to(project.full_path)
end
it 'does not raise an error with an empty repository' do
@@ -5880,7 +5892,7 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#has_active_hooks?' do
- let_it_be(:project) { create(:project) }
+ let_it_be_with_refind(:project) { create(:project) }
it { expect(project.has_active_hooks?).to be_falsey }
@@ -7471,7 +7483,7 @@ RSpec.describe Project, factory_default: :keep do
end
with_them do
- it { is_expected.to eq expected_result}
+ it { is_expected.to eq expected_result }
end
end
@@ -7488,7 +7500,7 @@ RSpec.describe Project, factory_default: :keep do
end
with_them do
- it { is_expected.to eq expected_result}
+ it { is_expected.to eq expected_result }
end
context 'for a different package type' do
@@ -7511,7 +7523,7 @@ RSpec.describe Project, factory_default: :keep do
end
with_them do
- it { is_expected.to eq expected_result}
+ it { is_expected.to eq expected_result }
end
end
end
@@ -8240,58 +8252,52 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#work_items_feature_flag_enabled?' do
- shared_examples 'project checking work_items feature flag' do
- context 'when work_items FF is disabled globally' do
- before do
- stub_feature_flags(work_items: false)
- end
+ let_it_be(:group_project) { create(:project, :in_subgroup) }
- it { is_expected.to be_falsey }
+ it_behaves_like 'checks parent group feature flag' do
+ let(:feature_flag_method) { :work_items_feature_flag_enabled? }
+ let(:feature_flag) { :work_items }
+ let(:subject_project) { group_project }
+ end
+
+ context 'when feature flag is enabled for the project' do
+ subject { subject_project.work_items_feature_flag_enabled? }
+
+ before do
+ stub_feature_flags(work_items: subject_project)
end
- context 'when work_items FF is enabled for the project' do
- before do
- stub_feature_flags(work_items: project)
- end
+ context 'when project belongs to a group' do
+ let(:subject_project) { group_project }
it { is_expected.to be_truthy }
end
- context 'when work_items FF is enabled globally' do
+ context 'when project does not belong to a group' do
+ let(:subject_project) { create(:project, namespace: create(:namespace)) }
+
it { is_expected.to be_truthy }
end
end
+ end
- subject { project.work_items_feature_flag_enabled? }
-
- context 'when a project does not belong to a group' do
- let_it_be(:project) { create(:project, namespace: namespace) }
+ describe '#work_items_mvc_2_feature_flag_enabled?' do
+ let_it_be(:group_project) { create(:project, :in_subgroup) }
- it_behaves_like 'project checking work_items feature flag'
+ it_behaves_like 'checks parent group feature flag' do
+ let(:feature_flag_method) { :work_items_mvc_2_feature_flag_enabled? }
+ let(:feature_flag) { :work_items_mvc_2 }
+ let(:subject_project) { group_project }
end
+ end
- context 'when project belongs to a group' do
- let_it_be(:root_group) { create(:group) }
- let_it_be(:group) { create(:group, parent: root_group) }
- let_it_be(:project) { create(:project, group: group) }
-
- it_behaves_like 'project checking work_items feature flag'
-
- context 'when work_items FF is enabled for the root group' do
- before do
- stub_feature_flags(work_items: root_group)
- end
-
- it { is_expected.to be_truthy }
- end
+ describe '#work_items_create_from_markdown_feature_flag_enabled?' do
+ let_it_be(:group_project) { create(:project, :in_subgroup) }
- context 'when work_items FF is enabled for the group' do
- before do
- stub_feature_flags(work_items: group)
- end
-
- it { is_expected.to be_truthy }
- end
+ it_behaves_like 'checks parent group feature flag' do
+ let(:feature_flag_method) { :work_items_create_from_markdown_feature_flag_enabled? }
+ let(:feature_flag) { :work_items_create_from_markdown }
+ let(:subject_project) { group_project }
end
end
@@ -8428,6 +8434,23 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#destroy_deployment_by_id' do
+ let(:project) { create(:project, :repository) }
+
+ let!(:deployment) { create(:deployment, :created, project: project) }
+ let!(:old_deployment) { create(:deployment, :created, project: project, finished_at: 1.year.ago) }
+
+ it 'will call fast_destroy_all on a specific deployment by id' do
+ expect(Deployment).to receive(:fast_destroy_all).and_call_original
+
+ expect do
+ project.destroy_deployment_by_id(project.deployments.first.id)
+ end.to change { project.deployments.count }.by(-1)
+
+ expect(project.deployments).to match_array([old_deployment])
+ end
+ end
+
private
def finish_job(export_job)
@@ -8435,10 +8458,6 @@ RSpec.describe Project, factory_default: :keep do
export_job.finish
end
- def rugged_config
- rugged_repo(project.repository).config
- end
-
def create_pipeline(project, status = 'success')
create(:ci_pipeline, project: project,
sha: project.commit.sha,
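With GitHelpers and the rugged_config helper removed, `#set_full_path` is now verified through the Gitaly-backed raw repository. A brief usage sketch of what the updated assertions exercise (the return value is inferred from the expectations above, not from the model code):

  repository = project.repository.raw   # Gitlab::Git::Repository
  project.set_full_path
  repository.full_path                  # expected to equal project.full_path per the spec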
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 53175a2f840..f4edc68457b 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -26,31 +26,20 @@ RSpec.describe ProjectStatistics do
end
describe 'statistics columns' do
- it "support values up to 8 exabytes" do
- statistics.update!(
- commit_count: 8.exabytes - 1,
- repository_size: 2.exabytes,
- wiki_size: 1.exabytes,
- lfs_objects_size: 2.exabytes,
- build_artifacts_size: 1.exabyte,
- snippets_size: 1.exabyte,
- pipeline_artifacts_size: 512.petabytes - 1,
- uploads_size: 512.petabytes,
- container_registry_size: 12.petabytes
- )
-
- statistics.reload
-
- expect(statistics.commit_count).to eq(8.exabytes - 1)
- expect(statistics.repository_size).to eq(2.exabytes)
- expect(statistics.wiki_size).to eq(1.exabytes)
- expect(statistics.lfs_objects_size).to eq(2.exabytes)
- expect(statistics.build_artifacts_size).to eq(1.exabyte)
- expect(statistics.storage_size).to eq(8.exabytes - 1)
- expect(statistics.snippets_size).to eq(1.exabyte)
- expect(statistics.pipeline_artifacts_size).to eq(512.petabytes - 1)
- expect(statistics.uploads_size).to eq(512.petabytes)
- expect(statistics.container_registry_size).to eq(12.petabytes)
+ it "supports bigint values" do
+ expect do
+ statistics.update!(
+ commit_count: 3.gigabytes,
+ repository_size: 3.gigabytes,
+ wiki_size: 3.gigabytes,
+ lfs_objects_size: 3.gigabytes,
+ build_artifacts_size: 3.gigabytes,
+ snippets_size: 3.gigabytes,
+ pipeline_artifacts_size: 3.gigabytes,
+ uploads_size: 3.gigabytes,
+ container_registry_size: 3.gigabytes
+ )
+ end.not_to raise_error
end
end
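The rewritten example only asserts that multi-gigabyte values persist without error now that the statistics columns are bigint. For context, a quick check of the sizes involved (assuming signed 64-bit columns and ActiveSupport's 1024-based byte helpers):

  bigint_max = 2**63 - 1           # 9_223_372_036_854_775_807
  8 * 1024**6 - 1 == bigint_max    # => true: the old 8.exabytes - 1 ceiling was exactly the bigint maximum
  3 * 1024**3 <= bigint_max        # => true: the new 3.gigabytes test values fit comfortably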
diff --git a/spec/models/projects/import_export/relation_export_spec.rb b/spec/models/projects/import_export/relation_export_spec.rb
index c74ca82e161..8643fbc7b46 100644
--- a/spec/models/projects/import_export/relation_export_spec.rb
+++ b/spec/models/projects/import_export/relation_export_spec.rb
@@ -20,4 +20,36 @@ RSpec.describe Projects::ImportExport::RelationExport, type: :model do
it { is_expected.to validate_length_of(:jid).is_at_most(255) }
it { is_expected.to validate_length_of(:export_error).is_at_most(300) }
end
+
+ describe '.by_relation' do
+ it 'returns export relations filtered by relation name' do
+ project_relation_export_1 = create(:project_relation_export, relation: 'labels')
+ project_relation_export_2 = create(:project_relation_export, relation: 'labels')
+ create(:project_relation_export, relation: 'uploads')
+
+ relations = described_class.by_relation('labels').to_a
+
+ expect(relations).to match_array([project_relation_export_1, project_relation_export_2])
+ end
+ end
+
+ describe '.relation_names_list' do
+ it 'includes extra relations list' do
+ expect(described_class.relation_names_list).to include(
+ 'design_repository', 'lfs_objects', 'repository', 'snippets_repository', 'uploads', 'wiki_repository'
+ )
+ end
+
+ it 'includes root tree relation name project' do
+ expect(described_class.relation_names_list).to include('project')
+ end
+
+ it 'includes project tree top level relation nodes' do
+ expect(described_class.relation_names_list).to include('milestones', 'issues', 'snippets', 'releases')
+ end
+
+ it 'includes project tree nested relation nodes' do
+ expect(described_class.relation_names_list).not_to include('events', 'notes')
+ end
+ end
end
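`.by_relation` is only asserted here, not defined. A one-line sketch of a scope consistent with the expectation (an assumption, not taken from this diff):

  scope :by_relation, ->(relation) { where(relation: relation) }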
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index fc9d9bef437..f9659ef352c 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -30,6 +30,17 @@ RSpec.describe Projects::Topic do
end
describe 'scopes' do
+ describe 'without_assigned_projects' do
+ let_it_be(:unassigned_topic) { create(:topic, name: 'unassigned topic') }
+ let_it_be(:project) { create(:project, :public, topic_list: 'topic') }
+
+ it 'returns topics without assigned projects' do
+ topics = described_class.without_assigned_projects
+
+ expect(topics).to contain_exactly(unassigned_topic)
+ end
+ end
+
describe 'order_by_non_private_projects_count' do
let!(:topic1) { create(:topic, name: 'topicB') }
let!(:topic2) { create(:topic, name: 'topicC') }
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index a3fc09b31fb..3936e7127b8 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -167,36 +167,130 @@ RSpec.describe ProtectedBranch do
expect(described_class.protected?(project, nil)).to eq(false)
end
- context 'with caching', :use_clean_rails_memory_store_caching do
+ context 'with caching', :use_clean_rails_redis_caching do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:protected_branch) { create(:protected_branch, project: project, name: "“jawn”") }
+ let(:feature_flag) { true }
+ let(:dry_run) { true }
+
+ shared_examples_for 'hash based cache implementation' do
+ it 'calls only hash based cache implementation' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |instance|
+ expect(instance).to receive(:fetch).with('missing-branch', anything).and_call_original
+ end
+
+ expect(Rails.cache).not_to receive(:fetch)
+
+ described_class.protected?(project, 'missing-branch', dry_run: dry_run)
+ end
+ end
+
before do
- allow(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+ stub_feature_flags(hash_based_cache_for_protected_branches: feature_flag)
+ allow(described_class).to receive(:matching).and_call_original
# the original call works and warms the cache
- described_class.protected?(project, protected_branch.name)
+ described_class.protected?(project, protected_branch.name, dry_run: dry_run)
end
- it 'correctly invalidates a cache' do
- expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+ context 'Dry-run: true' do
+ it 'recalculates a fresh value every time in order to check the cache is not returning stale data' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).twice
+
+ 2.times { described_class.protected?(project, protected_branch.name) }
+ end
- create(:protected_branch, project: project, name: "bar")
- # the cache is invalidated because the project has been "updated"
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ it_behaves_like 'hash based cache implementation'
end
- it 'correctly uses the cached version' do
- expect(described_class).not_to receive(:matching)
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ context 'Dry-run: false' do
+ let(:dry_run) { false }
+
+ it 'correctly invalidates a cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).exactly(3).times.and_call_original
+
+ create_params = { name: 'bar', merge_access_levels_attributes: [{ access_level: Gitlab::Access::DEVELOPER }] }
+ branch = ProtectedBranches::CreateService.new(project, project.owner, create_params).execute
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+
+ ProtectedBranches::UpdateService.new(project, project.owner, name: 'ber').execute(branch)
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+
+ ProtectedBranches::DestroyService.new(project, project.owner).execute(branch)
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+ end
+
+ it_behaves_like 'hash based cache implementation'
+
+ context 'when project is updated' do
+ it 'does not invalidate a cache' do
+ expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
+
+ project.touch
+
+ described_class.protected?(project, protected_branch.name, dry_run: dry_run)
+ end
+ end
+
+ context 'when other project protected branch is updated' do
+ it 'does not invalidate the current project cache' do
+ expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
+
+ another_project = create(:project)
+ ProtectedBranches::CreateService.new(another_project, another_project.owner, name: 'bar').execute
+
+ described_class.protected?(project, protected_branch.name, dry_run: dry_run)
+ end
+ end
+
+ it 'correctly uses the cached version' do
+ expect(described_class).not_to receive(:matching)
+
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+ end
end
- it 'sets expires_in for a cache key' do
- cache_key = described_class.protected_ref_cache_key(project, protected_branch.name)
+ context 'when feature flag hash_based_cache_for_protected_branches is off' do
+ let(:feature_flag) { false }
- expect(Rails.cache).to receive(:fetch).with(cache_key, expires_in: 1.hour)
+ it 'does not call hash based cache implementation' do
+ expect(ProtectedBranches::CacheService).not_to receive(:new)
+ expect(Rails.cache).to receive(:fetch).and_call_original
+
+ described_class.protected?(project, 'missing-branch')
+ end
+
+ it 'correctly invalidates a cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+
+ create(:protected_branch, project: project, name: "bar")
+ # the cache is invalidated because the project has been "updated"
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ end
- described_class.protected?(project, protected_branch.name)
+ it 'sets expires_in of 1 hour for the Rails cache key' do
+ cache_key = described_class.protected_ref_cache_key(project, protected_branch.name)
+
+ expect(Rails.cache).to receive(:fetch).with(cache_key, expires_in: 1.hour)
+
+ described_class.protected?(project, protected_branch.name)
+ end
+
+ context 'when project is updated' do
+ it 'invalidates Rails cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+
+ project.touch
+
+ described_class.protected?(project, protected_branch.name)
+ end
+ end
+
+ it 'correctly uses the cached version' do
+ expect(described_class).not_to receive(:matching)
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ end
end
end
end
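The reworked caching specs call `.protected?` with an optional `dry_run:` keyword and a hash-based cache kept behind the `hash_based_cache_for_protected_branches` feature flag. Roughly, as implied by the examples above (the branch name is illustrative and the semantics are assumptions drawn from the spec, not from the model code):

  # dry_run: true recomputes the value each time so stale cache entries can be detected;
  # dry_run: false trusts ProtectedBranches::CacheService, which is refreshed by the
  # Create/Update/Destroy services rather than invalidated by project.touch.
  ProtectedBranch.protected?(project, 'main', dry_run: true)
  ProtectedBranch.protected?(project, 'main', dry_run: false)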
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index 14a43df4229..3555dfba769 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
let(:page) { 3 }
it 'responds with paginated results' do
- expect(subject[:items].first['title']).to eq('bright')
+ expect(subject[:items].first['name']).to eq('bright')
expect(subject[:next_page]).to eq(4)
end
end
@@ -37,7 +37,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
let(:page) { 4 }
it 'responds with paginated results and no next_page' do
- expect(subject[:items].first['title']).to eq("It's gonna be a bright")
+ expect(subject[:items].first['name']).to eq("It's gonna be a bright")
expect(subject[:next_page]).to eq(nil)
end
end
@@ -63,12 +63,12 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
it 'returns platform specific items' do
expect(subject[:items].count).to eq(1)
- expect(subject[:items].first['title']).to eq("bright and sunshinin' day")
+ expect(subject[:items].first['name']).to eq("bright and sunshinin' day")
expect(subject[:next_page]).to eq(2)
end
- it 'parses the body as markdown and returns html, and links are target="_blank"' do
- expect(subject[:items].first['body']).to match('<p data-sourcepos="1:1-1:62" dir="auto">bright and sunshinin\' <a href="https://en.wikipedia.org/wiki/Day" rel="nofollow noreferrer noopener" target="_blank">day</a></p>')
+ it 'parses the description as markdown and returns html, and links are target="_blank"' do
+ expect(subject[:items].first['description']).to match('<p data-sourcepos="1:1-1:62" dir="auto">bright and sunshinin\' <a href="https://en.wikipedia.org/wiki/Day" rel="nofollow noreferrer noopener" target="_blank">day</a></p>')
end
it 'logs an error if theres an error parsing markdown for an item, and skips it' do
@@ -83,7 +83,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
it 'responds with a different set of data' do
expect(subject[:items].count).to eq(1)
- expect(subject[:items].first['title']).to eq("I think I can make it now the pain is gone")
+ expect(subject[:items].first['name']).to eq("I think I can make it now the pain is gone")
end
end
@@ -171,7 +171,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
items = described_class.load_items(page: 2)
expect(items.count).to eq(1)
- expect(items.first['title']).to eq("View epics on a board")
+ expect(items.first['name']).to eq("View epics on a board")
end
end
end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 83d7596ff51..180a76ff593 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -233,6 +233,6 @@ RSpec.describe Release do
let_it_be(:milestone_2) { create(:milestone, project: project, title: 'Milestone 2') }
let_it_be(:release) { create(:release, project: project, milestones: [milestone_1, milestone_2]) }
- it { expect(release.milestone_titles).to eq("#{milestone_1.title}, #{milestone_2.title}")}
+ it { expect(release.milestone_titles).to eq("#{milestone_1.title}, #{milestone_2.title}") }
end
end
diff --git a/spec/models/releases/link_spec.rb b/spec/models/releases/link_spec.rb
index 74ef38f482b..4910de61c22 100644
--- a/spec/models/releases/link_spec.rb
+++ b/spec/models/releases/link_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Releases::Link do
describe 'FILEPATH_REGEX with table' do
using RSpec::Parameterized::TableSyntax
- let(:link) { build(:release_link)}
+ let(:link) { build(:release_link) }
where(:reason, :filepath, :result) do
'cannot contain `//`' | '/https//www.example.com' | be_invalid
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 51351c9fdd1..429ad550626 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe RemoteMirror, :mailer do
- include GitHelpers
-
before do
stub_feature_flags(remote_mirror_no_delay: false)
end
@@ -96,16 +94,6 @@ RSpec.describe RemoteMirror, :mailer do
expect(mirror.url).to eq('http://foo:bar@test.com')
expect(mirror.credentials).to eq({ user: 'foo', password: 'bar' })
end
-
- it 'does not update the repository config if credentials changed' do
- mirror = create_mirror(url: 'http://foo:bar@test.com')
- repo = mirror.project.repository
- old_config = rugged_repo(repo).config
-
- mirror.update_attribute(:url, 'http://foo:baz@test.com')
-
- expect(rugged_repo(repo).config.to_hash).to eq(old_config.to_hash)
- end
end
end
@@ -231,7 +219,7 @@ RSpec.describe RemoteMirror, :mailer do
end
describe '#hard_retry!' do
- let(:remote_mirror) { create(:remote_mirror).tap {|mirror| mirror.update_column(:url, 'invalid') } }
+ let(:remote_mirror) { create(:remote_mirror).tap { |mirror| mirror.update_column(:url, 'invalid') } }
it 'transitions an invalid mirror to the to_retry state' do
remote_mirror.hard_retry!('Invalid')
@@ -242,7 +230,7 @@ RSpec.describe RemoteMirror, :mailer do
end
describe '#hard_fail!' do
- let(:remote_mirror) { create(:remote_mirror).tap {|mirror| mirror.update_column(:url, 'invalid') } }
+ let(:remote_mirror) { create(:remote_mirror).tap { |mirror| mirror.update_column(:url, 'invalid') } }
it 'transitions an invalid mirror to the failed state' do
remote_mirror.hard_fail!('Invalid')
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index b3fbe75a526..530b03714b4 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1469,6 +1469,20 @@ RSpec.describe Repository do
expect(repository.find_branch(branch_name)).to be_nil
end
end
+
+ it 'expires branches cache' do
+ expect(repository).to receive(:expire_branches_cache)
+
+ subject
+ end
+
+ context 'when expire_cache: false' do
+ it 'does not expire branches cache' do
+ expect(repository).not_to receive(:expire_branches_cache)
+
+ repository.add_branch(user, branch_name, target, expire_cache: false)
+ end
+ end
end
shared_examples 'asymmetric cached method' do |method|
@@ -2263,10 +2277,34 @@ RSpec.describe Repository do
.with(%i(branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?))
.and_call_original
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
+
repository.expire_branches_cache
end
end
+ describe '#expire_protected_branches_cache' do
+ it 'expires the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
+
+ repository.expire_protected_branches_cache
+ end
+
+ context 'when repository does not have a project' do
+ let!(:snippet) { create(:personal_snippet, :repository) }
+
+ it 'does not expire the cache' do
+ expect(ProtectedBranches::CacheService).not_to receive(:new)
+
+ snippet.repository.expire_protected_branches_cache
+ end
+ end
+ end
+
describe '#expire_tags_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
@@ -3123,7 +3161,7 @@ RSpec.describe Repository do
it 'after_create is not executed' do
expect(repository).not_to receive(:after_create)
- expect {repository.create_from_bundle(valid_bundle_path)}.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
+ expect { repository.create_from_bundle(valid_bundle_path) }.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
end
end
end
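A minimal sketch of the guard these new repository examples imply: only repositories backed by a project refresh ProtectedBranches::CacheService (the method body and constructor arguments are assumptions):

  def expire_protected_branches_cache
    return unless project # personal snippet repositories have no project

    ProtectedBranches::CacheService.new(project).refresh
  end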
diff --git a/spec/models/snippet_input_action_collection_spec.rb b/spec/models/snippet_input_action_collection_spec.rb
index 3ec206bd031..269a9e1c787 100644
--- a/spec/models/snippet_input_action_collection_spec.rb
+++ b/spec/models/snippet_input_action_collection_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe SnippetInputActionCollection do
it { is_expected.to delegate_method(:[]).to(:actions) }
describe '#to_commit_actions' do
- subject { described_class.new(data).to_commit_actions}
+ subject { described_class.new(data).to_commit_actions }
it 'translates all actions to commit actions' do
transformed_action = action.merge(action: action_name.to_sym)
@@ -22,14 +22,14 @@ RSpec.describe SnippetInputActionCollection do
end
describe '#valid?' do
- subject { described_class.new(data).valid?}
+ subject { described_class.new(data).valid? }
it 'returns true' do
expect(subject).to be true
end
context 'when any of the actions is invalid' do
- let(:data) { [action, { action: 'foo' }, action]}
+ let(:data) { [action, { action: 'foo' }, action] }
it 'returns false' do
expect(subject).to be false
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index a54edc8510e..38bd189f6f4 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -571,8 +571,8 @@ RSpec.describe Snippet do
context 'when some blobs are not retrievable from repository' do
let(:snippet) { create(:snippet, :repository) }
let(:container) { double(:container) }
- let(:retrievable_filename) { 'retrievable_file'}
- let(:unretrievable_filename) { 'unretrievable_file'}
+ let(:retrievable_filename) { 'retrievable_file' }
+ let(:unretrievable_filename) { 'unretrievable_file' }
before do
allow(snippet).to receive(:list_files).and_return([retrievable_filename, unretrievable_filename])
diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb
index 6bb9ccfcf35..1fab3882c2a 100644
--- a/spec/models/u2f_registration_spec.rb
+++ b/spec/models/u2f_registration_spec.rb
@@ -6,23 +6,68 @@ RSpec.describe U2fRegistration do
let_it_be(:user) { create(:user) }
let(:u2f_registration_name) { 'u2f_device' }
+ let(:app_id) { FFaker::BaconIpsum.characters(5) }
+ let(:device) { U2F::FakeU2F.new(app_id) }
- let(:u2f_registration) do
- device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
- create(:u2f_registration, name: u2f_registration_name,
- user: user,
- certificate: Base64.strict_encode64(device.cert_raw),
- key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
- public_key: Base64.strict_encode64(device.origin_public_key_raw))
+ describe '.authenticate' do
+ context 'when registration is found' do
+ it 'returns true' do
+ create_u2f_registration
+ device_challenge = U2F.urlsafe_encode64(SecureRandom.random_bytes(32))
+ sign_response_json = device.sign_response(device_challenge)
+
+ response = U2fRegistration.authenticate(
+ user,
+ app_id,
+ sign_response_json,
+ device_challenge
+ )
+
+ expect(response).to eq true
+ end
+ end
+
+ context 'when registration not found' do
+ it 'returns nil' do
+ device_challenge = U2F.urlsafe_encode64(SecureRandom.random_bytes(32))
+ sign_response_json = device.sign_response(device_challenge)
+
+ # data is valid but user does not have any u2f_registrations
+ response = U2fRegistration.authenticate(
+ user,
+ app_id,
+ sign_response_json,
+ device_challenge
+ )
+
+ expect(response).to eq nil
+ end
+ end
+
+ context 'when args passed in are invalid' do
+ it 'returns false' do
+ some_app_id = 123
+ invalid_json = 'invalid JSON'
+ challenges = 'whatever'
+
+ response = U2fRegistration.authenticate(
+ user,
+ some_app_id,
+ invalid_json,
+ challenges
+ )
+
+ expect(response).to eq false
+ end
+ end
end
describe 'callbacks' do
- describe '#create_webauthn_registration' do
+ describe 'after create' do
shared_examples_for 'creates webauthn registration' do
it 'creates webauthn registration' do
- created_record = u2f_registration
-
- webauthn_registration = WebauthnRegistration.where(u2f_registration_id: created_record.id)
+ u2f_registration = create_u2f_registration
+ webauthn_registration = WebauthnRegistration.where(u2f_registration_id: u2f_registration.id)
expect(webauthn_registration).to exist
end
end
@@ -52,8 +97,45 @@ RSpec.describe U2fRegistration do
receive(:track_exception).with(kind_of(StandardError),
u2f_registration_id: 123))
- u2f_registration
+ create_u2f_registration
end
end
+
+ describe 'after update' do
+ context 'when counter is updated' do
+ it 'updates the webauthn registration counter to be the same value' do
+ u2f_registration = create_u2f_registration
+ new_counter = u2f_registration.counter + 1
+ webauthn_registration = WebauthnRegistration.find_by(u2f_registration_id: u2f_registration.id)
+
+ u2f_registration.update!(counter: new_counter)
+
+ expect(u2f_registration.reload.counter).to eq(new_counter)
+ expect(webauthn_registration.reload.counter).to eq(new_counter)
+ end
+ end
+
+ context 'when sign count of registration is not updated' do
+ it 'does not update the counter' do
+ u2f_registration = create_u2f_registration
+ webauthn_registration = WebauthnRegistration.find_by(u2f_registration_id: u2f_registration.id)
+
+ expect do
+ u2f_registration.update!(name: 'a new name')
+ end.not_to change { webauthn_registration.counter }
+ end
+ end
+ end
+ end
+
+ def create_u2f_registration
+ create(
+ :u2f_registration,
+ name: u2f_registration_name,
+ user: user,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw)
+ )
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index ae6ebdbc6fd..69cd51137b5 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -137,6 +137,7 @@ RSpec.describe User do
it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
it { is_expected.to have_many(:namespace_callouts).class_name('Users::NamespaceCallout') }
+ it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout') }
describe '#user_detail' do
it 'does not persist `user_detail` by default' do
@@ -1082,20 +1083,6 @@ RSpec.describe User do
end
end
- describe '.by_id_and_login' do
- let_it_be(:user) { create(:user) }
-
- it 'finds a user regardless of case' do
- expect(described_class.by_id_and_login(user.id, user.username.upcase))
- .to contain_exactly(user)
- end
-
- it 'finds a user when login is an email address regardless of case' do
- expect(described_class.by_id_and_login(user.id, user.email.upcase))
- .to contain_exactly(user)
- end
- end
-
describe '.for_todos' do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
@@ -1792,9 +1779,10 @@ RSpec.describe User do
describe '#generate_password' do
it 'does not generate password by default' do
- user = create(:user, password: 'abcdefghe')
+ password = User.random_password
+ user = create(:user, password: password)
- expect(user.password).to eq('abcdefghe')
+ expect(user.password).to eq(password)
end
end
@@ -2831,162 +2819,144 @@ RSpec.describe User do
end
end
- shared_examples '.search examples' do
- describe '.search' do
- let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
- let_it_be(:public_email) do
- create(:email, :confirmed, user: user, email: 'publicemail@example.com').tap do |email|
- user.update!(public_email: email.email)
- end
+ describe '.search' do
+ let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
+ let_it_be(:public_email) do
+ create(:email, :confirmed, user: user, email: 'publicemail@example.com').tap do |email|
+ user.update!(public_email: email.email)
end
+ end
- let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
- let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
- let_it_be(:unconfirmed_user) { create(:user, :unconfirmed, name: 'not verified', username: 'notverified') }
-
- let_it_be(:unconfirmed_secondary_email) { create(:email, user: user, email: 'alias@example.com') }
- let_it_be(:confirmed_secondary_email) { create(:email, :confirmed, user: user, email: 'alias2@example.com') }
+ let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
+ let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
+ let_it_be(:unconfirmed_user) { create(:user, :unconfirmed, name: 'not verified', username: 'notverified') }
- describe 'name user and email relative ordering' do
- let_it_be(:named_alexander) { create(:user, name: 'Alexander Person', username: 'abcd', email: 'abcd@example.com') }
- let_it_be(:username_alexand) { create(:user, name: 'Joao Alexander', username: 'Alexand', email: 'joao@example.com') }
+ let_it_be(:unconfirmed_secondary_email) { create(:email, user: user, email: 'alias@example.com') }
+ let_it_be(:confirmed_secondary_email) { create(:email, :confirmed, user: user, email: 'alias2@example.com') }
- it 'prioritizes exact matches' do
- expect(described_class.search('Alexand')).to eq([username_alexand, named_alexander])
- end
+ describe 'name user and email relative ordering' do
+ let_it_be(:named_alexander) { create(:user, name: 'Alexander Person', username: 'abcd', email: 'abcd@example.com') }
+ let_it_be(:username_alexand) { create(:user, name: 'Joao Alexander', username: 'Alexand', email: 'joao@example.com') }
- it 'falls back to ordering by name' do
- expect(described_class.search('Alexander')).to eq([named_alexander, username_alexand])
- end
+ it 'prioritizes exact matches' do
+ expect(described_class.search('Alexand')).to eq([username_alexand, named_alexander])
end
- describe 'name matching' do
- it 'returns users with a matching name with exact match first' do
- expect(described_class.search(user.name)).to eq([user, user2])
- end
-
- it 'returns users with a partially matching name' do
- expect(described_class.search(user.name[0..2])).to eq([user, user2])
- end
-
- it 'returns users with a matching name regardless of the casing' do
- expect(described_class.search(user2.name.upcase)).to eq([user2])
- end
+ it 'falls back to ordering by name' do
+ expect(described_class.search('Alexander')).to eq([named_alexander, username_alexand])
+ end
+ end
- it 'returns users with a exact matching name shorter than 3 chars' do
- expect(described_class.search(user3.name)).to eq([user3])
- end
+ describe 'name matching' do
+ it 'returns users with a matching name with exact match first' do
+ expect(described_class.search(user.name)).to eq([user, user2])
+ end
- it 'returns users with a exact matching name shorter than 3 chars regardless of the casing' do
- expect(described_class.search(user3.name.upcase)).to eq([user3])
- end
+ it 'returns users with a partially matching name' do
+ expect(described_class.search(user.name[0..2])).to eq([user, user2])
+ end
- context 'when use_minimum_char_limit is false' do
- it 'returns users with a partially matching name' do
- expect(described_class.search('u', use_minimum_char_limit: false)).to eq([user3, user, user2])
- end
- end
+ it 'returns users with a matching name regardless of the casing' do
+ expect(described_class.search(user2.name.upcase)).to eq([user2])
end
- describe 'email matching' do
- it 'returns users with a matching public email' do
- expect(described_class.search(user.public_email)).to match_array([user])
- end
+ it 'returns users with a exact matching name shorter than 3 chars' do
+ expect(described_class.search(user3.name)).to eq([user3])
+ end
- it 'does not return users with a partially matching public email' do
- expect(described_class.search(user.public_email[1...-1])).to be_empty
- end
+ it 'returns users with a exact matching name shorter than 3 chars regardless of the casing' do
+ expect(described_class.search(user3.name.upcase)).to eq([user3])
+ end
- it 'returns users with a matching public email regardless of the casing' do
- expect(described_class.search(user.public_email.upcase)).to match_array([user])
+ context 'when use_minimum_char_limit is false' do
+ it 'returns users with a partially matching name' do
+ expect(described_class.search('u', use_minimum_char_limit: false)).to eq([user3, user, user2])
end
+ end
+ end
- it 'does not return users with a matching private email' do
- expect(described_class.search(user.email)).to be_empty
-
- expect(described_class.search(unconfirmed_secondary_email.email)).to be_empty
- expect(described_class.search(confirmed_secondary_email.email)).to be_empty
- end
+ describe 'email matching' do
+ it 'returns users with a matching public email' do
+ expect(described_class.search(user.public_email)).to match_array([user])
+ end
- context 'with private emails search' do
- it 'returns users with matching private primary email' do
- expect(described_class.search(user.email, with_private_emails: true)).to match_array([user])
- end
+ it 'does not return users with a partially matching public email' do
+ expect(described_class.search(user.public_email[1...-1])).to be_empty
+ end
- it 'returns users with matching private unconfirmed primary email' do
- expect(described_class.search(unconfirmed_user.email, with_private_emails: true)).to match_array([unconfirmed_user])
- end
+ it 'returns users with a matching public email regardless of the casing' do
+ expect(described_class.search(user.public_email.upcase)).to match_array([user])
+ end
- it 'returns users with matching private confirmed secondary email' do
- expect(described_class.search(confirmed_secondary_email.email, with_private_emails: true)).to match_array([user])
- end
+ it 'does not return users with a matching private email' do
+ expect(described_class.search(user.email)).to be_empty
- it 'does not return users with matching private unconfirmed secondary email' do
- expect(described_class.search(unconfirmed_secondary_email.email, with_private_emails: true)).to be_empty
- end
- end
+ expect(described_class.search(unconfirmed_secondary_email.email)).to be_empty
+ expect(described_class.search(confirmed_secondary_email.email)).to be_empty
end
- describe 'username matching' do
- it 'returns users with a matching username' do
- expect(described_class.search(user.username)).to eq([user, user2])
+ context 'with private emails search' do
+ it 'returns users with matching private primary email' do
+ expect(described_class.search(user.email, with_private_emails: true)).to match_array([user])
end
- it 'returns users with a matching username starting with a @' do
- expect(described_class.search("@#{user.username}")).to eq([user, user2])
+ it 'returns users with matching private unconfirmed primary email' do
+ expect(described_class.search(unconfirmed_user.email, with_private_emails: true)).to match_array([unconfirmed_user])
end
- it 'returns users with a partially matching username' do
- expect(described_class.search(user.username[0..2])).to eq([user, user2])
+ it 'returns users with matching private confirmed secondary email' do
+ expect(described_class.search(confirmed_secondary_email.email, with_private_emails: true)).to match_array([user])
end
- it 'returns users with a partially matching username starting with @' do
- expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
+ it 'does not return users with matching private unconfirmed secondary email' do
+ expect(described_class.search(unconfirmed_secondary_email.email, with_private_emails: true)).to be_empty
end
+ end
+ end
- it 'returns users with a matching username regardless of the casing' do
- expect(described_class.search(user2.username.upcase)).to eq([user2])
- end
+ describe 'username matching' do
+ it 'returns users with a matching username' do
+ expect(described_class.search(user.username)).to eq([user, user2])
+ end
- it 'returns users with a exact matching username shorter than 3 chars' do
- expect(described_class.search(user3.username)).to eq([user3])
- end
+ it 'returns users with a matching username starting with a @' do
+ expect(described_class.search("@#{user.username}")).to eq([user, user2])
+ end
- it 'returns users with a exact matching username shorter than 3 chars regardless of the casing' do
- expect(described_class.search(user3.username.upcase)).to eq([user3])
- end
+ it 'returns users with a partially matching username' do
+ expect(described_class.search(user.username[0..2])).to eq([user, user2])
+ end
- context 'when use_minimum_char_limit is false' do
- it 'returns users with a partially matching username' do
- expect(described_class.search('se', use_minimum_char_limit: false)).to eq([user3, user, user2])
- end
- end
+ it 'returns users with a partially matching username starting with @' do
+ expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
end
- it 'returns no matches for an empty string' do
- expect(described_class.search('')).to be_empty
+ it 'returns users with a matching username regardless of the casing' do
+ expect(described_class.search(user2.username.upcase)).to eq([user2])
end
- it 'returns no matches for nil' do
- expect(described_class.search(nil)).to be_empty
+ it 'returns users with a exact matching username shorter than 3 chars' do
+ expect(described_class.search(user3.username)).to eq([user3])
end
- end
- end
- context 'when the use_keyset_aware_user_search_query FF is on' do
- before do
- stub_feature_flags(use_keyset_aware_user_search_query: true)
- end
+ it 'returns users with a exact matching username shorter than 3 chars regardless of the casing' do
+ expect(described_class.search(user3.username.upcase)).to eq([user3])
+ end
- it_behaves_like '.search examples'
- end
+ context 'when use_minimum_char_limit is false' do
+ it 'returns users with a partially matching username' do
+ expect(described_class.search('se', use_minimum_char_limit: false)).to eq([user3, user, user2])
+ end
+ end
+ end
- context 'when the use_keyset_aware_user_search_query FF is off' do
- before do
- stub_feature_flags(use_keyset_aware_user_search_query: false)
+ it 'returns no matches for an empty string' do
+ expect(described_class.search('')).to be_empty
end
- it_behaves_like '.search examples'
+ it 'returns no matches for nil' do
+ expect(described_class.search(nil)).to be_empty
+ end
end
describe '.user_search_minimum_char_limit' do
@@ -3019,17 +2989,53 @@ RSpec.describe User do
end
end
+ shared_examples "find user by login" do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:invalid_login) { "#{user.username}-NOT-EXISTS" }
+
+ context 'when login is nil or empty' do
+ it 'returns nil' do
+ expect(login_method(nil)).to be_nil
+ expect(login_method('')).to be_nil
+ end
+ end
+
+ context 'when login is invalid' do
+ it 'returns nil' do
+ expect(login_method(invalid_login)).to be_nil
+ end
+ end
+
+ context 'when login is username' do
+ it 'returns user' do
+ expect(login_method(user.username)).to eq(user)
+ expect(login_method(user.username.downcase)).to eq(user)
+ expect(login_method(user.username.upcase)).to eq(user)
+ end
+ end
+
+ context 'when login is email' do
+ it 'returns user' do
+ expect(login_method(user.email)).to eq(user)
+ expect(login_method(user.email.downcase)).to eq(user)
+ expect(login_method(user.email.upcase)).to eq(user)
+ end
+ end
+ end
+
describe '.by_login' do
- let(:username) { 'John' }
- let!(:user) { create(:user, username: username) }
+ it_behaves_like "find user by login" do
+ def login_method(login)
+ described_class.by_login(login).take
+ end
+ end
+ end
- it 'gets the correct user' do
- expect(described_class.by_login(user.email.upcase)).to eq user
- expect(described_class.by_login(user.email)).to eq user
- expect(described_class.by_login(username.downcase)).to eq user
- expect(described_class.by_login(username)).to eq user
- expect(described_class.by_login(nil)).to be_nil
- expect(described_class.by_login('')).to be_nil
+ describe '.find_by_login' do
+ it_behaves_like "find user by login" do
+ def login_method(login)
+ described_class.find_by_login(login)
+ end
end
end
@@ -5120,7 +5126,6 @@ RSpec.describe User do
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count'])
expect(cache_mock).to receive(:delete).with(['users', user.id, 'review_requested_open_merge_requests_count'])
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
@@ -5128,20 +5133,6 @@ RSpec.describe User do
end
end
- describe '#invalidate_attention_requested_count' do
- let(:user) { build_stubbed(:user) }
-
- it 'invalidates cache for issue counter' do
- cache_mock = double
-
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
-
- allow(Rails).to receive(:cache).and_return(cache_mock)
-
- user.invalidate_attention_requested_count
- end
- end
-
describe '#invalidate_personal_projects_count' do
let(:user) { build_stubbed(:user) }
@@ -5228,43 +5219,6 @@ RSpec.describe User do
end
end
- describe '#attention_requested_open_merge_requests_count' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:archived_project) { create(:project, :public, :archived) }
-
- before do
- mr1 = create(:merge_request, source_project: project, author: user, reviewers: [user])
- mr2 = create(:merge_request, :closed, source_project: project, author: user, reviewers: [user])
- mr3 = create(:merge_request, source_project: archived_project, author: user, reviewers: [user])
-
- mr1.find_reviewer(user).update!(state: :attention_requested)
- mr2.find_reviewer(user).update!(state: :attention_requested)
- mr3.find_reviewer(user).update!(state: :attention_requested)
- end
-
- it 'returns number of open merge requests from non-archived projects' do
- expect(Rails.cache).not_to receive(:fetch)
- expect(user.attention_requested_open_merge_requests_count(force: true)).to eq 1
- end
-
- context 'when uncached_mr_attention_requests_count is disabled' do
- before do
- stub_feature_flags(uncached_mr_attention_requests_count: false)
- end
-
- it 'fetches from cache' do
- expect(Rails.cache).to receive(:fetch).with(
- user.attention_request_cache_key,
- force: false,
- expires_in: described_class::COUNT_CACHE_VALIDITY_PERIOD
- ).and_call_original
-
- expect(user.attention_requested_open_merge_requests_count).to eq 1
- end
- end
- end
-
describe '#assigned_open_issues_count' do
it 'returns number of open issues from non-archived projects' do
user = create(:user)
@@ -6158,8 +6112,9 @@ RSpec.describe User do
end
context 'user with a bcrypt password hash' do
- # Plaintext password 'eiFubohV6iro'
- let(:encrypted_password) { '$2a$10$xLTxCKOa75IU4RQGqqOrTuZOgZdJEzfSzjG6ZSEi/C31TB/yLZYpi' }
+ # Manually set a 'known' encrypted password
+ let(:password) { User.random_password }
+ let(:encrypted_password) { Devise::Encryptor.digest(User, password) }
let(:user) { create(:user, encrypted_password: encrypted_password) }
shared_examples 'not re-encrypting with PBKDF2' do
@@ -6171,9 +6126,12 @@ RSpec.describe User do
end
context 'using the wrong password' do
+ # password 'WRONG PASSWORD' will not match the bcrypt hash
let(:password) { 'WRONG PASSWORD' }
+ let(:encrypted_password) { Devise::Encryptor.digest(User, User.random_password) }
it { is_expected.to be_falsey }
+
it_behaves_like 'not re-encrypting with PBKDF2'
context 'when pbkdf2_password_encryption is disabled' do
@@ -6182,13 +6140,12 @@ RSpec.describe User do
end
it { is_expected.to be_falsey }
+
it_behaves_like 'not re-encrypting with PBKDF2'
end
end
context 'using the correct password' do
- let(:password) { 'eiFubohV6iro' }
-
it { is_expected.to be_truthy }
it 'validates the password and re-encrypts with PBKDF2' do
@@ -6207,6 +6164,7 @@ RSpec.describe User do
end
it { is_expected.to be_truthy }
+
it_behaves_like 'not re-encrypting with PBKDF2'
end
@@ -6216,14 +6174,18 @@ RSpec.describe User do
end
it { is_expected.to be_truthy }
+
it_behaves_like 'not re-encrypting with PBKDF2'
end
end
end
context 'user with password hash that is neither PBKDF2 nor BCrypt' do
- let(:user) { create(:user, encrypted_password: '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw') }
- let(:password) { 'password' }
+ # Manually calculated User.random_password
+ let(:password) { "gg_w215TmVXGWSt7RJKXwYTVz886f6SDM3zvzztaJf2mX9ttUE8gRkNJSbWyWRLqxz4LFzxBekPe75ydDcGauE9wqg-acKMRT-WpSYjTm1Rdx-tnssE7CQByJcnxwWNH" }
+ # Created with https://argon2.online/ using 'aaaaaaaa' as the salt
+ let(:encrypted_password) { "$argon2i$v=19$m=512,t=4,p=2$YWFhYWFhYWE$PvJscKO5XRlevcgRReUg6w" }
+ let(:user) { create(:user, encrypted_password: encrypted_password) }
it { is_expected.to be_falsey }
@@ -6240,7 +6202,7 @@ RSpec.describe User do
# These entire test section can be removed once the :pbkdf2_password_encryption feature flag is removed.
describe '#password=' do
let(:user) { create(:user) }
- let(:password) { 'Oot5iechahqu' }
+ let(:password) { User.random_password }
def compare_bcrypt_password(user, password)
Devise::Encryptor.compare(User, user.encrypted_password, password)
@@ -6422,7 +6384,7 @@ RSpec.describe User do
end
context 'when password_automatically_set is true' do
- let(:user) { create(:omniauth_user, provider: 'ldap')}
+ let(:user) { create(:omniauth_user, provider: 'ldap') }
it_behaves_like 'password expired not applicable'
end
@@ -6701,6 +6663,40 @@ RSpec.describe User do
end
end
+ describe '#dismissed_callout_for_project?' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:feature_name) { Users::ProjectCallout.feature_names.each_key.first }
+
+ context 'when no callout dismissal record exists' do
+ it 'returns false when no ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project)).to eq false
+ end
+ end
+
+ context 'when dismissed callout exists' do
+ before_all do
+ create(:project_callout,
+ user: user,
+ project_id: project.id,
+ feature_name: feature_name,
+ dismissed_at: 4.months.ago)
+ end
+
+ it 'returns true when no ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project)).to eq true
+ end
+
+ it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project, ignore_dismissal_earlier_than: 6.months.ago)).to eq true
+ end
+
+ it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project, ignore_dismissal_earlier_than: 3.months.ago)).to eq false
+ end
+ end
+ end
+
describe '#find_or_initialize_group_callout' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -6745,6 +6741,50 @@ RSpec.describe User do
end
end
+ describe '#find_or_initialize_project_callout' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:feature_name) { Users::ProjectCallout.feature_names.each_key.first }
+
+ subject(:callout_with_source) do
+ user.find_or_initialize_project_callout(feature_name, project.id)
+ end
+
+ context 'when callout exists' do
+ let!(:callout) do
+ create(:project_callout, user: user, feature_name: feature_name, project_id: project.id)
+ end
+
+ it 'returns existing callout' do
+ expect(callout_with_source).to eq(callout)
+ end
+ end
+
+ context 'when callout does not exist' do
+ context 'when feature name is valid' do
+ it 'initializes a new callout' do
+ expect(callout_with_source).to be_a_new(Users::ProjectCallout)
+ end
+
+ it 'is valid' do
+ expect(callout_with_source).to be_valid
+ end
+ end
+
+ context 'when feature name is not valid' do
+ let(:feature_name) { 'notvalid' }
+
+ it 'initializes a new callout' do
+ expect(callout_with_source).to be_a_new(Users::ProjectCallout)
+ end
+
+ it 'is not valid' do
+ expect(callout_with_source).not_to be_valid
+ end
+ end
+ end
+ end
+
describe '#hook_attrs' do
let(:user) { create(:user) }
let(:user_attributes) do
@@ -7374,4 +7414,12 @@ RSpec.describe User do
expect(another_user.mr_attention_requests_enabled?).to be(false)
end
end
+
+ describe '#account_age_in_days' do
+ let(:user) { create(:user, created_at: Date.yesterday) }
+
+ it 'returns age in days' do
+ expect(user.account_age_in_days).to be(1)
+ end
+ end
end
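The `#dismissed_callout_for_project?` and account-age examples above exercise User helpers whose implementations are not part of this diff. Purely as a hedged sketch (the `project_callouts` association name and both method bodies are assumptions, not the GitLab source), they could look roughly like this:

# Sketch only -- illustrative, not app/models/user.rb.
def dismissed_callout_for_project?(feature_name:, project:, ignore_dismissal_earlier_than: nil)
  callout = project_callouts.find_by(feature_name: feature_name, project: project) # assumed association
  return false unless callout
  return true unless ignore_dismissal_earlier_than

  callout.dismissed_after?(ignore_dismissal_earlier_than)
end

def account_age_in_days
  # Whole days since the account was created; yields 1 for a user created yesterday.
  (Date.current - created_at.to_date).to_i
end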
diff --git a/spec/models/user_status_spec.rb b/spec/models/user_status_spec.rb
index 87d1fa14aca..663df9712ab 100644
--- a/spec/models/user_status_spec.rb
+++ b/spec/models/user_status_spec.rb
@@ -47,4 +47,30 @@ RSpec.describe UserStatus do
end
end
end
+
+ describe '#customized?' do
+ it 'is customized when message text is present' do
+ subject.message = 'My custom status'
+
+ expect(subject).to be_customized
+ end
+
+ it 'is not customized when message text is absent' do
+ subject.message = nil
+
+ expect(subject).not_to be_customized
+ end
+
+ it 'is customized without a message but with a custom emoji' do
+ subject.emoji = 'bow'
+
+ expect(subject).to be_customized
+ end
+
+ it 'is not customized without a message and with the default emoji' do
+ subject.emoji = 'speech_balloon'
+
+ expect(subject).not_to be_customized
+ end
+ end
end
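The `#customized?` behaviour asserted above is consistent with a predicate along these lines; a minimal sketch assuming 'speech_balloon' is the default emoji (as the last example implies), not the actual UserStatus implementation:

DEFAULT_EMOJI = 'speech_balloon' # assumed default

def customized?
  # A status counts as customized once it has any message or a non-default emoji.
  message.present? || emoji != DEFAULT_EMOJI
end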
diff --git a/spec/models/users/calloutable_spec.rb b/spec/models/users/calloutable_spec.rb
index 01603d8bbd6..791fe1c1bc4 100644
--- a/spec/models/users/calloutable_spec.rb
+++ b/spec/models/users/calloutable_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe Users::Calloutable do
describe '#dismissed_after?' do
let(:some_feature_name) { Users::Callout.feature_names.keys.second }
- let(:callout_dismissed_month_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )}
- let(:callout_dismissed_day_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.day.ago )}
+ let(:callout_dismissed_month_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.month.ago ) }
+ let(:callout_dismissed_day_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.day.ago ) }
it 'returns whether a callout was dismissed after the specified date' do
expect(callout_dismissed_month_ago.dismissed_after?(15.days.ago)).to eq(false)
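The `#dismissed_after?` predicate used here is simple enough to sketch from the expectations alone (an assumed implementation, not copied from Users::Calloutable):

def dismissed_after?(dismissal_date)
  # True when the callout was dismissed more recently than the given cut-off.
  dismissed_at > dismissal_date
end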
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
index 7796b54babc..78de9ad8bdb 100644
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
context 'for a track+series email' do
it { is_expected.to validate_presence_of(:track) }
it { is_expected.to validate_presence_of(:series) }
+
it {
is_expected.to validate_uniqueness_of(:user_id)
.scoped_to([:track, :series]).with_message('track series email has already been sent')
@@ -30,10 +31,12 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
it { is_expected.to validate_presence_of(:campaign) }
it { is_expected.not_to validate_presence_of(:track) }
it { is_expected.not_to validate_presence_of(:series) }
+
it {
is_expected.to validate_uniqueness_of(:user_id)
.scoped_to(:campaign).with_message('campaign email has already been sent')
}
+
it { is_expected.to validate_inclusion_of(:campaign).in_array(described_class::CAMPAIGNS) }
end
diff --git a/spec/models/users/project_callout_spec.rb b/spec/models/users/project_callout_spec.rb
new file mode 100644
index 00000000000..87d865c4bdf
--- /dev/null
+++ b/spec/models/users/project_callout_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::ProjectCallout do
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:project) { create_default(:project) }
+ let_it_be(:callout) { create(:project_callout) }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:feature_name) }
+
+ it {
+ is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id, :project_id).ignoring_case_sensitivity
+ }
+ end
+end
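The new spec implies the shape of the model it covers. A hedged reconstruction, where the table name and the enum value are placeholders and only the association and validations are inferred from the expectations above:

module Users
  class ProjectCallout < ApplicationRecord
    self.table_name = 'user_project_callouts' # assumed name

    belongs_to :user
    belongs_to :project

    enum feature_name: { example_feature: 1 } # placeholder value for illustration

    validates :project, :feature_name, presence: true
    validates :feature_name, uniqueness: { scope: [:user_id, :project_id] }
  end
end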
diff --git a/spec/models/webauthn_registration_spec.rb b/spec/models/webauthn_registration_spec.rb
index 6813854bf6c..240e7002ca3 100644
--- a/spec/models/webauthn_registration_spec.rb
+++ b/spec/models/webauthn_registration_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe WebauthnRegistration do
it { is_expected.to validate_presence_of(:counter) }
it { is_expected.to validate_length_of(:name).is_at_least(0) }
it { is_expected.not_to allow_value(nil).for(:name) }
+
it do
is_expected.to validate_numericality_of(:counter)
.only_integer
diff --git a/spec/models/wiki_page/meta_spec.rb b/spec/models/wiki_page/meta_spec.rb
index 37a282657d9..4d1a2dc1c98 100644
--- a/spec/models/wiki_page/meta_spec.rb
+++ b/spec/models/wiki_page/meta_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe WikiPage::Meta do
shared_examples 'canonical_slug setting examples' do
# Constant overhead of two queries for the transaction
let(:upper_query_limit) { query_limit + 2 }
- let(:lower_query_limit) { [upper_query_limit - 1, 0].max}
+ let(:lower_query_limit) { [upper_query_limit - 1, 0].max }
let(:other_slug) { generate(:sluggified_title) }
it 'changes it to the correct value' do
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index f33c8e0a186..e2240c225a9 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -40,10 +40,13 @@ RSpec.describe WorkItem do
subject { build(:work_item).widgets }
it 'returns instances of supported widgets' do
- is_expected.to match_array([instance_of(WorkItems::Widgets::Description),
- instance_of(WorkItems::Widgets::Hierarchy),
- instance_of(WorkItems::Widgets::Assignees),
- instance_of(WorkItems::Widgets::Weight)])
+ is_expected.to include(
+ instance_of(WorkItems::Widgets::Description),
+ instance_of(WorkItems::Widgets::Hierarchy),
+ instance_of(WorkItems::Widgets::Labels),
+ instance_of(WorkItems::Widgets::Assignees),
+ instance_of(WorkItems::Widgets::StartAndDueDate)
+ )
end
end
@@ -107,5 +110,61 @@ RSpec.describe WorkItem do
it { is_expected.to eq(false) }
end
end
+
+ describe 'confidentiality' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when parent and child are confidential' do
+ let_it_be(:parent) { create(:work_item, confidential: true, project: project) }
+ let_it_be(:child) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow making the child non-confidential' do
+ child.confidential = false
+
+ expect(child).not_to be_valid
+ expect(child.errors[:confidential])
+ .to include('associated parent is confidential and can not have non-confidential children.')
+ end
+
+ it 'allows making the parent non-confidential' do
+ parent.confidential = false
+
+ expect(parent).to be_valid
+ end
+ end
+
+ context 'when parent and child are non-confidential' do
+ let_it_be(:parent) { create(:work_item, project: project) }
+ let_it_be(:child) { create(:work_item, :task, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow making the parent confidential' do
+ parent.confidential = true
+
+ expect(parent).not_to be_valid
+ expect(parent.errors[:confidential])
+ .to include('confidential parent can not be used if there are non-confidential children.')
+ end
+
+ it 'allows making the child confidential' do
+ child.confidential = true
+
+ expect(child).to be_valid
+ end
+ end
+
+ context 'when creating new child' do
+ let_it_be(:child) { build(:work_item, project: project) }
+
+ it 'does not allow setting a confidential parent' do
+ child.work_item_parent = create(:work_item, confidential: true, project: project)
+
+ expect(child).not_to be_valid
+ expect(child.errors[:confidential])
+ .to include('associated parent is confidential and can not have non-confidential children.')
+ end
+ end
+ end
end
end
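The confidentiality examples revolve around a parent/child consistency check. A hedged sketch of such a validation on WorkItem, reusing the ParentLink predicate that appears later in this diff (the callback name and structure are assumptions; the error messages are the ones asserted above):

validate :confidentiality_matches_hierarchy

def confidentiality_matches_hierarchy
  if confidential && WorkItems::ParentLink.has_public_children?(id)
    errors.add(:confidential,
      'confidential parent can not be used if there are non-confidential children.')
  elsif !confidential && work_item_parent&.confidential
    errors.add(:confidential,
      'associated parent is confidential and can not have non-confidential children.')
  end
end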
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
index a16b15bbfc9..070b2eef86a 100644
--- a/spec/models/work_items/parent_link_spec.rb
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -69,6 +69,70 @@ RSpec.describe WorkItems::ParentLink do
expect(link1).to be_valid
end
end
+
+ context 'when setting confidentiality' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:confidential_parent, :confidential_child, :valid) do
+ false | false | true
+ true | true | true
+ false | true | true
+ true | false | false
+ end
+
+ with_them do
+ before do
+ issue.confidential = confidential_parent
+ task1.confidential = confidential_child
+ end
+
+ it 'validates whether child confidentiality is compatible with the parent' do
+ link = build(:parent_link, work_item_parent: issue, work_item: task1)
+
+ expect(link.valid?).to eq(valid)
+ end
+ end
+ end
+ end
+ end
+
+ context 'with confidential work items' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:confidential_child) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:public_child) { create(:work_item, :task, project: project) }
+ let_it_be(:confidential_parent) { create(:work_item, confidential: true, project: project) }
+ let_it_be(:public_parent) { create(:work_item, project: project) }
+
+ describe '.has_public_children?' do
+ subject { described_class.has_public_children?(public_parent.id) }
+
+ context 'with confidential child' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: confidential_child) }
+
+ it { is_expected.to be_falsey }
+
+ context 'when there is also a public child' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: public_child) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ describe '.has_confidential_parent?' do
+ subject { described_class.has_confidential_parent?(confidential_child.id) }
+
+ context 'with confidential parent' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: confidential_parent, work_item: confidential_child) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with public parent' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: confidential_child) }
+
+ it { is_expected.to be_falsey }
+ end
end
end
end
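`.has_public_children?` and `.has_confidential_parent?` are exercised above but not defined in this diff. A minimal sketch of how they might be written on WorkItems::ParentLink (assumed; the real scopes may join differently):

def self.has_public_children?(parent_id)
  where(work_item_parent_id: parent_id)
    .joins(:work_item)
    .merge(WorkItem.where(confidential: false))
    .exists?
end

def self.has_confidential_parent?(child_id)
  where(work_item_id: child_id)
    .joins(:work_item_parent)
    .merge(WorkItem.where(confidential: true))
    .exists?
end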
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index e91617effc0..e41df7f0f61 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -64,10 +64,13 @@ RSpec.describe WorkItems::Type do
subject { described_class.available_widgets }
it 'returns a list of all possible widgets' do
- is_expected.to match_array([::WorkItems::Widgets::Description,
- ::WorkItems::Widgets::Hierarchy,
- ::WorkItems::Widgets::Assignees,
- ::WorkItems::Widgets::Weight])
+ is_expected.to include(
+ ::WorkItems::Widgets::Description,
+ ::WorkItems::Widgets::Hierarchy,
+ ::WorkItems::Widgets::Labels,
+ ::WorkItems::Widgets::Assignees,
+ ::WorkItems::Widgets::StartAndDueDate
+ )
end
end
diff --git a/spec/models/work_items/widgets/hierarchy_spec.rb b/spec/models/work_items/widgets/hierarchy_spec.rb
index ab2bcfee13f..cd528772710 100644
--- a/spec/models/work_items/widgets/hierarchy_spec.rb
+++ b/spec/models/work_items/widgets/hierarchy_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#parent' do
- let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent) }
+ let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent).reload }
subject { described_class.new(parent_link.work_item).parent }
@@ -45,8 +45,8 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#children' do
- let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task) }
- let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent) }
+ let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task).reload }
+ let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent).reload }
subject { described_class.new(work_item_parent).children }
diff --git a/spec/models/work_items/widgets/labels_spec.rb b/spec/models/work_items/widgets/labels_spec.rb
new file mode 100644
index 00000000000..15e8aaa1cf3
--- /dev/null
+++ b/spec/models/work_items/widgets/labels_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Labels do
+ let_it_be(:work_item) { create(:work_item, labels: [create(:label)]) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:labels) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:labels) }
+ end
+
+ describe '#labels' do
+ subject { described_class.new(work_item).labels }
+
+ it { is_expected.to eq(work_item.labels) }
+ end
+
+ describe '#allows_scoped_labels?' do
+ subject { described_class.new(work_item).allows_scoped_labels? }
+
+ it { is_expected.to eq(work_item.allows_scoped_labels?) }
+ end
+end
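Given how thin the assertions are, the Labels widget is presumably a small delegator. A sketch under that assumption (the Base superclass and the delegation are guesses consistent with the expectations above, not the actual class):

module WorkItems
  module Widgets
    class Labels < Base
      delegate :labels, :allows_scoped_labels?, to: :work_item
    end
  end
end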
diff --git a/spec/models/work_items/widgets/start_and_due_date_spec.rb b/spec/models/work_items/widgets/start_and_due_date_spec.rb
new file mode 100644
index 00000000000..b023cc73e0f
--- /dev/null
+++ b/spec/models/work_items/widgets/start_and_due_date_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::StartAndDueDate do
+ let_it_be(:work_item) { create(:work_item, start_date: Date.today, due_date: 1.week.from_now) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:start_and_due_date) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:start_and_due_date) }
+ end
+
+ describe '#start_date' do
+ subject { described_class.new(work_item).start_date }
+
+ it { is_expected.to eq(work_item.start_date) }
+ end
+
+ describe '#due_date' do
+ subject { described_class.new(work_item).due_date }
+
+ it { is_expected.to eq(work_item.due_date) }
+ end
+end
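As with the Labels widget, this spec suggests a thin delegator; a minimal sketch under the same assumption:

module WorkItems
  module Widgets
    class StartAndDueDate < Base
      delegate :start_date, :due_date, to: :work_item
    end
  end
end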