gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-12-20 16:37:47 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-12-20 16:37:47 +0300
commit    aee0a117a889461ce8ced6fcf73207fe017f1d99 (patch)
tree      891d9ef189227a8445d83f35c1b0fc99573f4380 /spec/models
parent    8d46af3258650d305f53b819eabf7ab18d22f59e (diff)

Add latest changes from gitlab-org/gitlab@14-6-stable-ee (v14.6.0-rc42)
Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/active_session_spec.rb  535
-rw-r--r--  spec/models/analytics/cycle_analytics/project_stage_spec.rb  25
-rw-r--r--  spec/models/application_setting_spec.rb  28
-rw-r--r--  spec/models/bulk_imports/entity_spec.rb  56
-rw-r--r--  spec/models/chat_name_spec.rb  4
-rw-r--r--  spec/models/ci/build_spec.rb  49
-rw-r--r--  spec/models/ci/job_artifact_spec.rb  4
-rw-r--r--  spec/models/ci/namespace_mirror_spec.rb  94
-rw-r--r--  spec/models/ci/pending_build_spec.rb  12
-rw-r--r--  spec/models/ci/pipeline_spec.rb  96
-rw-r--r--  spec/models/ci/project_mirror_spec.rb  36
-rw-r--r--  spec/models/ci/runner_namespace_spec.rb  6
-rw-r--r--  spec/models/ci/runner_project_spec.rb  6
-rw-r--r--  spec/models/ci/runner_spec.rb  271
-rw-r--r--  spec/models/ci/stage_spec.rb  12
-rw-r--r--  spec/models/clusters/agent_spec.rb  33
-rw-r--r--  spec/models/clusters/agent_token_spec.rb  32
-rw-r--r--  spec/models/clusters/agents/activity_event_spec.rb  32
-rw-r--r--  spec/models/clusters/applications/runner_spec.rb  57
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb  29
-rw-r--r--  spec/models/commit_signatures/gpg_signature_spec.rb (renamed from spec/models/gpg_signature_spec.rb)  4
-rw-r--r--  spec/models/commit_signatures/x509_commit_signature_spec.rb (renamed from spec/models/x509_commit_signature_spec.rb)  2
-rw-r--r--  spec/models/commit_spec.rb  62
-rw-r--r--  spec/models/commit_status_spec.rb  86
-rw-r--r--  spec/models/concerns/after_commit_queue_spec.rb  128
-rw-r--r--  spec/models/concerns/case_sensitivity_spec.rb  5
-rw-r--r--  spec/models/concerns/group_descendant_spec.rb  6
-rw-r--r--  spec/models/concerns/loose_foreign_key_spec.rb  66
-rw-r--r--  spec/models/concerns/participable_spec.rb  74
-rw-r--r--  spec/models/concerns/routable_spec.rb  117
-rw-r--r--  spec/models/concerns/sha_attribute_spec.rb  2
-rw-r--r--  spec/models/concerns/transactions_spec.rb  21
-rw-r--r--  spec/models/container_repository_spec.rb  33
-rw-r--r--  spec/models/customer_relations/contact_spec.rb  23
-rw-r--r--  spec/models/customer_relations/issue_contact_spec.rb  28
-rw-r--r--  spec/models/deployment_metrics_spec.rb  2
-rw-r--r--  spec/models/deployment_spec.rb  114
-rw-r--r--  spec/models/dev_ops_report/metric_spec.rb  7
-rw-r--r--  spec/models/environment_spec.rb  6
-rw-r--r--  spec/models/event_spec.rb  2
-rw-r--r--  spec/models/external_pull_request_spec.rb  4
-rw-r--r--  spec/models/group_spec.rb  45
-rw-r--r--  spec/models/hooks/web_hook_spec.rb  87
-rw-r--r--  spec/models/incident_management/issuable_escalation_status_spec.rb  1
-rw-r--r--  spec/models/instance_configuration_spec.rb  6
-rw-r--r--  spec/models/integrations/jira_spec.rb  2
-rw-r--r--  spec/models/integrations/microsoft_teams_spec.rb  13
-rw-r--r--  spec/models/integrations/shimo_spec.rb  22
-rw-r--r--  spec/models/issue/email_spec.rb  19
-rw-r--r--  spec/models/issue_spec.rb  31
-rw-r--r--  spec/models/lfs_objects_project_spec.rb  22
-rw-r--r--  spec/models/loose_foreign_keys/deleted_record_spec.rb  149
-rw-r--r--  spec/models/member_spec.rb  2
-rw-r--r--  spec/models/merge_request/metrics_spec.rb  6
-rw-r--r--  spec/models/merge_request_assignee_spec.rb  12
-rw-r--r--  spec/models/merge_request_reviewer_spec.rb  12
-rw-r--r--  spec/models/merge_request_spec.rb  18
-rw-r--r--  spec/models/namespace/traversal_hierarchy_spec.rb  2
-rw-r--r--  spec/models/namespace_spec.rb  158
-rw-r--r--  spec/models/packages/build_info_spec.rb  42
-rw-r--r--  spec/models/packages/conan/metadatum_spec.rb  49
-rw-r--r--  spec/models/postgresql/replication_slot_spec.rb  20
-rw-r--r--  spec/models/preloaders/group_root_ancestor_preloader_spec.rb  63
-rw-r--r--  spec/models/project_authorization_spec.rb  67
-rw-r--r--  spec/models/project_spec.rb  183
-rw-r--r--  spec/models/project_team_spec.rb  1
-rw-r--r--  spec/models/repository_spec.rb  10
-rw-r--r--  spec/models/snippet_spec.rb  45
-rw-r--r--  spec/models/terraform/state_version_spec.rb  5
-rw-r--r--  spec/models/timelog_spec.rb  12
-rw-r--r--  spec/models/u2f_registration_spec.rb  19
-rw-r--r--  spec/models/user_spec.rb  218
-rw-r--r--  spec/models/users/callout_spec.rb (renamed from spec/models/user_callout_spec.rb)  4
-rw-r--r--  spec/models/users/calloutable_spec.rb (renamed from spec/models/concerns/calloutable_spec.rb)  10
-rw-r--r--  spec/models/work_item/type_spec.rb  4
75 files changed, 2645 insertions, 923 deletions
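
Note (annotation, not part of the commit): the largest entry above, spec/models/active_session_spec.rb, follows the session store move from Gitlab::Redis::SharedState to Gitlab::Redis::Sessions and makes the spec exercise both the legacy format (key_name_v1 plus Marshal.dump) and the new format (key_name plus ActiveSession#dump). A minimal sketch of that shared-example pattern, using only helper names that appear in the diff below:

# Illustrative sketch only -- mirrors the pattern used in the
# active_session_spec.rb hunks below; not code from the commit itself.
require 'spec_helper'

RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do
  let(:user)       { create(:user) }
  let(:session_id) { 'some-unique-id' }
  let(:session)    { described_class.new(session_id: session_id) }
  let(:lookup_key) { described_class.lookup_key_name(user.id) }

  shared_examples 'listable session' do
    it 'is returned by .list' do
      Gitlab::Redis::Sessions.with do |redis|
        redis.set(session_key, serialized_session)  # session payload
        redis.sadd(lookup_key, session_id)          # per-user lookup set
      end

      expect(described_class.list(user)).to contain_exactly(
        have_attributes(session_id: session_id)
      )
    end
  end

  context 'with a legacy session' do
    let(:session_key)        { described_class.key_name_v1(user.id, session_id) }
    let(:serialized_session) { Marshal.dump(session) }

    it_behaves_like 'listable session'
  end

  context 'with a new-format session' do
    let(:session_key)        { described_class.key_name(user.id, session_id) }
    let(:serialized_session) { session.dump }

    it_behaves_like 'listable session'
  end
end
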
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index 2fd7b127500..751d31ad95a 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
+RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do
+ let(:lookup_key) { described_class.lookup_key_name(user.id) }
let(:user) do
create(:user).tap do |user|
user.current_sign_in_at = Time.current
@@ -43,52 +44,88 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
describe '.list' do
+ def make_session(id)
+ described_class.new(session_id: id)
+ end
+
it 'returns all sessions by user' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ session_id: 'a' }))
- redis.set("session:user:gitlab:#{user.id}:59822c7d9fcdfa03725eff41782ad97d", Marshal.dump({ session_id: 'b' }))
- redis.set("session:user:gitlab:9999:5c8611e4f9c69645ad1a1492f4131358", '')
+ Gitlab::Redis::Sessions.with do |redis|
+ # Some deprecated sessions
+ redis.set(described_class.key_name_v1(user.id, "6919a6f1bb119dd7396fadc38fd18d0d"), Marshal.dump(make_session('a')))
+ redis.set(described_class.key_name_v1(user.id, "59822c7d9fcdfa03725eff41782ad97d"), Marshal.dump(make_session('b')))
+ # Some new sessions
+ redis.set(described_class.key_name(user.id, 'some-unique-id-x'), make_session('c').dump)
+ redis.set(described_class.key_name(user.id, 'some-unique-id-y'), make_session('d').dump)
+ # Some red herrings
+ redis.set(described_class.key_name(9999, "5c8611e4f9c69645ad1a1492f4131358"), 'irrelevant')
+ redis.set(described_class.key_name_v1(9999, "5c8611e4f9c69645ad1a1492f4131358"), 'irrelevant')
redis.sadd(
- "session:lookup:user:gitlab:#{user.id}",
+ lookup_key,
%w[
6919a6f1bb119dd7396fadc38fd18d0d
59822c7d9fcdfa03725eff41782ad97d
+ some-unique-id-x
+ some-unique-id-y
]
)
end
- expect(ActiveSession.list(user)).to match_array [{ session_id: 'a' }, { session_id: 'b' }]
+ expect(described_class.list(user)).to contain_exactly(
+ have_attributes(session_id: 'a'),
+ have_attributes(session_id: 'b'),
+ have_attributes(session_id: 'c'),
+ have_attributes(session_id: 'd')
+ )
end
- it 'does not return obsolete entries and cleans them up' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ session_id: 'a' }))
+ shared_examples 'ignoring obsolete entries' do
+ let(:session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
+ let(:session) { described_class.new(session_id: 'a') }
- redis.sadd(
- "session:lookup:user:gitlab:#{user.id}",
- %w[
- 6919a6f1bb119dd7396fadc38fd18d0d
- 59822c7d9fcdfa03725eff41782ad97d
- ]
- )
- end
+ it 'does not return obsolete entries and cleans them up' do
+ Gitlab::Redis::Sessions.with do |redis|
+ redis.set(session_key, serialized_session)
+
+ redis.sadd(
+ lookup_key,
+ [
+ session_id,
+ '59822c7d9fcdfa03725eff41782ad97d'
+ ]
+ )
+ end
- expect(ActiveSession.list(user)).to eq [{ session_id: 'a' }]
+ expect(ActiveSession.list(user)).to contain_exactly(session)
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.sscan_each("session:lookup:user:gitlab:#{user.id}").to_a).to eq ['6919a6f1bb119dd7396fadc38fd18d0d']
+ Gitlab::Redis::Sessions.with do |redis|
+ expect(redis.sscan_each(lookup_key)).to contain_exactly session_id
+ end
end
end
- it 'returns an empty array if the use does not have any active session' do
- expect(ActiveSession.list(user)).to eq []
+ context 'when the current session is in the old format' do
+ let(:session_key) { described_class.key_name_v1(user.id, session_id) }
+ let(:serialized_session) { Marshal.dump(session) }
+
+ it_behaves_like 'ignoring obsolete entries'
+ end
+
+ context 'when the current session is in the new format' do
+ let(:session_key) { described_class.key_name(user.id, session_id) }
+ let(:serialized_session) { session.dump }
+
+ it_behaves_like 'ignoring obsolete entries'
+ end
+
+ it 'returns an empty array if the user does not have any active session' do
+ expect(ActiveSession.list(user)).to be_empty
end
end
describe '.list_sessions' do
it 'uses the ActiveSession lookup to return original sessions' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' }))
@@ -107,19 +144,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.session_ids_for_user' do
it 'uses the user lookup table to return session ids' do
- session_ids = ['59822c7d9fcdfa03725eff41782ad97d']
-
- Gitlab::Redis::SharedState.with do |redis|
- redis.sadd("session:lookup:user:gitlab:#{user.id}", session_ids)
+ Gitlab::Redis::Sessions.with do |redis|
+ redis.sadd(lookup_key, %w[a b c])
end
- expect(ActiveSession.session_ids_for_user(user.id).map(&:to_s)).to eq(session_ids)
+ expect(ActiveSession.session_ids_for_user(user.id).map(&:to_s)).to match_array(%w[a b c])
end
end
describe '.sessions_from_ids' do
it 'uses the ActiveSession lookup to return original sessions' do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' }))
end
@@ -128,7 +163,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
it 'avoids a redis lookup for an empty array' do
- expect(Gitlab::Redis::SharedState).not_to receive(:with)
+ expect(Gitlab::Redis::Sessions).not_to receive(:with)
expect(ActiveSession.sessions_from_ids([])).to eq([])
end
@@ -137,7 +172,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
stub_const('ActiveSession::SESSION_BATCH_SIZE', 1)
redis = double(:redis)
- expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
+ expect(Gitlab::Redis::Sessions).to receive(:with).and_yield(redis)
sessions = %w[session-a session-b]
mget_responses = sessions.map { |session| [Marshal.dump(session)]}
@@ -151,49 +186,67 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'sets a new redis entry for the user session and a lookup entry' do
ActiveSession.set(user, request)
- Gitlab::Redis::SharedState.with do |redis|
+ session_id = "2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae"
+
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each.to_a).to include(
- "session:user:gitlab:#{user.id}:2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae",
- "session:lookup:user:gitlab:#{user.id}"
+ described_class.key_name(user.id, session_id), # current session
+ described_class.key_name_v1(user.id, session_id), # support for mixed deployment
+ lookup_key
)
end
end
it 'adds timestamps and information from the request' do
- Timecop.freeze(Time.zone.parse('2018-03-12 09:06')) do
- ActiveSession.set(user, request)
+ time = Time.zone.parse('2018-03-12 09:06')
- session = ActiveSession.list(user)
+ travel_to(time) do
+ described_class.set(user, request)
- expect(session.count).to eq 1
- expect(session.first).to have_attributes(
+ sessions = described_class.list(user)
+
+ expect(sessions).to contain_exactly have_attributes(
ip_address: '127.0.0.1',
browser: 'Mobile Safari',
os: 'iOS',
device_name: 'iPhone 6',
device_type: 'smartphone',
- created_at: Time.zone.parse('2018-03-12 09:06'),
- updated_at: Time.zone.parse('2018-03-12 09:06')
+ created_at: eq(time),
+ updated_at: eq(time)
)
end
end
+ it 'is possible to log in only using the old session key' do
+ session_id = "2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae"
+ ActiveSession.set(user, request)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.del(described_class.key_name(user.id, session_id))
+ end
+
+ sessions = ActiveSession.list(user)
+
+ expect(sessions).to be_present
+ end
+
it 'keeps the created_at from the login on consecutive requests' do
- now = Time.zone.parse('2018-03-12 09:06')
+ created_at = Time.zone.parse('2018-03-12 09:06')
+ updated_at = created_at + 1.minute
- Timecop.freeze(now) do
+ travel_to(created_at) do
ActiveSession.set(user, request)
+ end
- Timecop.freeze(now + 1.minute) do
- ActiveSession.set(user, request)
+ travel_to(updated_at) do
+ ActiveSession.set(user, request)
- session = ActiveSession.list(user)
+ session = ActiveSession.list(user)
- expect(session.first).to have_attributes(
- created_at: Time.zone.parse('2018-03-12 09:06'),
- updated_at: Time.zone.parse('2018-03-12 09:07')
- )
- end
+ expect(session.first).to have_attributes(
+ created_at: eq(created_at),
+ updated_at: eq(updated_at)
+ )
end
end
end
@@ -201,22 +254,20 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
describe '.destroy_session' do
shared_examples 'removes all session data' do
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
redis.set("session:user:gitlab:#{user.id}:#{active_session_lookup_key}", '')
# Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88
redis.set("session:gitlab:#{rack_session.private_id}", '')
- redis.set(described_class.key_name(user.id, active_session_lookup_key),
- Marshal.dump(active_session))
- redis.sadd(described_class.lookup_key_name(user.id),
- active_session_lookup_key)
+ redis.set(session_key, serialized_session)
+ redis.sadd(lookup_key, active_session_lookup_key)
end
end
it 'removes the devise session' do
subject
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each(match: "session:gitlab:*").to_a).to be_empty
end
end
@@ -224,15 +275,15 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
it 'removes the lookup entry' do
subject
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.scan_each(match: "session:lookup:user:gitlab:#{user.id}").to_a).to be_empty
+ Gitlab::Redis::Sessions.with do |redis|
+ expect(redis.scan_each(match: lookup_key).to_a).to be_empty
end
end
it 'removes the ActiveSession' do
subject
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
expect(redis.scan_each(match: "session:user:gitlab:*").to_a).to be_empty
end
end
@@ -253,7 +304,19 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
let(:active_session) { ActiveSession.new(session_private_id: rack_session.private_id) }
let(:active_session_lookup_key) { rack_session.private_id }
- include_examples 'removes all session data'
+ context 'when using old session key serialization' do
+ let(:session_key) { described_class.key_name_v1(user.id, active_session_lookup_key) }
+ let(:serialized_session) { Marshal.dump(active_session) }
+
+ include_examples 'removes all session data'
+ end
+
+ context 'when using new session key serialization' do
+ let(:session_key) { described_class.key_name(user.id, active_session_lookup_key) }
+ let(:serialized_session) { active_session.dump }
+
+ include_examples 'removes all session data'
+ end
end
end
end
@@ -265,19 +328,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
ActiveSession.destroy_all_but_current(user, nil)
end
- context 'with user sessions' do
+ shared_examples 'with user sessions' do
let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
before do
- Gitlab::Redis::SharedState.with do |redis|
+ Gitlab::Redis::Sessions.with do |redis|
# setup for current user
[current_session_id, '59822c7d9fcdfa03725eff41782ad97d'].each do |session_public_id|
session_private_id = Rack::Session::SessionId.new(session_public_id).private_id
active_session = ActiveSession.new(session_private_id: session_private_id)
- redis.set(described_class.key_name(user.id, session_private_id),
- Marshal.dump(active_session))
- redis.sadd(described_class.lookup_key_name(user.id),
- session_private_id)
+ redis.set(key_name(user.id, session_private_id), dump_session(active_session))
+ redis.sadd(lookup_key, session_private_id)
end
# setup for unrelated user
@@ -285,10 +346,8 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
session_private_id = Rack::Session::SessionId.new('5c8611e4f9c69645ad1a1492f4131358').private_id
active_session = ActiveSession.new(session_private_id: session_private_id)
- redis.set(described_class.key_name(unrelated_user_id, session_private_id),
- Marshal.dump(active_session))
- redis.sadd(described_class.lookup_key_name(unrelated_user_id),
- session_private_id)
+ redis.set(key_name(unrelated_user_id, session_private_id), dump_session(active_session))
+ redis.sadd(described_class.lookup_key_name(unrelated_user_id), session_private_id)
end
end
@@ -303,19 +362,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
session_private_id = Rack::Session::SessionId.new(current_session_id).private_id
ActiveSession.destroy_all_but_current(user, request.session)
- Gitlab::Redis::SharedState.with do |redis|
- expect(
- redis.smembers(described_class.lookup_key_name(user.id))
- ).to eq([session_private_id])
+ Gitlab::Redis::Sessions.with do |redis|
+ expect(redis.smembers(lookup_key)).to contain_exactly session_private_id
end
end
it 'does not remove impersonated sessions' do
impersonated_session_id = '6919a6f1bb119dd7396fadc38fd18eee'
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(described_class.key_name(user.id, impersonated_session_id),
- Marshal.dump(ActiveSession.new(session_id: Rack::Session::SessionId.new(impersonated_session_id), is_impersonated: true)))
- redis.sadd(described_class.lookup_key_name(user.id), impersonated_session_id)
+ Gitlab::Redis::Sessions.with do |redis|
+ redis.set(key_name(user.id, impersonated_session_id),
+ dump_session(ActiveSession.new(session_id: Rack::Session::SessionId.new(impersonated_session_id), is_impersonated: true)))
+ redis.sadd(lookup_key, impersonated_session_id)
end
expect { ActiveSession.destroy_all_but_current(user, request.session) }.to change { ActiveSession.session_ids_for_user(user.id).size }.from(3).to(2)
@@ -323,155 +380,289 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
expect(ActiveSession.session_ids_for_user(9999).size).to eq(1)
end
end
- end
- describe '.cleanup' do
- before do
- stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5)
- end
+ context 'with legacy sessions' do
+ def key_name(user_id, id)
+ described_class.key_name_v1(user_id, id)
+ end
- it 'removes obsolete lookup entries' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", '')
- redis.sadd("session:lookup:user:gitlab:#{user.id}", '6919a6f1bb119dd7396fadc38fd18d0d')
- redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d')
+ def dump_session(session)
+ Marshal.dump(session)
end
- ActiveSession.cleanup(user)
+ it_behaves_like 'with user sessions'
+ end
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to eq ['6919a6f1bb119dd7396fadc38fd18d0d']
+ context 'with new sessions' do
+ def key_name(user_id, id)
+ described_class.key_name(user_id, id)
+ end
+
+ def dump_session(session)
+ session.dump
end
+
+ it_behaves_like 'with user sessions'
end
+ end
- it 'does not bail if there are no lookup entries' do
- ActiveSession.cleanup(user)
+ describe '.cleanup' do
+ before do
+ stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5)
end
- context 'cleaning up old sessions' do
- let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 }
- let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 }
+ shared_examples 'cleaning up' do
+ context 'when removing obsolete sessions' do
+ let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
- before do
- Gitlab::Redis::SharedState.with do |redis|
- (1..max_number_of_sessions_plus_two).each do |number|
- redis.set(
- "session:user:gitlab:#{user.id}:#{number}",
- Marshal.dump(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago))
- )
- redis.sadd(
- "session:lookup:user:gitlab:#{user.id}",
- "#{number}"
- )
+ it 'removes obsolete lookup entries' do
+ Gitlab::Redis::Sessions.with do |redis|
+ redis.set(session_key, '')
+ redis.sadd(lookup_key, current_session_id)
+ redis.sadd(lookup_key, '59822c7d9fcdfa03725eff41782ad97d')
+ end
+
+ ActiveSession.cleanup(user)
+
+ Gitlab::Redis::Sessions.with do |redis|
+ expect(redis.smembers(lookup_key)).to contain_exactly current_session_id
end
end
end
- it 'removes obsolete active sessions entries' do
+ it 'does not bail if there are no lookup entries' do
ActiveSession.cleanup(user)
+ end
- Gitlab::Redis::SharedState.with do |redis|
- sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
+ context 'cleaning up old sessions' do
+ let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 }
+ let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 }
+
+ before do
+ Gitlab::Redis::Sessions.with do |redis|
+ max_number_of_sessions_plus_two.times do |number|
+ redis.set(
+ key_name(user.id, number),
+ dump_session(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago))
+ )
+ redis.sadd(lookup_key, number.to_s)
+ end
+ end
+ end
+
+ it 'removes obsolete active sessions entries' do
+ ActiveSession.cleanup(user)
+
+ Gitlab::Redis::Sessions.with do |redis|
+ sessions = described_class.list(user)
- expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
- expect(sessions).not_to include("session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_one}", "session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_two}")
+ expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
+ expect(sessions).not_to include(
+ have_attributes(session_id: max_number_of_sessions_plus_one),
+ have_attributes(session_id: max_number_of_sessions_plus_two)
+ )
+ end
end
- end
- it 'removes obsolete lookup entries' do
- ActiveSession.cleanup(user)
+ it 'removes obsolete lookup entries' do
+ ActiveSession.cleanup(user)
- Gitlab::Redis::SharedState.with do |redis|
- lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
+ Gitlab::Redis::Sessions.with do |redis|
+ lookup_entries = redis.smembers(lookup_key)
- expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
- expect(lookup_entries).not_to include(max_number_of_sessions_plus_one.to_s, max_number_of_sessions_plus_two.to_s)
+ expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
+ expect(lookup_entries).not_to include(max_number_of_sessions_plus_one.to_s, max_number_of_sessions_plus_two.to_s)
+ end
end
- end
- it 'removes obsolete lookup entries even without active session' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.sadd(
- "session:lookup:user:gitlab:#{user.id}",
- "#{max_number_of_sessions_plus_two + 1}"
- )
+ it 'removes obsolete lookup entries even without active session' do
+ Gitlab::Redis::Sessions.with do |redis|
+ redis.sadd(lookup_key, "#{max_number_of_sessions_plus_two + 1}")
+ end
+
+ ActiveSession.cleanup(user)
+
+ Gitlab::Redis::Sessions.with do |redis|
+ lookup_entries = redis.smembers(lookup_key)
+
+ expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
+ expect(lookup_entries).not_to include(
+ max_number_of_sessions_plus_one.to_s,
+ max_number_of_sessions_plus_two.to_s,
+ (max_number_of_sessions_plus_two + 1).to_s
+ )
+ end
end
- ActiveSession.cleanup(user)
+ context 'when the number of active sessions is lower than the limit' do
+ before do
+ Gitlab::Redis::Sessions.with do |redis|
+ ((max_number_of_sessions_plus_two - 4)..max_number_of_sessions_plus_two).each do |number|
+ redis.del(key_name(user.id, number))
+ end
+ end
+ end
- Gitlab::Redis::SharedState.with do |redis|
- lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
+ it 'does not remove active session entries, but removes lookup entries' do
+ lookup_entries_before_cleanup = Gitlab::Redis::Sessions.with do |redis|
+ redis.smembers(lookup_key)
+ end
- expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
- expect(lookup_entries).not_to include(
- max_number_of_sessions_plus_one.to_s,
- max_number_of_sessions_plus_two.to_s,
- (max_number_of_sessions_plus_two + 1).to_s
- )
+ sessions_before_cleanup = described_class.list(user)
+
+ described_class.cleanup(user)
+
+ Gitlab::Redis::Sessions.with do |redis|
+ lookup_entries = redis.smembers(lookup_key)
+ sessions = described_class.list(user)
+
+ expect(sessions.count).to eq(sessions_before_cleanup.count)
+ expect(lookup_entries.count).to be < lookup_entries_before_cleanup.count
+ end
+ end
end
end
- context 'when the number of active sessions is lower than the limit' do
+ context 'cleaning up old sessions stored by Rack::Session::SessionId#private_id' do
+ let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 }
+ let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 }
+
before do
- Gitlab::Redis::SharedState.with do |redis|
- ((max_number_of_sessions_plus_two - 4)..max_number_of_sessions_plus_two).each do |number|
- redis.del("session:user:gitlab:#{user.id}:#{number}")
+ Gitlab::Redis::Sessions.with do |redis|
+ (1..max_number_of_sessions_plus_two).each do |number|
+ redis.set(
+ key_name(user.id, number),
+ dump_session(ActiveSession.new(session_private_id: number.to_s, updated_at: number.days.ago))
+ )
+ redis.sadd(lookup_key, number.to_s)
end
end
end
- it 'does not remove active session entries, but removes lookup entries' do
- lookup_entries_before_cleanup = Gitlab::Redis::SharedState.with do |redis|
- redis.smembers("session:lookup:user:gitlab:#{user.id}")
- end
+ it 'removes obsolete active sessions entries' do
+ described_class.cleanup(user)
- sessions_before_cleanup = Gitlab::Redis::SharedState.with do |redis|
- redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
- end
+ Gitlab::Redis::Sessions.with do |redis|
+ sessions = described_class.list(user)
- ActiveSession.cleanup(user)
-
- Gitlab::Redis::SharedState.with do |redis|
- lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
- sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
- expect(sessions.count).to eq(sessions_before_cleanup.count)
- expect(lookup_entries.count).to be < lookup_entries_before_cleanup.count
+ expect(sessions.count).to eq(described_class::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
+ expect(sessions).not_to include(
+ key_name(user.id, max_number_of_sessions_plus_one),
+ key_name(user.id, max_number_of_sessions_plus_two)
+ )
end
end
end
end
- context 'cleaning up old sessions stored by Rack::Session::SessionId#private_id' do
- let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 }
- let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 }
+ context 'with legacy sessions' do
+ let(:session_key) { described_class.key_name_v1(user.id, current_session_id) }
+
+ def key_name(user_id, session_id)
+ described_class.key_name_v1(user_id, session_id)
+ end
+
+ def dump_session(session)
+ Marshal.dump(session)
+ end
+
+ it_behaves_like 'cleaning up'
+ end
+
+ context 'with new sessions' do
+ let(:session_key) { described_class.key_name(user.id, current_session_id) }
+
+ def key_name(user_id, session_id)
+ described_class.key_name(user_id, session_id)
+ end
+
+ def dump_session(session)
+ session.dump
+ end
+
+ it_behaves_like 'cleaning up'
+ end
+ end
+
+ describe '.cleaned_up_lookup_entries' do
+ before do
+ stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5)
+ end
+
+ shared_examples 'cleaning up lookup entries' do
+ let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' }
+ let(:active_count) { 3 }
before do
Gitlab::Redis::SharedState.with do |redis|
- (1..max_number_of_sessions_plus_two).each do |number|
+ active_count.times do |number|
redis.set(
- "session:user:gitlab:#{user.id}:#{number}",
- Marshal.dump(ActiveSession.new(session_private_id: number.to_s, updated_at: number.days.ago))
- )
- redis.sadd(
- "session:lookup:user:gitlab:#{user.id}",
- "#{number}"
+ key_name(user.id, number),
+ dump_session(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago))
)
+
+ redis.sadd(lookup_key, number.to_s)
end
+
+ redis.sadd(lookup_key, (active_count + 1).to_s)
+ redis.sadd(lookup_key, (active_count + 2).to_s)
end
end
- it 'removes obsolete active sessions entries' do
- ActiveSession.cleanup(user)
+ it 'removes obsolete lookup entries' do
+ active = Gitlab::Redis::SharedState.with do |redis|
+ ActiveSession.cleaned_up_lookup_entries(redis, user)
+ end
+
+ expect(active.count).to eq(active_count)
Gitlab::Redis::SharedState.with do |redis|
- sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a
+ lookup_entries = redis.smembers(lookup_key)
- expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
- expect(sessions).not_to(
- include("session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_one}",
- "session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_two}"))
+ expect(lookup_entries.count).to eq(active_count)
+ expect(lookup_entries).not_to include(
+ (active_count + 1).to_s,
+ (active_count + 2).to_s
+ )
end
end
+
+ it 'reports the removed entries' do
+ removed = []
+ Gitlab::Redis::SharedState.with do |redis|
+ ActiveSession.cleaned_up_lookup_entries(redis, user, removed)
+ end
+
+ expect(removed.count).to eq(2)
+ end
+ end
+
+ context 'with legacy sessions' do
+ let(:session_key) { described_class.key_name_v1(user.id, current_session_id) }
+
+ def key_name(user_id, session_id)
+ described_class.key_name_v1(user_id, session_id)
+ end
+
+ def dump_session(session)
+ Marshal.dump(session)
+ end
+
+ it_behaves_like 'cleaning up lookup entries'
+ end
+
+ context 'with new sessions' do
+ let(:session_key) { described_class.key_name(user.id, current_session_id) }
+
+ def key_name(user_id, session_id)
+ described_class.key_name(user_id, session_id)
+ end
+
+ def dump_session(session)
+ session.dump
+ end
+
+ it_behaves_like 'cleaning up lookup entries'
end
end
end
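
Note (annotation, not part of the commit): besides the Redis store rename, the hunks above replace Timecop.freeze with ActiveSupport's travel_to time helper (typically mixed in via spec_helper). A minimal sketch of that pattern, reusing the user/request doubles from the spec above:

# Illustration only: time-travel pattern used in the spec above.
created_at = Time.zone.parse('2018-03-12 09:06')
updated_at = created_at + 1.minute

travel_to(created_at) { ActiveSession.set(user, request) }  # first sign-in
travel_to(updated_at) { ActiveSession.set(user, request) }  # later request

session = ActiveSession.list(user).first
# created_at is kept from the first call; updated_at reflects the latest request.
expect(session).to have_attributes(created_at: eq(created_at), updated_at: eq(updated_at))
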
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index 9efe90e7d41..a67f9fec443 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -29,4 +29,29 @@ RSpec.describe Analytics::CycleAnalytics::ProjectStage do
let(:default_params) { { project: project } }
end
end
+
+ describe '.distinct_stages_within_hierarchy' do
+ let_it_be(:top_level_group) { create(:group) }
+ let_it_be(:sub_group_1) { create(:group, parent: top_level_group) }
+ let_it_be(:sub_group_2) { create(:group, parent: sub_group_1) }
+
+ let_it_be(:project_1) { create(:project, group: sub_group_1) }
+ let_it_be(:project_2) { create(:project, group: sub_group_2) }
+ let_it_be(:project_3) { create(:project, group: top_level_group) }
+
+ let_it_be(:stage1) { create(:cycle_analytics_project_stage, project: project_1, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production) }
+ let_it_be(:stage2) { create(:cycle_analytics_project_stage, project: project_3, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production) }
+
+ let_it_be(:stage3) { create(:cycle_analytics_project_stage, project: project_1, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
+ let_it_be(:stage4) { create(:cycle_analytics_project_stage, project: project_3, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
+
+ subject(:distinct_start_and_end_event_identifiers) { described_class.distinct_stages_within_hierarchy(top_level_group).to_a.pluck(:start_event_identifier, :end_event_identifier) }
+
+ it 'returns distinct stages by start and end events (using stage_event_hash_id)' do
+ expect(distinct_start_and_end_event_identifiers).to match_array([
+ %w[issue_created issue_deployed_to_production],
+ %w[merge_request_created merge_request_merged]
+ ])
+ end
+ end
end
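
Note (annotation, not part of the commit): the new assertion above plucks two columns at once, so each element is a [start_event_identifier, end_event_identifier] pair deduplicated by stage_event_hash_id across the group hierarchy. A small sketch under the same assumptions as the spec above:

# Illustration only: plucking two columns yields pairs, compared
# order-independently with match_array in the spec above.
pairs = Analytics::CycleAnalytics::ProjectStage
          .distinct_stages_within_hierarchy(top_level_group)
          .to_a
          .pluck(:start_event_identifier, :end_event_identifier)
# => [["issue_created", "issue_deployed_to_production"],
#     ["merge_request_created", "merge_request_merged"]]
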
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 8ad83da61f3..67314084c4f 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -247,6 +247,7 @@ RSpec.describe ApplicationSetting do
end
it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('tls://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value(nil).for(:spam_check_endpoint_url) }
@@ -259,6 +260,7 @@ RSpec.describe ApplicationSetting do
end
it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('tls://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
it { is_expected.to allow_value(nil).for(:spam_check_endpoint_url) }
@@ -1239,4 +1241,30 @@ RSpec.describe ApplicationSetting do
expect(subject.kroki_formats_excalidraw).to eq(true)
end
end
+
+ describe '#static_objects_external_storage_auth_token=' do
+ subject { setting.static_objects_external_storage_auth_token = token }
+
+ let(:token) { 'Test' }
+
+ it 'stores an encrypted version of the token' do
+ subject
+
+ expect(setting[:static_objects_external_storage_auth_token]).to be_nil
+ expect(setting[:static_objects_external_storage_auth_token_encrypted]).to be_present
+ expect(setting.static_objects_external_storage_auth_token).to eq('Test')
+ end
+
+ context 'when token is empty' do
+ let(:token) { '' }
+
+ it 'removes an encrypted version of the token' do
+ subject
+
+ expect(setting[:static_objects_external_storage_auth_token]).to be_nil
+ expect(setting[:static_objects_external_storage_auth_token_encrypted]).to be_nil
+ expect(setting.static_objects_external_storage_auth_token).to be_nil
+ end
+ end
+ end
end
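
Note (annotation, not part of the commit): the new #static_objects_external_storage_auth_token= examples above assert the usual encrypted-attribute shape, where the plaintext column stays nil and only the *_encrypted column is written. A hedged sketch of the behaviour being asserted, with attribute names taken from the spec above:

# Illustration only: reader-facing behaviour checked by the spec above.
setting = ApplicationSetting.new
setting.static_objects_external_storage_auth_token = 'Test'

setting[:static_objects_external_storage_auth_token]            # => nil, plaintext column unused
setting[:static_objects_external_storage_auth_token_encrypted]  # => ciphertext is present
setting.static_objects_external_storage_auth_token              # => 'Test', decrypted on read

setting.static_objects_external_storage_auth_token = ''         # blank input clears the token
setting.static_objects_external_storage_auth_token              # => nil
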
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index cc66572cd6f..e5bbac62dcc 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -252,4 +252,60 @@ RSpec.describe BulkImports::Entity, type: :model do
.to eq("/groups/#{entity.encoded_source_full_path}/export_relations/download?relation=test")
end
end
+
+ describe '#entity_type' do
+ it 'returns entity type' do
+ group_entity = build(:bulk_import_entity)
+ project_entity = build(:bulk_import_entity, :project_entity)
+
+ expect(group_entity.entity_type).to eq('group')
+ expect(project_entity.entity_type).to eq('project')
+ end
+ end
+
+ describe '#project?' do
+ it 'returns true if project entity' do
+ group_entity = build(:bulk_import_entity)
+ project_entity = build(:bulk_import_entity, :project_entity)
+
+ expect(group_entity.project?).to eq(false)
+ expect(project_entity.project?).to eq(true)
+ end
+ end
+
+ describe '#group?' do
+ it 'returns true if group entity' do
+ group_entity = build(:bulk_import_entity)
+ project_entity = build(:bulk_import_entity, :project_entity)
+
+ expect(group_entity.group?).to eq(true)
+ expect(project_entity.group?).to eq(false)
+ end
+ end
+
+ describe '#base_resource_url_path' do
+ it 'returns base entity url path' do
+ entity = build(:bulk_import_entity)
+
+ expect(entity.base_resource_url_path).to eq("/groups/#{entity.encoded_source_full_path}")
+ end
+ end
+
+ describe '#wiki_url_path' do
+ it 'returns entity wiki url path' do
+ entity = build(:bulk_import_entity)
+
+ expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
+ end
+ end
+
+ describe '#update_service' do
+ it 'returns correct update service class' do
+ group_entity = build(:bulk_import_entity)
+ project_entity = build(:bulk_import_entity, :project_entity)
+
+ expect(group_entity.update_service).to eq(::Groups::UpdateService)
+ expect(project_entity.update_service).to eq(::Projects::UpdateService)
+ end
+ end
end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 67e0f98d147..1d2ad8b4dce 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -46,9 +46,5 @@ RSpec.describe ChatName do
it_behaves_like 'it has loose foreign keys' do
let(:factory_name) { :chat_name }
-
- before do
- Ci::PipelineChatData # ensure that the referenced model is loaded
- end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index b7de8ca4337..b9a12339e61 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Ci::Build do
it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_session) }
it { is_expected.to have_one(:trace_metadata) }
- it { is_expected.to have_many(:terraform_state_versions).dependent(:nullify).inverse_of(:build) }
+ it { is_expected.to have_many(:terraform_state_versions).inverse_of(:build) }
it { is_expected.to validate_presence_of(:ref) }
@@ -1994,6 +1994,14 @@ RSpec.describe Ci::Build do
it { is_expected.not_to be_retryable }
end
+
+ context 'when deployment is rejected' do
+ before do
+ build.drop!(:deployment_rejected)
+ end
+
+ it { is_expected.not_to be_retryable }
+ end
end
end
@@ -2498,7 +2506,7 @@ RSpec.describe Ci::Build do
it { is_expected.to start_with(project.web_url[0..6]) }
it { is_expected.to include(build.token) }
it { is_expected.to include('gitlab-ci-token') }
- it { is_expected.to include(project.web_url[7..-1]) }
+ it { is_expected.to include(project.web_url[7..]) }
end
context 'when token is empty' do
@@ -3421,10 +3429,6 @@ RSpec.describe Ci::Build do
end
describe '#scoped_variables' do
- before do
- pipeline.clear_memoization(:predefined_vars_in_builder_enabled)
- end
-
it 'records a prometheus metric' do
histogram = double(:histogram)
expect(::Gitlab::Ci::Pipeline::Metrics).to receive(:pipeline_builder_scoped_variables_histogram)
@@ -3522,22 +3526,6 @@ RSpec.describe Ci::Build do
build.scoped_variables
end
-
- context 'when ci builder feature flag is disabled' do
- before do
- stub_feature_flags(ci_predefined_vars_in_builder: false)
- end
-
- it 'does not delegate to the variable builders' do
- expect_next_instance_of(Gitlab::Ci::Variables::Builder) do |builder|
- expect(builder).not_to receive(:predefined_variables)
- end
-
- build.scoped_variables
- end
-
- it_behaves_like 'calculates scoped_variables'
- end
end
describe '#simple_variables_without_dependencies' do
@@ -3782,6 +3770,12 @@ RSpec.describe Ci::Build do
build.enqueue
end
+
+ it 'queues BuildHooksWorker' do
+ expect(BuildHooksWorker).to receive(:perform_async).with(build.id)
+
+ build.enqueue
+ end
end
describe 'state transition: pending: :running' do
@@ -4474,7 +4468,7 @@ RSpec.describe Ci::Build do
'create' => 0,
'update' => 1,
'delete' => 0,
- 'job_name' => build.options.dig(:artifacts, :name).to_s
+ 'job_name' => build.name
)
)
)
@@ -5423,4 +5417,13 @@ RSpec.describe Ci::Build do
expect(subject).to be true
end
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :ci_build }
+ end
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:ci_build, user: create(:user)) }
+ let!(:parent) { model.user }
+ end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index d63f87e8943..38061e0975f 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -700,4 +700,8 @@ RSpec.describe Ci::JobArtifact do
when changes or new entries are made.
MSG
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :ci_job_artifact }
+ end
end
diff --git a/spec/models/ci/namespace_mirror_spec.rb b/spec/models/ci/namespace_mirror_spec.rb
new file mode 100644
index 00000000000..b4c71f51377
--- /dev/null
+++ b/spec/models/ci/namespace_mirror_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::NamespaceMirror do
+ let!(:group1) { create(:group) }
+ let!(:group2) { create(:group, parent: group1) }
+ let!(:group3) { create(:group, parent: group2) }
+ let!(:group4) { create(:group, parent: group3) }
+
+ describe '.sync!' do
+ let!(:event) { namespace.sync_events.create! }
+
+ subject(:sync) { described_class.sync!(event.reload) }
+
+ context 'when namespace hierarchy does not exist in the first place' do
+ let(:namespace) { group3 }
+
+ it 'creates the hierarchy' do
+ expect { sync }.to change { described_class.count }.from(0).to(1)
+
+ expect(namespace.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id, group2.id, group3.id])
+ end
+ end
+
+ context 'when namespace hierarchy does already exist' do
+ let(:namespace) { group3 }
+
+ before do
+ described_class.create!(namespace: namespace, traversal_ids: [namespace.id])
+ end
+
+ it 'updates the hierarchy' do
+ expect { sync }.not_to change { described_class.count }
+
+ expect(namespace.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id, group2.id, group3.id])
+ end
+ end
+
+ # I did not extract this context to a `shared_context` because the behavior will change
+ # after implementing the TODO in `Ci::NamespaceMirror.sync!`
+ context 'changing the middle namespace' do
+ let(:namespace) { group2 }
+
+ before do
+ described_class.create!(namespace_id: group1.id, traversal_ids: [group1.id])
+ described_class.create!(namespace_id: group2.id, traversal_ids: [group1.id, group2.id])
+ described_class.create!(namespace_id: group3.id, traversal_ids: [group1.id, group2.id, group3.id])
+ described_class.create!(namespace_id: group4.id, traversal_ids: [group1.id, group2.id, group3.id, group4.id])
+
+ group2.update!(parent: nil)
+ end
+
+ it 'updates hierarchies for the base but wait for events for the children' do
+ expect { sync }.not_to change { described_class.count }
+
+ expect(group1.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id])
+ expect(group2.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id])
+ expect(group3.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id])
+ expect(group4.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id, group4.id])
+ end
+ end
+
+ context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
+ before do
+ stub_feature_flags(sync_traversal_ids: false,
+ use_traversal_ids: false,
+ use_traversal_ids_for_ancestors: false)
+ end
+
+ context 'changing the middle namespace' do
+ let(:namespace) { group2 }
+
+ before do
+ described_class.create!(namespace_id: group1.id, traversal_ids: [group1.id])
+ described_class.create!(namespace_id: group2.id, traversal_ids: [group1.id, group2.id])
+ described_class.create!(namespace_id: group3.id, traversal_ids: [group1.id, group2.id, group3.id])
+ described_class.create!(namespace_id: group4.id, traversal_ids: [group1.id, group2.id, group3.id, group4.id])
+
+ group2.update!(parent: nil)
+ end
+
+ it 'updates hierarchies for the base and descendants' do
+ expect { sync }.not_to change { described_class.count }
+
+ expect(group1.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id])
+ expect(group2.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id])
+ expect(group3.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id])
+ expect(group4.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id, group4.id])
+ end
+ end
+ end
+ end
+end
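
Note (annotation, not part of the commit): the new Ci::NamespaceMirror spec above verifies that a namespace sync event copies the group's ancestor chain into a denormalized traversal_ids array on the mirror row. A minimal sketch of the flow under test, using only calls that appear in the spec above:

# Illustration only: the sync flow exercised by the spec above.
group1 = create(:group)                   # top-level group
group2 = create(:group, parent: group1)   # child group

event = group2.sync_events.create!        # recorded when the hierarchy changes
Ci::NamespaceMirror.sync!(event.reload)   # upserts the mirror row

group2.ci_namespace_mirror.traversal_ids  # => [group1.id, group2.id]
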
diff --git a/spec/models/ci/pending_build_spec.rb b/spec/models/ci/pending_build_spec.rb
index ad711f5622f..abf0fb443bb 100644
--- a/spec/models/ci/pending_build_spec.rb
+++ b/spec/models/ci/pending_build_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Ci::PendingBuild do
project.shared_runners_enabled = true
end
- context 'when ci_pending_builds_maintain_shared_runners_data is enabled' do
+ context 'when ci_pending_builds_maintain_denormalized_data is enabled' do
it 'sets instance_runners_enabled to true' do
described_class.upsert_from_build!(build)
@@ -150,9 +150,9 @@ RSpec.describe Ci::PendingBuild do
end
end
- context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
+ context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
before do
- stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false)
+ stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
end
it 'sets instance_runners_enabled to false' do
@@ -168,7 +168,7 @@ RSpec.describe Ci::PendingBuild do
subject(:ci_pending_build) { described_class.last }
- context 'when ci_pending_builds_maintain_tags_data is enabled' do
+ context 'when ci_pending_builds_maintain_denormalized_data is enabled' do
it 'sets tag_ids' do
described_class.upsert_from_build!(build)
@@ -176,9 +176,9 @@ RSpec.describe Ci::PendingBuild do
end
end
- context 'when ci_pending_builds_maintain_tags_data is disabled' do
+ context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
before do
- stub_feature_flags(ci_pending_builds_maintain_tags_data: false)
+ stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
end
it 'does not set tag_ids' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index e573a6ef780..fd9970699d7 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to have_many(:trigger_requests) }
it { is_expected.to have_many(:variables) }
it { is_expected.to have_many(:builds) }
+ it { is_expected.to have_many(:statuses_order_id_desc) }
it { is_expected.to have_many(:bridges) }
it { is_expected.to have_many(:job_artifacts).through(:builds) }
it { is_expected.to have_many(:auto_canceled_pipelines) }
@@ -35,8 +36,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:triggered_pipelines) }
it { is_expected.to have_many(:pipeline_artifacts) }
- it { is_expected.to have_many(:package_build_infos).dependent(:nullify).inverse_of(:pipeline) }
- it { is_expected.to have_many(:package_file_build_infos).dependent(:nullify).inverse_of(:pipeline) }
it { is_expected.to have_one(:chat_data) }
it { is_expected.to have_one(:source_pipeline) }
@@ -757,23 +756,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'with multiple pipelines' do
before_all do
create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline)
- create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline)
+ create(:ci_build, name: "rubocop", coverage: 35, pipeline: pipeline)
end
it "calculates average when there are two builds with coverage" do
- expect(pipeline.coverage).to eq("35.00")
+ expect(pipeline.coverage).to be_within(0.001).of(32.5)
end
it "calculates average when there are two builds with coverage and one with nil" do
create(:ci_build, pipeline: pipeline)
- expect(pipeline.coverage).to eq("35.00")
+ expect(pipeline.coverage).to be_within(0.001).of(32.5)
end
it "calculates average when there are two builds with coverage and one is retried" do
create(:ci_build, name: "rubocop", coverage: 30, pipeline: pipeline, retried: true)
- expect(pipeline.coverage).to eq("35.00")
+ expect(pipeline.coverage).to be_within(0.001).of(32.5)
end
end
@@ -1358,12 +1357,26 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe 'synching status to Jira' do
let(:worker) { ::JiraConnect::SyncBuildsWorker }
- %i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event|
- context "when we call pipeline.#{event}" do
- it 'triggers a Jira synch worker' do
- expect(worker).to receive(:perform_async).with(pipeline.id, Integer)
+ context 'when Jira Connect subscription does not exist' do
+ it 'does not trigger a Jira synch worker' do
+ expect(worker).not_to receive(:perform_async)
- pipeline.send(event)
+ pipeline.prepare!
+ end
+ end
+
+ context 'when Jira Connect subscription exists' do
+ before_all do
+ create(:jira_connect_subscription, namespace: project.namespace)
+ end
+
+ %i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event|
+ context "when we call pipeline.#{event}" do
+ it 'triggers a Jira synch worker' do
+ expect(worker).to receive(:perform_async).with(pipeline.id, Integer)
+
+ pipeline.send(event)
+ end
end
end
end
@@ -1503,10 +1516,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'pipeline caching' do
- it 'performs ExpirePipelinesCacheWorker' do
- expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
+ context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
+ end
- pipeline.cancel
+ it 'executes Ci::ExpirePipelineCacheService' do
+ expect_next_instance_of(Ci::ExpirePipelineCacheService) do |service|
+ expect(service).to receive(:execute).with(pipeline)
+ end
+
+ pipeline.cancel
+ end
+ end
+
+ context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
+ end
+
+ it 'performs ExpirePipelinesCacheWorker' do
+ expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
+
+ pipeline.cancel
+ end
end
end
@@ -3173,11 +3206,35 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when pipeline is not child nor parent' do
let_it_be(:pipeline) { create(:ci_pipeline, :created) }
- let_it_be(:build) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) }
+ let_it_be(:build, refind: true) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) }
it 'returns just the pipeline environment' do
expect(subject).to contain_exactly(build.deployment.environment)
end
+
+ context 'when deployment SHA is not matched' do
+ before do
+ build.deployment.update!(sha: 'old-sha')
+ end
+
+ it 'does not return environments' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ context 'when an associated environment does not have deployments' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+ let_it_be(:build) { create(:ci_build, :stop_review_app, pipeline: pipeline) }
+ let_it_be(:environment) { create(:environment, project: pipeline.project) }
+
+ before_all do
+ build.metadata.update!(expanded_environment_name: environment.name)
+ end
+
+ it 'does not return environments' do
+ expect(subject).to be_empty
+ end
end
context 'when pipeline is in extended family' do
@@ -4611,4 +4668,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(pipeline.authorized_cluster_agents).to contain_exactly(agent) # cached
end
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :ci_pipeline }
+ end
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:ci_pipeline, user: create(:user)) }
+ let!(:parent) { model.user }
+ end
end
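
Note (annotation, not part of the commit): the coverage assertions changed above move from the formatted string "35.00" to a numeric comparison with a tolerance, the usual way to check floating-point averages in RSpec. A minimal sketch with the values from the hunks above:

# Illustration only: tolerance-based comparison of a float average.
coverage = (30 + 35) / 2.0                       # average of the two builds' coverage
expect(coverage).to be_within(0.001).of(32.5)
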
diff --git a/spec/models/ci/project_mirror_spec.rb b/spec/models/ci/project_mirror_spec.rb
new file mode 100644
index 00000000000..199285b036c
--- /dev/null
+++ b/spec/models/ci/project_mirror_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ProjectMirror do
+ let_it_be(:group1) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+
+ let!(:project) { create(:project, namespace: group2) }
+
+ describe '.sync!' do
+ let!(:event) { Projects::SyncEvent.create!(project: project) }
+
+ subject(:sync) { described_class.sync!(event.reload) }
+
+ context 'when project hierarchy does not exist in the first place' do
+ it 'creates a ci_projects record' do
+ expect { sync }.to change { described_class.count }.from(0).to(1)
+
+ expect(project.ci_project_mirror).to have_attributes(namespace_id: group2.id)
+ end
+ end
+
+ context 'when project hierarchy does already exist' do
+ before do
+ described_class.create!(project_id: project.id, namespace_id: group1.id)
+ end
+
+ it 'updates the related ci_projects record' do
+ expect { sync }.not_to change { described_class.count }
+
+ expect(project.ci_project_mirror).to have_attributes(namespace_id: group2.id)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/runner_namespace_spec.rb b/spec/models/ci/runner_namespace_spec.rb
index 4e7cf7a3cb3..41d805adb9f 100644
--- a/spec/models/ci/runner_namespace_spec.rb
+++ b/spec/models/ci/runner_namespace_spec.rb
@@ -4,12 +4,6 @@ require 'spec_helper'
RSpec.describe Ci::RunnerNamespace do
it_behaves_like 'includes Limitable concern' do
- before do
- skip_default_enabled_yaml_check
-
- stub_feature_flags(ci_runner_limits_override: false)
- end
-
subject { build(:ci_runner_namespace, group: create(:group, :nested), runner: create(:ci_runner, :group)) }
end
end
diff --git a/spec/models/ci/runner_project_spec.rb b/spec/models/ci/runner_project_spec.rb
index fef1416a84a..13369dba2cf 100644
--- a/spec/models/ci/runner_project_spec.rb
+++ b/spec/models/ci/runner_project_spec.rb
@@ -4,12 +4,6 @@ require 'spec_helper'
RSpec.describe Ci::RunnerProject do
it_behaves_like 'includes Limitable concern' do
- before do
- skip_default_enabled_yaml_check
-
- stub_feature_flags(ci_runner_limits_override: false)
- end
-
subject { build(:ci_runner_project, project: create(:project), runner: create(:ci_runner, :project)) }
end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 2e79159cc60..5142f70fa2c 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe Ci::Runner do
it_behaves_like 'it has loose foreign keys' do
let(:factory_name) { :ci_runner }
-
- before do
- Clusters::Applications::Runner # ensure that the referenced model is loaded
- end
end
describe 'groups association' do
@@ -298,26 +294,134 @@ RSpec.describe Ci::Runner do
describe '.recent' do
subject { described_class.recent }
+ let!(:runner1) { create(:ci_runner, :instance, contacted_at: nil, created_at: 2.months.ago) }
+ let!(:runner2) { create(:ci_runner, :instance, contacted_at: nil, created_at: 3.months.ago) }
+ let!(:runner3) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 2.months.ago) }
+ let!(:runner4) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 3.months.ago) }
+
+ it { is_expected.to eq([runner1, runner3, runner4])}
+ end
+
+ describe '.active' do
+ subject { described_class.active(active_value) }
+
+ let!(:runner1) { create(:ci_runner, :instance, active: false) }
+ let!(:runner2) { create(:ci_runner, :instance) }
+
+ context 'with active_value set to false' do
+ let(:active_value) { false }
+
+ it 'returns inactive runners' do
+ is_expected.to match_array([runner1])
+ end
+ end
+
+ context 'with active_value set to true' do
+ let(:active_value) { true }
+
+ it 'returns active runners' do
+ is_expected.to match_array([runner2])
+ end
+ end
+ end
+
+ describe '.paused' do
before do
- @runner1 = create(:ci_runner, :instance, contacted_at: nil, created_at: 2.months.ago)
- @runner2 = create(:ci_runner, :instance, contacted_at: nil, created_at: 3.months.ago)
- @runner3 = create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 2.months.ago)
- @runner4 = create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 3.months.ago)
- @runner5 = create(:ci_runner, :instance, contacted_at: 3.months.ago, created_at: 5.months.ago)
+ expect(described_class).to receive(:active).with(false).and_call_original
end
- it { is_expected.to eq([@runner1, @runner3, @runner4])}
+ subject { described_class.paused }
+
+ let!(:runner1) { create(:ci_runner, :instance, active: false) }
+ let!(:runner2) { create(:ci_runner, :instance) }
+
+ it 'returns inactive runners' do
+ is_expected.to match_array([runner1])
+ end
end
- describe '.online' do
- subject { described_class.online }
+ describe '.stale' do
+ subject { described_class.stale }
+
+ let!(:runner1) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago + 10.seconds) }
+ let!(:runner2) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago - 1.second) }
+ let!(:runner3) { create(:ci_runner, :instance, created_at: 3.months.ago - 1.second, contacted_at: nil) }
+ let!(:runner4) { create(:ci_runner, :instance, created_at: 2.months.ago, contacted_at: nil) }
+
+ it 'returns stale runners' do
+ is_expected.to match_array([runner2, runner3])
+ end
+ end
+
+ describe '#stale?', :clean_gitlab_redis_cache do
+ let(:runner) { create(:ci_runner, :instance) }
+
+ subject { runner.stale? }
before do
- @runner1 = create(:ci_runner, :instance, contacted_at: 2.hours.ago)
- @runner2 = create(:ci_runner, :instance, contacted_at: 1.second.ago)
+ allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:platform).and_return("darwin")
end
- it { is_expected.to eq([@runner2])}
+ context 'table tests' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:created_at, :contacted_at, :expected_stale?) do
+ nil | nil | false
+ 3.months.ago - 1.second | 3.months.ago - 0.001.seconds | true
+ 3.months.ago - 1.second | 3.months.ago + 1.hour | false
+ 3.months.ago - 1.second | nil | true
+ 3.months.ago + 1.hour | nil | false
+ end
+
+ with_them do
+ before do
+ runner.created_at = created_at
+ end
+
+ context 'no cache value' do
+ before do
+ stub_redis_runner_contacted_at(nil)
+ runner.contacted_at = contacted_at
+ end
+
+ specify do
+ is_expected.to eq(expected_stale?)
+ end
+ end
+
+ context 'with cache value' do
+ before do
+ runner.contacted_at = contacted_at ? contacted_at + 1.week : nil
+ stub_redis_runner_contacted_at(contacted_at.to_s)
+ end
+
+ specify do
+ is_expected.to eq(expected_stale?)
+ end
+ end
+
+ def stub_redis_runner_contacted_at(value)
+ return unless created_at
+
+ Gitlab::Redis::Cache.with do |redis|
+ cache_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:get).with(cache_key)
+ .and_return({ contacted_at: value }.to_json).at_least(:once)
+ end
+ end
+ end
+ end
+ end
+
+ describe '.online' do
+ subject { described_class.online }
+
+ let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
+ let!(:runner2) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
+
+ it { is_expected.to match_array([runner2]) }
end
describe '#online?', :clean_gitlab_redis_cache do
@@ -344,7 +448,7 @@ RSpec.describe Ci::Runner do
it { is_expected.to be_falsey }
end
- context 'contacted long time ago time' do
+ context 'contacted long time ago' do
before do
runner.contacted_at = 1.year.ago
end
@@ -362,7 +466,7 @@ RSpec.describe Ci::Runner do
end
context 'with cache value' do
- context 'contacted long time ago time' do
+ context 'contacted long time ago' do
before do
runner.contacted_at = 1.year.ago
stub_redis_runner_contacted_at(1.year.ago.to_s)
@@ -393,12 +497,10 @@ RSpec.describe Ci::Runner do
describe '.offline' do
subject { described_class.offline }
- before do
- @runner1 = create(:ci_runner, :instance, contacted_at: 2.hours.ago)
- @runner2 = create(:ci_runner, :instance, contacted_at: 1.second.ago)
- end
+ let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
+ let!(:runner2) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
- it { is_expected.to eq([@runner1])}
+ it { is_expected.to eq([runner1]) }
end
describe '#tick_runner_queue' do
@@ -626,16 +728,33 @@ RSpec.describe Ci::Runner do
end
describe '#status' do
- let(:runner) { build(:ci_runner, :instance) }
+ let(:runner) { build(:ci_runner, :instance, created_at: 4.months.ago) }
+ let(:legacy_mode) { }
- subject { runner.status }
+ subject { runner.status(legacy_mode) }
context 'never connected' do
before do
runner.contacted_at = nil
end
- it { is_expected.to eq(:not_connected) }
+ context 'with legacy_mode enabled' do
+ let(:legacy_mode) { '14.5' }
+
+ it { is_expected.to eq(:not_connected) }
+ end
+
+ context 'with legacy_mode disabled' do
+ it { is_expected.to eq(:stale) }
+ end
+
+ context 'created recently' do
+ before do
+ runner.created_at = 1.day.ago
+ end
+
+ it { is_expected.to eq(:never_contacted) }
+ end
end
context 'inactive but online' do
@@ -644,7 +763,15 @@ RSpec.describe Ci::Runner do
runner.active = false
end
- it { is_expected.to eq(:online) }
+ context 'with legacy_mode enabled' do
+ let(:legacy_mode) { '14.5' }
+
+ it { is_expected.to eq(:paused) }
+ end
+
+ context 'with legacy_mode disabled' do
+ it { is_expected.to eq(:online) }
+ end
end
context 'contacted 1s ago' do
@@ -655,13 +782,29 @@ RSpec.describe Ci::Runner do
it { is_expected.to eq(:online) }
end
- context 'contacted long time ago' do
+ context 'contacted recently' do
before do
- runner.contacted_at = 1.year.ago
+ runner.contacted_at = (3.months - 1.hour).ago
end
it { is_expected.to eq(:offline) }
end
+
+ context 'contacted long time ago' do
+ before do
+ runner.contacted_at = (3.months + 1.second).ago
+ end
+
+ context 'with legacy_mode enabled' do
+ let(:legacy_mode) { '14.5' }
+
+ it { is_expected.to eq(:offline) }
+ end
+
+ context 'with legacy_mode disabled' do
+ it { is_expected.to eq(:stale) }
+ end
+ end
end
describe '#deprecated_rest_status' do
@@ -760,8 +903,9 @@ RSpec.describe Ci::Runner do
describe '#heartbeat' do
let(:runner) { create(:ci_runner, :project) }
+ let(:executor) { 'shell' }
- subject { runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }) }
+ subject { runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor) }
context 'when database was updated recently' do
before do
@@ -797,6 +941,26 @@ RSpec.describe Ci::Runner do
expect_redis_update
does_db_update
end
+
+ %w(custom shell docker docker-windows docker-ssh ssh parallels virtualbox docker+machine docker-ssh+machine kubernetes some-unknown-type).each do |executor|
+ context "with #{executor} executor" do
+ let(:executor) { executor }
+
+ it 'updates with expected executor type' do
+ expect_redis_update
+
+ subject
+
+ expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
+ end
+
+ def expected_executor_type
+ return 'unknown' if executor == 'some-unknown-type'
+
+ executor.gsub(/[+-]/, '_')
+ end
+ end
+ end
end
def expect_redis_update
@@ -810,6 +974,7 @@ RSpec.describe Ci::Runner do
expect { subject }.to change { runner.reload.read_attribute(:contacted_at) }
.and change { runner.reload.read_attribute(:architecture) }
.and change { runner.reload.read_attribute(:config) }
+ .and change { runner.reload.read_attribute(:executor_type) }
end
end
@@ -1194,31 +1359,43 @@ RSpec.describe Ci::Runner do
end
describe '.belonging_to_group' do
- it 'returns the specific group runner' do
- group = create(:group)
- runner = create(:ci_runner, :group, groups: [group])
- unrelated_group = create(:group)
- create(:ci_runner, :group, groups: [unrelated_group])
+ shared_examples 'returns group runners' do
+ it 'returns the specific group runner' do
+ group = create(:group)
+ runner = create(:ci_runner, :group, groups: [group])
+ unrelated_group = create(:group)
+ create(:ci_runner, :group, groups: [unrelated_group])
- expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner)
- end
+ expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner)
+ end
- context 'runner belonging to parent group' do
- let_it_be(:parent_group) { create(:group) }
- let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) }
- let_it_be(:group) { create(:group, parent: parent_group) }
+ context 'runner belonging to parent group' do
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
- context 'when include_parent option is passed' do
- it 'returns the group runner from the parent group' do
- expect(described_class.belonging_to_group(group.id, include_ancestors: true)).to contain_exactly(parent_runner)
+ context 'when include_parent option is passed' do
+ it 'returns the group runner from the parent group' do
+ expect(described_class.belonging_to_group(group.id, include_ancestors: true)).to contain_exactly(parent_runner)
+ end
end
- end
- context 'when include_parent option is not passed' do
- it 'does not return the group runner from the parent group' do
- expect(described_class.belonging_to_group(group.id)).to be_empty
+ context 'when include_parent option is not passed' do
+ it 'does not return the group runner from the parent group' do
+ expect(described_class.belonging_to_group(group.id)).to be_empty
+ end
end
end
end
+
+ it_behaves_like 'returns group runners'
+
+ context 'when feature flag :linear_runner_ancestor_scopes is disabled' do
+ before do
+ stub_feature_flags(linear_runner_ancestor_scopes: false)
+ end
+
+ it_behaves_like 'returns group runners'
+ end
end
end
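The Ci::Runner examples above pin down a consistent rule set: a runner is stale once both its creation time and its last contact are more than three months in the past, #status takes an optional legacy_mode argument that preserves the older :not_connected/:paused names, and .paused simply delegates to .active(false). A minimal standalone sketch of those rules, assuming the three-month window taken from the spec fixtures and an invented one-hour online window (the real model's constants, Redis caching, and scopes differ):

# Illustrative sketch only; not GitLab's Ci::Runner implementation.
# The 3-month staleness window comes from the spec data above; the online
# window and the legacy status mapping are assumptions.
class RunnerStatusSketch
  STALE_AFTER   = 60 * 60 * 24 * 90 # roughly the spec's 3-month cutoff
  ONLINE_WITHIN = 60 * 60           # assumed online window (well under 2 hours)

  def initialize(created_at:, contacted_at:, active: true)
    @created_at = created_at
    @contacted_at = contacted_at
    @active = active
  end

  def stale?
    return false unless @created_at

    # Stale when the most recent sign of life (creation or contact) is too old.
    [@created_at, @contacted_at].compact.max < Time.now - STALE_AFTER
  end

  def online?
    !@contacted_at.nil? && @contacted_at > Time.now - ONLINE_WITHIN
  end

  # legacy_mode mirrors the status('14.5') calls above: legacy callers still
  # expect :not_connected and :paused instead of the newer states.
  def status(legacy_mode = nil)
    return legacy_status if legacy_mode

    return :stale if stale?
    return :never_contacted unless @contacted_at

    online? ? :online : :offline
  end

  private

  def legacy_status
    return :not_connected unless @contacted_at
    return :paused unless @active

    online? ? :online : :offline
  end
end

# Mirrors the "created 4 months ago, never contacted" example: prints :stale.
p RunnerStatusSketch.new(created_at: Time.now - 60 * 60 * 24 * 120, contacted_at: nil).status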
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index 5e0fcb4882f..2b6f22e68f1 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -28,6 +28,18 @@ RSpec.describe Ci::Stage, :models do
end
end
+ describe '.by_position' do
+ it 'finds stages by position' do
+ a = create(:ci_stage_entity, position: 1)
+ b = create(:ci_stage_entity, position: 2)
+ c = create(:ci_stage_entity, position: 3)
+
+ expect(described_class.by_position(1)).to contain_exactly(a)
+ expect(described_class.by_position(2)).to contain_exactly(b)
+ expect(described_class.by_position(%w[1 3])).to contain_exactly(a, c)
+ end
+ end
+
describe '.by_name' do
it 'finds stages by name' do
a = create(:ci_stage_entity, name: 'a')
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index f9df84e8ff4..3b521086c14 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -75,4 +75,37 @@ RSpec.describe Clusters::Agent do
expect(agent.has_access_to?(create(:project))).to be_falsey
end
end
+
+ describe '#active?' do
+ let_it_be(:agent) { create(:cluster_agent) }
+
+ let!(:token) { create(:cluster_agent_token, agent: agent, last_used_at: last_used_at) }
+
+ subject { agent.active? }
+
+ context 'agent has never connected' do
+ let(:last_used_at) { nil }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'agent has connected, but not recently' do
+ let(:last_used_at) { 2.hours.ago }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'agent has connected recently' do
+ let(:last_used_at) { 2.minutes.ago }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'agent has multiple tokens' do
+ let!(:inactive_token) { create(:cluster_agent_token, agent: agent, last_used_at: 2.hours.ago) }
+ let(:last_used_at) { 2.minutes.ago }
+
+ it { is_expected.to be_truthy }
+ end
+ end
end
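The #active? examples above treat a cluster agent as active when at least one of its tokens has been used recently; an agent whose tokens are stale or were never used is inactive. A small sketch of that shape, assuming a one-hour activity window (the spec only bounds the cutoff somewhere between two minutes and two hours) and a plain Struct in place of Clusters::AgentToken:

# Hedged sketch; the real Clusters::Agent likely uses a scope on its tokens
# and a different cutoff constant.
AgentTokenStub = Struct.new(:last_used_at, keyword_init: true)

class AgentSketch
  ACTIVITY_WINDOW = 60 * 60 # assumed: one hour

  def initialize(tokens)
    @tokens = tokens
  end

  def active?
    cutoff = Time.now - ACTIVITY_WINDOW
    # Active if any token has been used since the cutoff; never-used tokens don't count.
    @tokens.any? { |token| token.last_used_at && token.last_used_at > cutoff }
  end
end

agent = AgentSketch.new([
  AgentTokenStub.new(last_used_at: Time.now - 2 * 60 * 60), # used 2 hours ago: too old
  AgentTokenStub.new(last_used_at: Time.now - 2 * 60)       # used 2 minutes ago: recent
])
p agent.active? # => true, matching the "agent has multiple tokens" example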
diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb
index bde4798abec..ad9f948224f 100644
--- a/spec/models/clusters/agent_token_spec.rb
+++ b/spec/models/clusters/agent_token_spec.rb
@@ -39,7 +39,9 @@ RSpec.describe Clusters::AgentToken do
end
describe '#track_usage', :clean_gitlab_redis_cache do
- let(:agent_token) { create(:cluster_agent_token) }
+ let_it_be(:agent) { create(:cluster_agent) }
+
+ let(:agent_token) { create(:cluster_agent_token, agent: agent) }
subject { agent_token.track_usage }
@@ -73,6 +75,34 @@ RSpec.describe Clusters::AgentToken do
expect_redis_update
end
end
+
+ context 'agent is inactive' do
+ before do
+ allow(agent).to receive(:active?).and_return(false)
+ end
+
+ it 'creates an activity event' do
+ expect { subject }.to change { agent.activity_events.count }
+
+ event = agent.activity_events.last
+ expect(event).to have_attributes(
+ kind: 'agent_connected',
+ level: 'info',
+ recorded_at: agent_token.reload.read_attribute(:last_used_at),
+ agent_token: agent_token
+ )
+ end
+ end
+
+ context 'agent is active' do
+ before do
+ allow(agent).to receive(:active?).and_return(true)
+ end
+
+ it 'does not create an activity event' do
+ expect { subject }.not_to change { agent.activity_events.count }
+ end
+ end
end
def expect_redis_update
diff --git a/spec/models/clusters/agents/activity_event_spec.rb b/spec/models/clusters/agents/activity_event_spec.rb
new file mode 100644
index 00000000000..18b9c82fa6a
--- /dev/null
+++ b/spec/models/clusters/agents/activity_event_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::ActivityEvent do
+ it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required }
+ it { is_expected.to belong_to(:user).optional }
+ it { is_expected.to belong_to(:agent_token).class_name('Clusters::AgentToken').optional }
+
+ it { is_expected.to validate_presence_of(:kind) }
+ it { is_expected.to validate_presence_of(:level) }
+ it { is_expected.to validate_presence_of(:recorded_at) }
+ it { is_expected.to nullify_if_blank(:detail) }
+
+ describe 'scopes' do
+ let_it_be(:agent) { create(:cluster_agent) }
+
+ describe '.in_timeline_order' do
+ let(:recorded_at) { 1.hour.ago }
+
+ let!(:event1) { create(:agent_activity_event, agent: agent, recorded_at: recorded_at) }
+ let!(:event2) { create(:agent_activity_event, agent: agent, recorded_at: Time.current) }
+ let!(:event3) { create(:agent_activity_event, agent: agent, recorded_at: recorded_at) }
+
+ subject { described_class.in_timeline_order }
+
+ it 'sorts by recorded_at: :desc, id: :desc' do
+ is_expected.to eq([event2, event3, event1])
+ end
+ end
+ end
+end
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index 806c60d5aff..434d7ad4a90 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -69,66 +69,9 @@ RSpec.describe Clusters::Applications::Runner do
expect(values).to include('privileged: true')
expect(values).to include('image: ubuntu:16.04')
expect(values).to include('resources')
- expect(values).to match(/runnerToken: ['"]?#{Regexp.escape(ci_runner.token)}/)
expect(values).to match(/gitlabUrl: ['"]?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/)
end
- context 'without a runner' do
- let(:application) { create(:clusters_applications_runner, runner: nil, cluster: cluster) }
- let(:runner) { application.runner }
-
- shared_examples 'runner creation' do
- it 'creates a runner' do
- expect { subject }.to change { Ci::Runner.count }.by(1)
- end
-
- it 'uses the new runner token' do
- expect(values).to match(/runnerToken: '?#{Regexp.escape(runner.token)}/)
- end
- end
-
- context 'project cluster' do
- let(:project) { create(:project) }
- let(:cluster) { create(:cluster, :with_installed_helm, projects: [project]) }
-
- include_examples 'runner creation'
-
- it 'creates a project runner' do
- subject
-
- runner_projects = Project.where(id: runner.runner_projects.pluck(:project_id))
- expect(runner).to be_project_type
- expect(runner_projects).to match_array [project]
- end
- end
-
- context 'group cluster' do
- let(:group) { create(:group) }
- let(:cluster) { create(:cluster, :with_installed_helm, cluster_type: :group_type, groups: [group]) }
-
- include_examples 'runner creation'
-
- it 'creates a group runner' do
- subject
-
- expect(runner).to be_group_type
- expect(runner.runner_namespaces.pluck(:namespace_id)).to match_array [group.id]
- end
- end
-
- context 'instance cluster' do
- let(:cluster) { create(:cluster, :with_installed_helm, :instance) }
-
- include_examples 'runner creation'
-
- it 'creates an instance runner' do
- subject
-
- expect(runner).to be_instance_type
- end
- end
- end
-
context 'with duplicated values on vendor/runner/values.yaml' do
let(:stub_values) do
{
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index a4cae93ff84..b298bf2c8bb 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -201,7 +201,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do
it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::KubeClient) }
context 'ca_pem is a single certificate' do
- let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/ca_certificate.pem')) }
+ let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/root_certificate.pem')) }
let(:kubernetes) do
build(:cluster_platform_kubernetes,
:configured,
@@ -228,21 +228,22 @@ RSpec.describe Clusters::Platforms::Kubernetes do
ca_pem: cert_chain)
end
- it 'includes chain of certificates' do
- cert1_file = File.read(Rails.root.join('spec/fixtures/clusters/root_certificate.pem'))
- cert1 = OpenSSL::X509::Certificate.new(cert1_file)
-
- cert2_file = File.read(Rails.root.join('spec/fixtures/clusters/intermediate_certificate.pem'))
- cert2 = OpenSSL::X509::Certificate.new(cert2_file)
-
- cert3_file = File.read(Rails.root.join('spec/fixtures/clusters/ca_certificate.pem'))
- cert3 = OpenSSL::X509::Certificate.new(cert3_file)
+ where(:fixture_path) do
+ %w[
+ spec/fixtures/clusters/root_certificate.pem
+ spec/fixtures/clusters/intermediate_certificate.pem
+ spec/fixtures/clusters/leaf_certificate.pem
+ ]
+ end
- cert_store = kubernetes.kubeclient.kubeclient_options[:ssl_options][:cert_store]
+ with_them do
+ it 'includes chain of certificates' do
+ cert_store = kubernetes.kubeclient.kubeclient_options[:ssl_options][:cert_store]
+ cert_file = File.read(Rails.root.join(fixture_path))
+ certificate = OpenSSL::X509::Certificate.new(cert_file)
- expect(cert_store.verify(cert1)).to be true
- expect(cert_store.verify(cert2)).to be true
- expect(cert_store.verify(cert3)).to be true
+ expect(cert_store.verify(certificate)).to be true
+ end
end
end
end
diff --git a/spec/models/gpg_signature_spec.rb b/spec/models/commit_signatures/gpg_signature_spec.rb
index 7a1799c670e..9646e974f40 100644
--- a/spec/models/gpg_signature_spec.rb
+++ b/spec/models/commit_signatures/gpg_signature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GpgSignature do
+RSpec.describe CommitSignatures::GpgSignature do
let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create(:project, :repository, path: 'sample-project') }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
@@ -13,7 +13,7 @@ RSpec.describe GpgSignature do
it_behaves_like 'having unique enum values'
describe 'associations' do
- it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:gpg_key) }
it { is_expected.to belong_to(:gpg_key_subkey) }
end
diff --git a/spec/models/x509_commit_signature_spec.rb b/spec/models/commit_signatures/x509_commit_signature_spec.rb
index 2efb77c96ad..076f209e1b7 100644
--- a/spec/models/x509_commit_signature_spec.rb
+++ b/spec/models/commit_signatures/x509_commit_signature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe X509CommitSignature do
+RSpec.describe CommitSignatures::X509CommitSignature do
let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
let(:project) { create(:project, :public, :repository) }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index ac0ae17f8f7..2176eea75bc 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -676,68 +676,18 @@ eos
describe '.diff_max_files' do
subject(:diff_max_files) { described_class.diff_max_files }
- let(:increased_diff_limits) { false }
- let(:configurable_diff_limits) { false }
-
- before do
- stub_feature_flags(increased_diff_limits: increased_diff_limits, configurable_diff_limits: configurable_diff_limits)
- end
-
- context 'when increased_diff_limits is enabled' do
- let(:increased_diff_limits) { true }
-
- it 'returns 3000' do
- expect(diff_max_files).to eq(3000)
- end
- end
-
- context 'when configurable_diff_limits is enabled' do
- let(:configurable_diff_limits) { true }
-
- it 'returns the current settings' do
- Gitlab::CurrentSettings.update!(diff_max_files: 1234)
- expect(diff_max_files).to eq(1234)
- end
- end
-
- context 'when neither feature flag is enabled' do
- it 'returns 1000' do
- expect(diff_max_files).to eq(1000)
- end
+ it 'returns the current settings' do
+ Gitlab::CurrentSettings.update!(diff_max_files: 1234)
+ expect(diff_max_files).to eq(1234)
end
end
describe '.diff_max_lines' do
subject(:diff_max_lines) { described_class.diff_max_lines }
- let(:increased_diff_limits) { false }
- let(:configurable_diff_limits) { false }
-
- before do
- stub_feature_flags(increased_diff_limits: increased_diff_limits, configurable_diff_limits: configurable_diff_limits)
- end
-
- context 'when increased_diff_limits is enabled' do
- let(:increased_diff_limits) { true }
-
- it 'returns 100000' do
- expect(diff_max_lines).to eq(100000)
- end
- end
-
- context 'when configurable_diff_limits is enabled' do
- let(:configurable_diff_limits) { true }
-
- it 'returns the current settings' do
- Gitlab::CurrentSettings.update!(diff_max_lines: 65321)
- expect(diff_max_lines).to eq(65321)
- end
- end
-
- context 'when neither feature flag is enabled' do
- it 'returns 50000' do
- expect(diff_max_lines).to eq(50000)
- end
+ it 'returns the current settings' do
+ Gitlab::CurrentSettings.update!(diff_max_lines: 65321)
+ expect(diff_max_lines).to eq(65321)
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 59d14574c02..665a2a936af 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -46,10 +46,28 @@ RSpec.describe CommitStatus do
describe 'status state machine' do
let!(:commit_status) { create(:commit_status, :running, project: project) }
- it 'invalidates the cache after a transition' do
- expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
+ context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
+ end
+
+ it 'invalidates the cache after a transition' do
+ expect(commit_status).to receive(:expire_etag_cache!)
- commit_status.success!
+ commit_status.success!
+ end
+ end
+
+ context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
+ end
+
+ it 'invalidates the cache after a transition' do
+ expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
+
+ commit_status.success!
+ end
end
describe 'transitioning to running' do
@@ -97,32 +115,6 @@ RSpec.describe CommitStatus do
end
end
- describe '.updated_before' do
- let!(:lookback) { 5.days.ago }
- let!(:timeout) { 1.day.ago }
- let!(:before_lookback) { lookback - 1.hour }
- let!(:after_lookback) { lookback + 1.hour }
- let!(:before_timeout) { timeout - 1.hour }
- let!(:after_timeout) { timeout + 1.hour }
-
- subject { described_class.updated_before(lookback: lookback, timeout: timeout) }
-
- def create_build_with_set_timestamps(created_at:, updated_at:)
- travel_to(created_at) { create(:ci_build, created_at: Time.current) }.tap do |build|
- travel_to(updated_at) { build.update!(status: :failed) }
- end
- end
-
- it 'finds builds updated and created in the window between lookback and timeout' do
- build_in_lookback_timeout_window = create_build_with_set_timestamps(created_at: after_lookback, updated_at: before_timeout)
- build_outside_lookback_window = create_build_with_set_timestamps(created_at: before_lookback, updated_at: before_timeout)
- build_outside_timeout_window = create_build_with_set_timestamps(created_at: after_lookback, updated_at: after_timeout)
-
- expect(subject).to contain_exactly(build_in_lookback_timeout_window)
- expect(subject).not_to include(build_outside_lookback_window, build_outside_timeout_window)
- end
- end
-
describe '.scheduled_at_before' do
let!(:never_scheduled) { create(:commit_status) }
let!(:stale_scheduled) { create(:commit_status, scheduled_at: 1.day.ago) }
@@ -773,6 +765,14 @@ RSpec.describe CommitStatus do
it_behaves_like 'incrementing failure reason counter'
end
+
+ context 'when status is manual' do
+ let(:commit_status) { create(:commit_status, :manual) }
+
+ it 'is able to be dropped' do
+ expect { commit_status.drop! }.to change { commit_status.status }.from('manual').to('failed')
+ end
+ end
end
describe 'ensure stage assignment' do
@@ -958,4 +958,32 @@ RSpec.describe CommitStatus do
expect(build_from_other_pipeline.reload).to have_attributes(retried: false, processed: false)
end
end
+
+ describe '.bulk_insert_tags!' do
+ let(:statuses) { double('statuses') }
+ let(:tag_list_by_build) { double('tag list') }
+ let(:inserter) { double('inserter') }
+
+ it 'delegates to bulk insert class' do
+ expect(Gitlab::Ci::Tags::BulkInsert)
+ .to receive(:new)
+ .with(statuses, tag_list_by_build)
+ .and_return(inserter)
+
+ expect(inserter).to receive(:insert!)
+
+ described_class.bulk_insert_tags!(statuses, tag_list_by_build)
+ end
+ end
+
+ describe '#expire_etag_cache!' do
+ it 'expires the etag cache' do
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store|
+ job_path = Gitlab::Routing.url_helpers.project_build_path(project, commit_status.id, format: :json)
+ expect(etag_store).to receive(:touch).with(job_path)
+ end
+
+ commit_status.expire_etag_cache!
+ end
+ end
end
diff --git a/spec/models/concerns/after_commit_queue_spec.rb b/spec/models/concerns/after_commit_queue_spec.rb
new file mode 100644
index 00000000000..40cddde333e
--- /dev/null
+++ b/spec/models/concerns/after_commit_queue_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AfterCommitQueue do
+ describe '#run_after_commit' do
+ it 'runs after record is saved' do
+ called = false
+ test_proc = proc { called = true }
+
+ project = build(:project)
+ project.run_after_commit(&test_proc)
+
+ expect(called).to be false
+
+ # save! is run in its own transaction
+ project.save!
+
+ expect(called).to be true
+ end
+
+ it 'runs after transaction is committed' do
+ called = false
+ test_proc = proc { called = true }
+
+ project = build(:project)
+
+ Project.transaction do
+ project.run_after_commit(&test_proc)
+
+ project.save!
+
+ expect(called).to be false
+ end
+
+ expect(called).to be true
+ end
+ end
+
+ describe '#run_after_commit_or_now' do
+ it 'runs immediately if not within a transaction' do
+ called = false
+ test_proc = proc { called = true }
+
+ project = build(:project)
+
+ project.run_after_commit_or_now(&test_proc)
+
+ expect(called).to be true
+ end
+
+ it 'runs after transaction has completed' do
+ called = false
+ test_proc = proc { called = true }
+
+ project = build(:project)
+
+ Project.transaction do
+ # Add this record to the current transaction so that after commit hooks
+ # are called
+ Project.connection.add_transaction_record(project)
+
+ project.run_after_commit_or_now(&test_proc)
+
+ project.save!
+
+ expect(called).to be false
+ end
+
+ expect(called).to be true
+ end
+
+ context 'multiple databases - Ci::ApplicationRecord models' do
+ before do
+ skip_if_multiple_databases_not_setup
+
+ table_sql = <<~SQL
+ CREATE TABLE _test_ci_after_commit_queue (
+ id serial NOT NULL PRIMARY KEY);
+ SQL
+
+ ::Ci::ApplicationRecord.connection.execute(table_sql)
+ end
+
+ let(:ci_klass) do
+ Class.new(Ci::ApplicationRecord) do
+ self.table_name = '_test_ci_after_commit_queue'
+
+ include AfterCommitQueue
+
+ def self.name
+ 'TestCiAfterCommitQueue'
+ end
+ end
+ end
+
+ let(:ci_record) { ci_klass.new }
+
+ it 'runs immediately if not within a transaction' do
+ called = false
+ test_proc = proc { called = true }
+
+ ci_record.run_after_commit_or_now(&test_proc)
+
+ expect(called).to be true
+ end
+
+ it 'runs after transaction has completed' do
+ called = false
+ test_proc = proc { called = true }
+
+ Ci::ApplicationRecord.transaction do
+ # Add this record to the current transaction so that after commit hooks
+ # are called
+ Ci::ApplicationRecord.connection.add_transaction_record(ci_record)
+
+ ci_record.run_after_commit_or_now(&test_proc)
+
+ ci_record.save!
+
+ expect(called).to be false
+ end
+
+ expect(called).to be true
+ end
+ end
+ end
+end
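The new AfterCommitQueue examples describe run_after_commit_or_now as "run immediately when no transaction is open, otherwise defer until the surrounding transaction commits". A toy sketch of that queueing behaviour, using an invented ToyTransactionContext in place of ActiveRecord's connection and after-commit machinery (rollback handling is omitted):

class ToyTransactionContext
  def initialize
    @depth = 0
    @queue = []
  end

  def transaction
    @depth += 1
    yield
  ensure
    @depth -= 1
    flush if @depth.zero? # "commit": drain the queue once the outermost block ends
  end

  def run_after_commit_or_now(&block)
    if @depth.zero?
      block.call      # not inside a transaction: run immediately
    else
      @queue << block # inside a transaction: defer until commit
    end
  end

  private

  def flush
    @queue.shift.call until @queue.empty?
  end
end

ctx = ToyTransactionContext.new
ctx.run_after_commit_or_now { puts 'runs immediately' }

ctx.transaction do
  ctx.run_after_commit_or_now { puts 'runs after the block commits' }
  puts 'still inside the transaction'
end
# Output order: runs immediately / still inside the transaction / runs after the block commits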
diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb
index 269f9577267..6e624c687c4 100644
--- a/spec/models/concerns/case_sensitivity_spec.rb
+++ b/spec/models/concerns/case_sensitivity_spec.rb
@@ -9,11 +9,12 @@ RSpec.describe CaseSensitivity do
Class.new(ActiveRecord::Base) do
include CaseSensitivity
self.table_name = 'namespaces'
+ self.inheritance_column = :_type_disabled
end
end
- let_it_be(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1') }
- let_it_be(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2') }
+ let_it_be(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1', type: Namespaces::UserNamespace.sti_name) }
+ let_it_be(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2', type: Group.sti_name) }
it 'finds a single instance by a single attribute regardless of case' do
expect(model.iwhere(path: 'MODEL-1')).to contain_exactly(model_1)
diff --git a/spec/models/concerns/group_descendant_spec.rb b/spec/models/concerns/group_descendant_spec.rb
index b29fa910ee6..d593d829dca 100644
--- a/spec/models/concerns/group_descendant_spec.rb
+++ b/spec/models/concerns/group_descendant_spec.rb
@@ -19,14 +19,16 @@ RSpec.describe GroupDescendant do
query_count = ActiveRecord::QueryRecorder.new { test_group.hierarchy }.count
- expect(query_count).to eq(1)
+ # The use_traversal_ids_for_ancestors_upto actor-based feature flag check adds an extra query.
+ expect(query_count).to eq(2)
end
it 'only queries once for the ancestors when a top is given' do
test_group = create(:group, parent: subsub_group).reload
recorder = ActiveRecord::QueryRecorder.new { test_group.hierarchy(subgroup) }
- expect(recorder.count).to eq(1)
+ # The use_traversal_ids_for_ancestors_upto actor-based feature flag check adds an extra query.
+ expect(recorder.count).to eq(2)
end
it 'builds a hierarchy for a group' do
diff --git a/spec/models/concerns/loose_foreign_key_spec.rb b/spec/models/concerns/loose_foreign_key_spec.rb
deleted file mode 100644
index 42da69eb75e..00000000000
--- a/spec/models/concerns/loose_foreign_key_spec.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe LooseForeignKey do
- let(:project_klass) do
- Class.new(ApplicationRecord) do
- include LooseForeignKey
-
- self.table_name = 'projects'
-
- loose_foreign_key :issues, :project_id, on_delete: :async_delete
- loose_foreign_key 'merge_requests', 'project_id', 'on_delete' => 'async_nullify'
- end
- end
-
- it 'exposes the loose foreign key definitions' do
- definitions = project_klass.loose_foreign_key_definitions
-
- tables = definitions.map(&:to_table)
- expect(tables).to eq(%w[issues merge_requests])
- end
-
- it 'casts strings to symbol' do
- definition = project_klass.loose_foreign_key_definitions.last
-
- expect(definition.from_table).to eq('projects')
- expect(definition.to_table).to eq('merge_requests')
- expect(definition.column).to eq('project_id')
- expect(definition.on_delete).to eq(:async_nullify)
- end
-
- context 'validation' do
- context 'on_delete validation' do
- let(:invalid_class) do
- Class.new(ApplicationRecord) do
- include LooseForeignKey
-
- self.table_name = 'projects'
-
- loose_foreign_key :issues, :project_id, on_delete: :async_delete
- loose_foreign_key :merge_requests, :project_id, on_delete: :async_nullify
- loose_foreign_key :merge_requests, :project_id, on_delete: :destroy
- end
- end
-
- it 'raises error when invalid `on_delete` option was given' do
- expect { invalid_class }.to raise_error /Invalid on_delete option given: destroy/
- end
- end
-
- context 'inheritance validation' do
- let(:inherited_project_class) do
- Class.new(Project) do
- include LooseForeignKey
-
- loose_foreign_key :issues, :project_id, on_delete: :async_delete
- end
- end
-
- it 'raises error when loose_foreign_key is defined in a child ActiveRecord model' do
- expect { inherited_project_class }.to raise_error /Please define the loose_foreign_key on the Project class/
- end
- end
- end
-end
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index 903c7ae16b6..50cf7377b99 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -51,7 +51,9 @@ RSpec.describe Participable do
end
it 'supports attributes returning another Participable' do
- other_model = Class.new { include Participable }
+ other_model = Class.new do
+ include Participable
+ end
other_model.participant(:bar)
model.participant(:foo)
@@ -115,6 +117,76 @@ RSpec.describe Participable do
end
end
+ describe '#visible_participants' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(anything, :read_class, anything) { readable }
+ end
+
+ let(:readable) { true }
+
+ it 'returns the list of participants' do
+ model.participant(:foo)
+ model.participant(:bar)
+
+ user1 = build(:user)
+ user2 = build(:user)
+ user3 = build(:user)
+ project = build(:project, :public)
+ instance = model.new
+
+ allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
+ expect(instance).to receive(:foo).and_return(user2)
+ expect(instance).to receive(:bar).and_return(user3)
+ expect(instance).to receive(:project).thrice.and_return(project)
+
+ participants = instance.visible_participants(user1)
+
+ expect(participants).to include(user2)
+ expect(participants).to include(user3)
+ end
+
+ context 'when Participable is not readable by the user' do
+ let(:readable) { false }
+
+ it 'does not return unavailable participants' do
+ model.participant(:bar)
+
+ instance = model.new
+ user1 = build(:user)
+ user2 = build(:user)
+ project = build(:project, :public)
+
+ allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
+ allow(instance).to receive(:bar).and_return(user2)
+ expect(instance).to receive(:project).thrice.and_return(project)
+
+ expect(instance.visible_participants(user1)).to be_empty
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(verify_participants_access: false)
+ end
+
+ it 'returns unavailable participants' do
+ model.participant(:bar)
+
+ instance = model.new
+ user1 = build(:user)
+ user2 = build(:user)
+ project = build(:project, :public)
+
+ allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
+ allow(instance).to receive(:bar).and_return(user2)
+ expect(instance).to receive(:project).thrice.and_return(project)
+
+ expect(instance.visible_participants(user1)).to match_array([user2])
+ end
+ end
+ end
+ end
+
describe '#participant?' do
let(:instance) { model.new }
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 0a433a8cf4f..2330147b376 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.shared_examples '.find_by_full_path' do
+RSpec.shared_examples 'routable resource' do
describe '.find_by_full_path', :aggregate_failures do
it 'finds records by their full path' do
expect(described_class.find_by_full_path(record.full_path)).to eq(record)
@@ -52,13 +52,27 @@ RSpec.shared_examples '.find_by_full_path' do
end
end
-RSpec.describe Routable do
- it_behaves_like '.find_by_full_path' do
- let_it_be(:record) { create(:group) }
+RSpec.shared_examples 'routable resource with parent' do
+ it_behaves_like 'routable resource'
+
+ describe '#full_path' do
+ it { expect(record.full_path).to eq "#{record.parent.full_path}/#{record.path}" }
+
+ it 'hits the cache when not preloaded' do
+ forcibly_hit_cached_lookup(record, :full_path)
+
+ expect(record.full_path).to eq("#{record.parent.full_path}/#{record.path}")
+ end
end
- it_behaves_like '.find_by_full_path' do
- let_it_be(:record) { create(:project) }
+ describe '#full_name' do
+ it { expect(record.full_name).to eq "#{record.parent.human_name} / #{record.name}" }
+
+ it 'hits the cache when not preloaded' do
+ forcibly_hit_cached_lookup(record, :full_name)
+
+ expect(record.full_name).to eq("#{record.parent.human_name} / #{record.name}")
+ end
end
end
@@ -66,6 +80,14 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache do
let_it_be_with_reload(:group) { create(:group, name: 'foo') }
let_it_be(:nested_group) { create(:group, parent: group) }
+ it_behaves_like 'routable resource' do
+ let_it_be(:record) { group }
+ end
+
+ it_behaves_like 'routable resource with parent' do
+ let_it_be(:record) { nested_group }
+ end
+
describe 'Validations' do
it { is_expected.to validate_presence_of(:route) }
end
@@ -119,24 +141,6 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache do
end
end
- describe '.find_by_full_path' do
- it_behaves_like '.find_by_full_path' do
- let_it_be(:record) { group }
- end
-
- it_behaves_like '.find_by_full_path' do
- let_it_be(:record) { nested_group }
- end
-
- it 'does not find projects with a matching path' do
- project = create(:project)
- redirect_route = create(:redirect_route, source: project)
-
- expect(described_class.find_by_full_path(project.full_path)).to be_nil
- expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil
- end
- end
-
describe '.where_full_path_in' do
context 'without any paths' do
it 'returns an empty relation' do
@@ -195,64 +199,39 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache do
expect(group.route_loaded?).to be_truthy
end
end
-
- describe '#full_path' do
- it { expect(group.full_path).to eq(group.path) }
- it { expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}") }
-
- it 'hits the cache when not preloaded' do
- forcibly_hit_cached_lookup(nested_group, :full_path)
-
- expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}")
- end
- end
-
- describe '#full_name' do
- it { expect(group.full_name).to eq(group.name) }
- it { expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}") }
-
- it 'hits the cache when not preloaded' do
- forcibly_hit_cached_lookup(nested_group, :full_name)
-
- expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}")
- end
- end
end
RSpec.describe Project, 'Routable', :with_clean_rails_cache do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:project) { create(:project, namespace: namespace) }
- it_behaves_like '.find_by_full_path' do
+ it_behaves_like 'routable resource with parent' do
let_it_be(:record) { project }
end
+end
- it 'does not find groups with a matching path' do
- group = create(:group)
- redirect_route = create(:redirect_route, source: group)
-
- expect(described_class.find_by_full_path(group.full_path)).to be_nil
- expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil
- end
-
- describe '#full_path' do
- it { expect(project.full_path).to eq "#{namespace.full_path}/#{project.path}" }
-
- it 'hits the cache when not preloaded' do
- forcibly_hit_cached_lookup(project, :full_path)
-
- expect(project.full_path).to eq("#{namespace.full_path}/#{project.path}")
+RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project_namespace) do
+ # For now we create only a project namespace without a project; otherwise the
+ # same path would be used for both the project and the project namespace.
+ # This can be removed once a route is created automatically for project namespaces.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/346448
+ create(:project_namespace, project: nil, parent: group,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC,
+ path: 'foo', name: 'foo').tap do |project_namespace|
+ Route.create!(source: project_namespace, path: project_namespace.full_path,
+ name: project_namespace.full_name)
end
end
- describe '#full_name' do
- it { expect(project.full_name).to eq "#{namespace.human_name} / #{project.name}" }
-
- it 'hits the cache when not preloaded' do
- forcibly_hit_cached_lookup(project, :full_name)
+ # We have a couple of places where we use a generic Namespace; in that case
+ # we don't want to include ProjectNamespace routes yet.
+ it 'ignores project namespace when searching for generic namespace' do
+ redirect_route = create(:redirect_route, source: project_namespace)
- expect(project.full_name).to eq("#{namespace.human_name} / #{project.name}")
- end
+ expect(Namespace.find_by_full_path(project_namespace.full_path)).to be_nil
+ expect(Namespace.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil
end
end
diff --git a/spec/models/concerns/sha_attribute_spec.rb b/spec/models/concerns/sha_attribute_spec.rb
index 220eadfab92..1bcf3dc8b61 100644
--- a/spec/models/concerns/sha_attribute_spec.rb
+++ b/spec/models/concerns/sha_attribute_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ShaAttribute do
- let(:model) { Class.new(ApplicationRecord) { include ShaAttribute } }
+ let(:model) { Class.new(ActiveRecord::Base) { include ShaAttribute } }
before do
columns = [
diff --git a/spec/models/concerns/transactions_spec.rb b/spec/models/concerns/transactions_spec.rb
new file mode 100644
index 00000000000..404a33196e6
--- /dev/null
+++ b/spec/models/concerns/transactions_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Transactions do
+ let(:model) { build(:project) }
+
+ it 'is not in a transaction' do
+ expect(model.class).not_to be_inside_transaction
+ end
+
+ it 'is in a transaction', :aggregate_failures do
+ Project.transaction do
+ expect(model.class).to be_inside_transaction
+ end
+
+ ApplicationRecord.transaction do
+ expect(model.class).to be_inside_transaction
+ end
+ end
+end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 846dfb30928..51fdbfebd3a 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -223,9 +223,9 @@ RSpec.describe ContainerRepository do
end
end
- describe '.create_from_path!' do
+ describe '.find_or_create_from_path' do
let(:repository) do
- described_class.create_from_path!(ContainerRegistry::Path.new(path))
+ described_class.find_or_create_from_path(ContainerRegistry::Path.new(path))
end
let(:repository_path) { ContainerRegistry::Path.new(path) }
@@ -291,6 +291,35 @@ RSpec.describe ContainerRepository do
expect(repository.id).to eq(container_repository.id)
end
end
+
+ context 'when many of the same repository are created at the same time' do
+ let(:path) { ContainerRegistry::Path.new(project.full_path + '/some/image') }
+
+ it 'does not throw validation errors and only creates one repository' do
+ expect { repository_creation_race(path) }.to change { ContainerRepository.count }.by(1)
+ end
+
+ it 'retrieves a persisted repository for all concurrent calls' do
+ repositories = repository_creation_race(path).map(&:value)
+
+ expect(repositories).to all(be_persisted)
+ end
+ end
+
+ def repository_creation_race(path)
+ # create a race condition - structure from https://blog.arkency.com/2015/09/testing-race-conditions/
+ wait_for_it = true
+
+ threads = Array.new(10) do |i|
+ Thread.new do
+ true while wait_for_it
+
+ ::ContainerRepository.find_or_create_from_path(path)
+ end
+ end
+ wait_for_it = false
+ threads.each(&:join)
+ end
end
describe '.build_root_repository' do
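The concurrency examples above expect find_or_create_from_path to survive a creation race: ten threads request the same path, exactly one row is created, and every caller gets back a persisted record. A hedged sketch of the usual pattern behind that expectation (insert, rescue the uniqueness violation, then re-read the winner's row), with an in-memory table and a hand-rolled UniqueIndexViolation standing in for the database and its unique index:

class UniqueIndexViolation < StandardError; end

class FakeTable
  def initialize
    @rows = {}
    @lock = Mutex.new
  end

  def find(path)
    @lock.synchronize { @rows[path] }
  end

  def insert(path)
    @lock.synchronize do
      raise UniqueIndexViolation, path if @rows.key?(path)

      @rows[path] = { id: @rows.size + 1, path: path }
    end
  end
end

TABLE = FakeTable.new

def find_or_create_from_path(path)
  TABLE.find(path) || TABLE.insert(path)
rescue UniqueIndexViolation
  # Another thread won the race; the row now exists, so just re-read it.
  TABLE.find(path)
end

# Same busy-wait barrier the spec uses to line the threads up before the race.
wait_for_it = true

threads = Array.new(10) do
  Thread.new do
    true while wait_for_it

    find_or_create_from_path('group/app/image')
  end
end

wait_for_it = false
records = threads.map(&:value)

p records.uniq.size # => 1: every caller ends up with the same persisted row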
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index 3a2d4e2d0ca..7e26d324ac2 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -36,4 +36,27 @@ RSpec.describe CustomerRelations::Contact, type: :model do
expect(contact.phone).to eq('123456')
end
end
+
+ describe '.find_ids_by_emails' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_contacts) { create_list(:contact, 2, group: group) }
+ let_it_be(:other_contacts) { create_list(:contact, 2) }
+
+ it 'returns ids of contacts from group' do
+ contact_ids = described_class.find_ids_by_emails(group.id, group_contacts.pluck(:email))
+
+ expect(contact_ids).to match_array(group_contacts.pluck(:id))
+ end
+
+ it 'does not return ids of contacts from other groups' do
+ contact_ids = described_class.find_ids_by_emails(group.id, other_contacts.pluck(:email))
+
+ expect(contact_ids).to be_empty
+ end
+
+ it 'raises ArgumentError when called with too many emails' do
+ too_many_emails = described_class::MAX_PLUCK + 1
+ expect { described_class.find_ids_by_emails(group.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/models/customer_relations/issue_contact_spec.rb b/spec/models/customer_relations/issue_contact_spec.rb
index 3747d159833..474455a9884 100644
--- a/spec/models/customer_relations/issue_contact_spec.rb
+++ b/spec/models/customer_relations/issue_contact_spec.rb
@@ -4,6 +4,9 @@ require 'spec_helper'
RSpec.describe CustomerRelations::IssueContact do
let_it_be(:issue_contact, reload: true) { create(:issue_customer_relations_contact) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
subject { issue_contact }
@@ -19,9 +22,6 @@ RSpec.describe CustomerRelations::IssueContact do
let(:stubbed) { build_stubbed(:issue_customer_relations_contact) }
let(:created) { create(:issue_customer_relations_contact) }
- let(:group) { build(:group) }
- let(:project) { build(:project, group: group) }
- let(:issue) { build(:issue, project: project) }
let(:contact) { build(:contact, group: group) }
let(:for_issue) { build(:issue_customer_relations_contact, :for_issue, issue: issue) }
let(:for_contact) { build(:issue_customer_relations_contact, :for_contact, contact: contact) }
@@ -45,4 +45,26 @@ RSpec.describe CustomerRelations::IssueContact do
expect(built).not_to be_valid
end
end
+
+ describe '.find_contact_ids_by_emails' do
+ let_it_be(:for_issue) { create_list(:issue_customer_relations_contact, 2, :for_issue, issue: issue) }
+ let_it_be(:not_for_issue) { create_list(:issue_customer_relations_contact, 2) }
+
+ it 'returns ids of contacts from issue' do
+ contact_ids = described_class.find_contact_ids_by_emails(issue.id, for_issue.map(&:contact).pluck(:email))
+
+ expect(contact_ids).to match_array(for_issue.pluck(:contact_id))
+ end
+
+ it 'does not return ids of contacts from other issues' do
+ contact_ids = described_class.find_contact_ids_by_emails(issue.id, not_for_issue.map(&:contact).pluck(:email))
+
+ expect(contact_ids).to be_empty
+ end
+
+ it 'raises ArgumentError when called with too many emails' do
+ too_many_emails = described_class::MAX_PLUCK + 1
+ expect { described_class.find_contact_ids_by_emails(issue.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/models/deployment_metrics_spec.rb b/spec/models/deployment_metrics_spec.rb
index c804e20d66d..fe9218a9ae2 100644
--- a/spec/models/deployment_metrics_spec.rb
+++ b/spec/models/deployment_metrics_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe DeploymentMetrics do
}
end
- let(:prometheus_adapter) { instance_double('prometheus_adapter', can_query?: true, configured?: true) }
+ let(:prometheus_adapter) { instance_double(::Integrations::Prometheus, can_query?: true, configured?: true) }
before do
allow(deployment_metrics).to receive(:prometheus_adapter).and_return(prometheus_adapter)
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 51e1e63da8d..29b37ef7371 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -268,31 +268,69 @@ RSpec.describe Deployment do
end
end
+ context 'when deployment is blocked' do
+ let(:deployment) { create(:deployment, :created) }
+
+ it 'has correct status' do
+ deployment.block!
+
+ expect(deployment).to be_blocked
+ expect(deployment.finished_at).to be_nil
+ end
+
+ it 'does not execute Deployments::LinkMergeRequestWorker asynchronously' do
+ expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
+
+ deployment.block!
+ end
+
+ it 'does not execute Deployments::HooksWorker' do
+ expect(Deployments::HooksWorker).not_to receive(:perform_async)
+
+ deployment.block!
+ end
+ end
+
describe 'synching status to Jira' do
- let(:deployment) { create(:deployment) }
+ let_it_be(:project) { create(:project, :repository) }
+ let(:deployment) { create(:deployment, project: project) }
let(:worker) { ::JiraConnect::SyncDeploymentsWorker }
- it 'calls the worker on creation' do
- expect(worker).to receive(:perform_async).with(Integer)
+ context 'when Jira Connect subscription does not exist' do
+ it 'does not call the worker' do
+ expect(worker).not_to receive(:perform_async)
- deployment
+ deployment
+ end
end
- it 'does not call the worker for skipped deployments' do
- expect(deployment).to be_present # warm-up, ignore the creation trigger
+ context 'when Jira Connect subscription exists' do
+ before_all do
+ create(:jira_connect_subscription, namespace: project.namespace)
+ end
- expect(worker).not_to receive(:perform_async)
+ it 'calls the worker on creation' do
+ expect(worker).to receive(:perform_async).with(Integer)
- deployment.skip!
- end
+ deployment
+ end
+
+ it 'does not call the worker for skipped deployments' do
+ expect(deployment).to be_present # warm-up, ignore the creation trigger
+
+ expect(worker).not_to receive(:perform_async)
+
+ deployment.skip!
+ end
- %i[run! succeed! drop! cancel!].each do |event|
- context "when we call pipeline.#{event}" do
- it 'triggers a Jira synch worker' do
- expect(worker).to receive(:perform_async).with(deployment.id)
+ %i[run! succeed! drop! cancel!].each do |event|
+ context "when we call pipeline.#{event}" do
+ it 'triggers a Jira synch worker' do
+ expect(worker).to receive(:perform_async).with(deployment.id)
- deployment.send(event)
+ deployment.send(event)
+ end
end
end
end
@@ -448,11 +486,12 @@ RSpec.describe Deployment do
subject { described_class.active }
it 'retrieves the active deployments' do
- deployment1 = create(:deployment, status: :created )
- deployment2 = create(:deployment, status: :running )
- create(:deployment, status: :failed )
- create(:deployment, status: :canceled )
+ deployment1 = create(:deployment, status: :created)
+ deployment2 = create(:deployment, status: :running)
+ create(:deployment, status: :failed)
+ create(:deployment, status: :canceled)
create(:deployment, status: :skipped)
+ create(:deployment, status: :blocked)
is_expected.to contain_exactly(deployment1, deployment2)
end
@@ -512,9 +551,25 @@ RSpec.describe Deployment do
deployment2 = create(:deployment, status: :success)
deployment3 = create(:deployment, status: :failed)
deployment4 = create(:deployment, status: :canceled)
+ deployment5 = create(:deployment, status: :blocked)
+ create(:deployment, status: :skipped)
+
+ is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4, deployment5)
+ end
+ end
+
+ describe 'upcoming' do
+ subject { described_class.upcoming }
+
+ it 'retrieves the upcoming deployments' do
+ deployment1 = create(:deployment, status: :running)
+ deployment2 = create(:deployment, status: :blocked)
+ create(:deployment, status: :success)
+ create(:deployment, status: :failed)
+ create(:deployment, status: :canceled)
create(:deployment, status: :skipped)
- is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4)
+ is_expected.to contain_exactly(deployment1, deployment2)
end
end
end
@@ -840,6 +895,27 @@ RSpec.describe Deployment do
expect(deploy.update_status('created')).to eq(false)
end
+
+ context 'mapping status to event' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :method) do
+ 'running' | :run!
+ 'success' | :succeed!
+ 'failed' | :drop!
+ 'canceled' | :cancel!
+ 'skipped' | :skip!
+ 'blocked' | :block!
+ end
+
+ with_them do
+ it 'calls the correct method for the given status' do
+ expect(deploy).to receive(method)
+
+ deploy.update_status(status)
+ end
+ end
+ end
end
describe '#sync_status_with' do
diff --git a/spec/models/dev_ops_report/metric_spec.rb b/spec/models/dev_ops_report/metric_spec.rb
index 191692f43a4..8519217f719 100644
--- a/spec/models/dev_ops_report/metric_spec.rb
+++ b/spec/models/dev_ops_report/metric_spec.rb
@@ -5,6 +5,13 @@ require 'spec_helper'
RSpec.describe DevOpsReport::Metric do
let(:conv_dev_index) { create(:dev_ops_report_metric) }
+ describe 'validations' do
+ DevOpsReport::Metric::METRICS.each do |metric_name|
+ it { is_expected.to validate_presence_of(metric_name) }
+ it { is_expected.to validate_numericality_of(metric_name).is_greater_than_or_equal_to(0) }
+ end
+ end
+
describe '#percentage_score' do
it 'returns stored percentage score' do
expect(conv_dev_index.percentage_score('issues')).to eq(13.331)
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 9d9862aa3d3..3dd0e01d7b3 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -947,6 +947,12 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to eq(deployment) }
end
+
+ context 'when environment has a blocked deployment' do
+ let!(:deployment) { create(:deployment, :blocked, environment: environment, project: project) }
+
+ it { is_expected.to eq(deployment) }
+ end
end
describe '#has_terminals?' do
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index ee27eaf1d0b..97854086162 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -706,7 +706,7 @@ RSpec.describe Event do
describe '.for_wiki_meta' do
it 'finds events for a given wiki page metadata object' do
- event = events.select(&:wiki_page?).first
+ event = events.find(&:wiki_page?)
expect(described_class.for_wiki_meta(event.target)).to contain_exactly(event)
end
diff --git a/spec/models/external_pull_request_spec.rb b/spec/models/external_pull_request_spec.rb
index bac2c369d7d..b141600c4fd 100644
--- a/spec/models/external_pull_request_spec.rb
+++ b/spec/models/external_pull_request_spec.rb
@@ -232,4 +232,8 @@ RSpec.describe ExternalPullRequest do
'with space/README.md']
end
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :external_pull_request }
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 735aa4df2ba..fed4ee3f3a4 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -94,34 +94,6 @@ RSpec.describe Group do
expect(group).to be_valid
end
end
-
- context 'when the feature flag `validate_namespace_parent_type` is disabled' do
- before do
- stub_feature_flags(validate_namespace_parent_type: false)
- end
-
- context 'when the group has no parent' do
- it 'allows a group to have no parent associated with it' do
- group = build(:group)
-
- expect(group).to be_valid
- end
- end
-
- context 'when the group has a parent' do
- it 'allows a group to have a namespace as its parent' do
- group = build(:group, parent: build(:namespace))
-
- expect(group).to be_valid
- end
-
- it 'allows a group to have another group as its parent' do
- group = build(:group, parent: build(:group))
-
- expect(group).to be_valid
- end
- end
- end
end
describe 'path validation' do
@@ -533,6 +505,10 @@ RSpec.describe Group do
describe '#ancestors' do
it { expect(group.ancestors.to_sql).not_to include 'traversal_ids <@' }
end
+
+ describe '#ancestors_upto' do
+ it { expect(group.ancestors_upto.to_sql).not_to include "WITH ORDINALITY" }
+ end
end
context 'linear' do
@@ -566,6 +542,10 @@ RSpec.describe Group do
end
end
+ describe '#ancestors_upto' do
+ it { expect(group.ancestors_upto.to_sql).to include "WITH ORDINALITY" }
+ end
+
context 'when project namespace exists in the group' do
let!(:project) { create(:project, group: group) }
let!(:project_namespace) { project.project_namespace }
@@ -734,7 +714,6 @@ RSpec.describe Group do
let!(:project) { create(:project, group: group) }
before do
- stub_experiments(invite_members_for_task: true)
group.add_users([create(:user)], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
end
@@ -2317,14 +2296,6 @@ RSpec.describe Group do
end
it_behaves_like 'returns the expected groups for a group and its descendants'
-
- context 'when :linear_group_including_descendants_by feature flag is disabled' do
- before do
- stub_feature_flags(linear_group_including_descendants_by: false)
- end
-
- it_behaves_like 'returns the expected groups for a group and its descendants'
- end
end
describe '.preset_root_ancestor_for' do
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 59f4533a6c1..c292e78b32d 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -330,6 +330,20 @@ RSpec.describe WebHook do
expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end
+ context 'when the hook is permanently disabled' do
+ before do
+ allow(hook).to receive(:permanently_disabled?).and_return(true)
+ end
+
+ it 'does not set disabled_until' do
+ expect { hook.backoff! }.not_to change(hook, :disabled_until)
+ end
+
+ it 'does not increment the backoff count' do
+ expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ end
+ end
+
context 'when we have backed off MAX_FAILURES times' do
before do
stub_const("#{described_class}::MAX_FAILURES", 5)
@@ -392,4 +406,77 @@ RSpec.describe WebHook do
end
end
end
+
+ describe '#temporarily_disabled?' do
+ it 'is false when not temporarily disabled' do
+ expect(hook).not_to be_temporarily_disabled
+ end
+
+ context 'when hook has been told to back off' do
+ before do
+ hook.backoff!
+ end
+
+ it 'is true' do
+ expect(hook).to be_temporarily_disabled
+ end
+
+ it 'is false when `web_hooks_disable_failed` flag is disabled' do
+ stub_feature_flags(web_hooks_disable_failed: false)
+
+ expect(hook).not_to be_temporarily_disabled
+ end
+ end
+ end
+
+ describe '#permanently_disabled?' do
+ it 'is false when not disabled' do
+ expect(hook).not_to be_permanently_disabled
+ end
+
+ context 'when hook has been disabled' do
+ before do
+ hook.disable!
+ end
+
+ it 'is true' do
+ expect(hook).to be_permanently_disabled
+ end
+
+ it 'is false when `web_hooks_disable_failed` flag is disabled' do
+ stub_feature_flags(web_hooks_disable_failed: false)
+
+ expect(hook).not_to be_permanently_disabled
+ end
+ end
+ end
+
+ describe '#rate_limited?' do
+ context 'when there are rate limits' do
+ before do
+ allow(hook).to receive(:rate_limit).and_return(3)
+ end
+
+ it 'is false when hook has not been rate limited' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(false)
+ expect(hook).not_to be_rate_limited
+ end
+
+ it 'is true when hook has been rate limited' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(true)
+ expect(hook).to be_rate_limited
+ end
+ end
+
+ context 'when there are no rate limits' do
+ before do
+ allow(hook).to receive(:rate_limit).and_return(nil)
+ end
+
+ it 'does not call Gitlab::ApplicationRateLimiter, and is false' do
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:peek)
+ expect(hook).not_to be_rate_limited
+ end
+ end
+ end
end
diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb
index f3e7b90cf3c..c548357bd3f 100644
--- a/spec/models/incident_management/issuable_escalation_status_spec.rb
+++ b/spec/models/incident_management/issuable_escalation_status_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe IncidentManagement::IssuableEscalationStatus do
describe 'associations' do
it { is_expected.to belong_to(:issue) }
+ it { is_expected.to have_one(:project).through(:issue) }
end
describe 'validations' do
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index cc0b69e3526..698d74abf03 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -144,6 +144,7 @@ RSpec.describe InstanceConfiguration do
create(:plan_limits,
plan: plan1,
conan_max_file_size: 1001,
+ helm_max_file_size: 1008,
maven_max_file_size: 1002,
npm_max_file_size: 1003,
nuget_max_file_size: 1004,
@@ -154,6 +155,7 @@ RSpec.describe InstanceConfiguration do
create(:plan_limits,
plan: plan2,
conan_max_file_size: 1101,
+ helm_max_file_size: 1108,
maven_max_file_size: 1102,
npm_max_file_size: 1103,
nuget_max_file_size: 1104,
@@ -166,8 +168,8 @@ RSpec.describe InstanceConfiguration do
it 'returns package file size limits' do
file_size_limits = subject.settings[:package_file_size_limits]
- expect(file_size_limits[:Plan1]).to eq({ conan: 1001, maven: 1002, npm: 1003, nuget: 1004, pypi: 1005, terraform_module: 1006, generic: 1007 })
- expect(file_size_limits[:Plan2]).to eq({ conan: 1101, maven: 1102, npm: 1103, nuget: 1104, pypi: 1105, terraform_module: 1106, generic: 1107 })
+ expect(file_size_limits[:Plan1]).to eq({ conan: 1001, helm: 1008, maven: 1002, npm: 1003, nuget: 1004, pypi: 1005, terraform_module: 1006, generic: 1007 })
+ expect(file_size_limits[:Plan2]).to eq({ conan: 1101, helm: 1108, maven: 1102, npm: 1103, nuget: 1104, pypi: 1105, terraform_module: 1106, generic: 1107 })
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 1d81668f97d..9163a7ef845 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -863,7 +863,7 @@ RSpec.describe Integrations::Jira do
subject { jira_integration.create_cross_reference_note(jira_issue, resource, user) }
shared_examples 'handles cross-references' do
- let(:resource_name) { jira_integration.send(:noteable_name, resource) }
+ let(:resource_name) { jira_integration.send(:mentionable_name, resource) }
let(:resource_url) { jira_integration.send(:build_entity_url, resource_name, resource.to_param) }
let(:issue_url) { "#{url}/rest/api/2/issue/JIRA-123" }
let(:comment_url) { "#{issue_url}/comment" }
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index 21b9a005746..06b285a855c 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -3,6 +3,17 @@
require 'spec_helper'
RSpec.describe Integrations::MicrosoftTeams do
+ it_behaves_like "chat integration", "Microsoft Teams" do
+ let(:client) { ::MicrosoftTeams::Notifier }
+ let(:client_arguments) { webhook_url }
+
+ let(:payload) do
+ {
+ summary: be_present
+ }
+ end
+ end
+
let(:chat_integration) { described_class.new }
let(:webhook_url) { 'https://example.gitlab.com/' }
@@ -304,7 +315,7 @@ RSpec.describe Integrations::MicrosoftTeams do
context 'with protected branch' do
before do
- create(:protected_branch, project: project, name: 'a-protected-branch')
+ create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch')
end
let(:pipeline) do
diff --git a/spec/models/integrations/shimo_spec.rb b/spec/models/integrations/shimo_spec.rb
index 25df8d2b249..41f3f3c0c16 100644
--- a/spec/models/integrations/shimo_spec.rb
+++ b/spec/models/integrations/shimo_spec.rb
@@ -38,4 +38,26 @@ RSpec.describe ::Integrations::Shimo do
end
end
end
+
+ describe 'Caching has_shimo on project_settings' do
+ let(:project) { create(:project) }
+
+ subject { project.project_setting.has_shimo? }
+
+ it 'sets the property to true when integration is active' do
+ create(:shimo_integration, project: project, active: true)
+
+ is_expected.to be(true)
+ end
+
+ it 'sets the property to false when integration is not active' do
+ create(:shimo_integration, project: project, active: false)
+
+ is_expected.to be(false)
+ end
+
+ it 'creates a project_setting record if one was not already created' do
+ expect { create(:shimo_integration) }.to change(ProjectSetting, :count).by(1)
+ end
+ end
end
diff --git a/spec/models/issue/email_spec.rb b/spec/models/issue/email_spec.rb
new file mode 100644
index 00000000000..57cc7c7df66
--- /dev/null
+++ b/spec/models/issue/email_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issue::Email do
+ describe 'Associations' do
+ it { is_expected.to belong_to(:issue) }
+ end
+
+ describe 'Validations' do
+ subject { build(:issue_email) }
+
+ it { is_expected.to validate_presence_of(:issue) }
+ it { is_expected.to validate_uniqueness_of(:issue) }
+ it { is_expected.to validate_uniqueness_of(:email_message_id) }
+ it { is_expected.to validate_length_of(:email_message_id).is_at_most(1000) }
+ it { is_expected.to validate_presence_of(:email_message_id) }
+ end
+end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index ba4429451d1..4cbfa7c7758 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Issue do
it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:prometheus_alerts) }
it { is_expected.to have_many(:issue_email_participants) }
+ it { is_expected.to have_one(:email) }
it { is_expected.to have_many(:timelogs).autosave(true) }
it { is_expected.to have_one(:incident_management_issuable_escalation_status) }
it { is_expected.to have_many(:issue_customer_relations_contacts) }
@@ -986,6 +987,7 @@ RSpec.describe Issue do
issue = build(:issue, project: project)
user = build(:user)
+ allow(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label', project.full_path).and_call_original
expect(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label') { false }
expect(issue.visible_to_user?(user)).to be_falsy
end
@@ -1019,6 +1021,7 @@ RSpec.describe Issue do
issue = build(:issue, project: project)
user = build(:admin)
+ allow(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label', project.full_path).and_call_original
expect(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label') { false }
expect(issue.visible_to_user?(user)).to be_falsy
end
@@ -1314,10 +1317,28 @@ RSpec.describe Issue do
let_it_be(:issue1) { create(:issue, project: project, relative_position: nil) }
let_it_be(:issue2) { create(:issue, project: project, relative_position: nil) }
- it_behaves_like "a class that supports relative positioning" do
- let_it_be(:project) { reusable_project }
- let(:factory) { :issue }
- let(:default_params) { { project: project } }
+ context 'when optimized_issue_neighbor_queries is enabled' do
+ before do
+ stub_feature_flags(optimized_issue_neighbor_queries: true)
+ end
+
+ it_behaves_like "a class that supports relative positioning" do
+ let_it_be(:project) { reusable_project }
+ let(:factory) { :issue }
+ let(:default_params) { { project: project } }
+ end
+ end
+
+ context 'when optimized_issue_neighbor_queries is disabled' do
+ before do
+ stub_feature_flags(optimized_issue_neighbor_queries: false)
+ end
+
+ it_behaves_like "a class that supports relative positioning" do
+ let_it_be(:project) { reusable_project }
+ let(:factory) { :issue }
+ let(:default_params) { { project: project } }
+ end
end
it 'is not blocked for repositioning by default' do
@@ -1461,7 +1482,7 @@ RSpec.describe Issue do
it 'schedules rebalancing if there is no space left' do
lhs = build_stubbed(:issue, relative_position: 99, project: project)
to_move = build(:issue, project: project)
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, project_id, namespace_id)
+ expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, project_id, namespace_id)
expect { to_move.move_between(lhs, issue) }.to raise_error(RelativePositioning::NoSpaceLeft)
end
diff --git a/spec/models/lfs_objects_project_spec.rb b/spec/models/lfs_objects_project_spec.rb
index df49b60c4fa..7378beeed06 100644
--- a/spec/models/lfs_objects_project_spec.rb
+++ b/spec/models/lfs_objects_project_spec.rb
@@ -25,6 +25,28 @@ RSpec.describe LfsObjectsProject do
end
end
+ describe '#link_to_project!' do
+ it 'does not throw error when duplicate exists' do
+ subject
+
+ expect do
+ result = described_class.link_to_project!(subject.lfs_object, subject.project)
+ expect(result).to be_a(LfsObjectsProject)
+ end.not_to change { described_class.count }
+ end
+
+ it 'upserts a new entry and updates the project cache' do
+ new_project = create(:project)
+
+ allow(ProjectCacheWorker).to receive(:perform_async).and_call_original
+ expect(ProjectCacheWorker).to receive(:perform_async).with(new_project.id, [], [:lfs_objects_size])
+ expect { described_class.link_to_project!(subject.lfs_object, new_project) }
+ .to change { described_class.count }
+
+ expect(described_class.find_by(lfs_object_id: subject.lfs_object.id, project_id: new_project.id)).to be_present
+ end
+ end
+
describe '#update_project_statistics' do
it 'updates project statistics when the object is added' do
expect(ProjectCacheWorker).to receive(:perform_async)
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index cd5068bdb52..07ffff746a5 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -5,31 +5,148 @@ require 'spec_helper'
RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
let_it_be(:table) { 'public.projects' }
- let_it_be(:deleted_record_1) { described_class.create!(partition: 1, fully_qualified_table_name: table, primary_key_value: 5) }
- let_it_be(:deleted_record_2) { described_class.create!(partition: 1, fully_qualified_table_name: table, primary_key_value: 1) }
- let_it_be(:deleted_record_3) { described_class.create!(partition: 1, fully_qualified_table_name: 'public.other_table', primary_key_value: 3) }
- let_it_be(:deleted_record_4) { described_class.create!(partition: 1, fully_qualified_table_name: table, primary_key_value: 1) } # duplicate
+ describe 'class methods' do
+ let_it_be(:deleted_record_1) { described_class.create!(fully_qualified_table_name: table, primary_key_value: 5) }
+ let_it_be(:deleted_record_2) { described_class.create!(fully_qualified_table_name: table, primary_key_value: 1) }
+ let_it_be(:deleted_record_3) { described_class.create!(fully_qualified_table_name: 'public.other_table', primary_key_value: 3) }
+ let_it_be(:deleted_record_4) { described_class.create!(fully_qualified_table_name: table, primary_key_value: 1) } # duplicate
- describe '.load_batch_for_table' do
- it 'loads records and orders them by creation date' do
- records = described_class.load_batch_for_table(table, 10)
+ describe '.load_batch_for_table' do
+ it 'loads records and orders them by creation date' do
+ records = described_class.load_batch_for_table(table, 10)
- expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
+ expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4])
+ end
+
+ it 'supports configurable batch size' do
+ records = described_class.load_batch_for_table(table, 2)
+
+ expect(records).to eq([deleted_record_1, deleted_record_2])
+ end
end
- it 'supports configurable batch size' do
- records = described_class.load_batch_for_table(table, 2)
+ describe '.mark_records_processed' do
+ it 'updates all records' do
+ records = described_class.load_batch_for_table(table, 10)
+ described_class.mark_records_processed(records)
- expect(records).to eq([deleted_record_1, deleted_record_2])
+ expect(described_class.status_pending.count).to eq(1)
+ expect(described_class.status_processed.count).to eq(3)
+ end
end
end
- describe '.mark_records_processed' do
- it 'updates all records' do
- described_class.mark_records_processed([deleted_record_1, deleted_record_2, deleted_record_4])
+ describe 'sliding_list partitioning' do
+ let(:connection) { described_class.connection }
+ let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) }
+
+ describe 'next_partition_if callback' do
+ let(:active_partition) { described_class.partitioning_strategy.active_partition.value }
+
+ subject(:value) { described_class.partitioning_strategy.next_partition_if.call(active_partition) }
+
+ context 'when the partition is empty' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the partition has records' do
+ before do
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1, status: :processed)
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2, status: :pending)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the first record of the partition is older than PARTITION_DURATION' do
+ before do
+ described_class.create!(
+ fully_qualified_table_name: 'public.table',
+ primary_key_value: 1,
+ created_at: (described_class::PARTITION_DURATION + 1.day).ago)
+
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2)
+ end
+
+ it { is_expected.to eq(true) }
+
+ context 'when the lfk_automatic_partition_creation FF is off' do
+ before do
+ stub_feature_flags(lfk_automatic_partition_creation: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+
+ describe 'detach_partition_if callback' do
+ let(:active_partition) { described_class.partitioning_strategy.active_partition.value }
+
+ subject(:value) { described_class.partitioning_strategy.detach_partition_if.call(active_partition) }
+
+ context 'when the partition contains unprocessed records' do
+ before do
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1, status: :processed)
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2, status: :pending)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when the partition contains only processed records' do
+ before do
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1, status: :processed)
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2, status: :processed)
+ end
+
+ it { is_expected.to eq(true) }
+
+ context 'when the lfk_automatic_partition_dropping FF is off' do
+ before do
+ stub_feature_flags(lfk_automatic_partition_dropping: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+
+ describe 'the behavior of the strategy' do
+ it 'moves records to new partitions as time passes', :freeze_time do
+ # We start with partition 1
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1])
+
+ # it's not a day old yet so no new partitions are created
+ partition_manager.sync_partitions
+
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1])
+
+ # add one record so the next partition will be created
+ described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1)
+
+ # after traveling forward a day
+ travel(described_class::PARTITION_DURATION + 1.second)
+
+ # a new partition is created
+ partition_manager.sync_partitions
+
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1, 2])
+
+ # and we can insert to the new partition
+ expect { described_class.create!(fully_qualified_table_name: table, primary_key_value: 5) }.not_to raise_error
+
+ # after processing old records
+ LooseForeignKeys::DeletedRecord.for_partition(1).update_all(status: :processed)
+
+ partition_manager.sync_partitions
+
+ # the old one is removed
+ expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([2])
- expect(described_class.status_pending.count).to eq(1)
- expect(described_class.status_processed.count).to eq(3)
+ # and we only have the newly created partition left.
+ expect(described_class.count).to eq(1)
+ end
end
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index abff1815f1a..7ce32de6edc 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -681,8 +681,6 @@ RSpec.describe Member do
end
it 'schedules a TasksToBeDone::CreateWorker task' do
- stub_experiments(invite_members_for_task: true)
-
member_task = create(:member_task, member: member, project: member.project)
expect(TasksToBeDone::CreateWorker)
diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb
index 13ff239a306..a4bdac39074 100644
--- a/spec/models/merge_request/metrics_spec.rb
+++ b/spec/models/merge_request/metrics_spec.rb
@@ -48,4 +48,10 @@ RSpec.describe MergeRequest::Metrics do
end
end
end
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:merge_request) { create(:merge_request) }
+ let!(:parent) { create(:ci_pipeline, project: merge_request.target_project) }
+ let!(:model) { merge_request.metrics.tap { |metrics| metrics.update!(pipeline: parent) } }
+ end
end
diff --git a/spec/models/merge_request_assignee_spec.rb b/spec/models/merge_request_assignee_spec.rb
index 5bb8e7184a3..58b802de8e0 100644
--- a/spec/models/merge_request_assignee_spec.rb
+++ b/spec/models/merge_request_assignee_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe MergeRequestAssignee do
+ let(:assignee) { create(:user) }
let(:merge_request) { create(:merge_request) }
- subject { merge_request.merge_request_assignees.build(assignee: create(:user)) }
+ subject { merge_request.merge_request_assignees.build(assignee: assignee) }
describe 'associations' do
it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
@@ -41,4 +42,13 @@ RSpec.describe MergeRequestAssignee do
it_behaves_like 'having unique enum values'
it_behaves_like 'having reviewer state'
+
+ describe 'syncs to reviewer state' do
+ before do
+ reviewer = merge_request.merge_request_reviewers.build(reviewer: assignee)
+ reviewer.update!(state: :reviewed)
+ end
+
+ it { is_expected.to have_attributes(state: 'reviewed') }
+ end
end
diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb
index d69d60c94f0..d99fd4afb0f 100644
--- a/spec/models/merge_request_reviewer_spec.rb
+++ b/spec/models/merge_request_reviewer_spec.rb
@@ -3,14 +3,24 @@
require 'spec_helper'
RSpec.describe MergeRequestReviewer do
+ let(:reviewer) { create(:user) }
let(:merge_request) { create(:merge_request) }
- subject { merge_request.merge_request_reviewers.build(reviewer: create(:user)) }
+ subject { merge_request.merge_request_reviewers.build(reviewer: reviewer) }
it_behaves_like 'having unique enum values'
it_behaves_like 'having reviewer state'
+ describe 'syncs to assignee state' do
+ before do
+ assignee = merge_request.merge_request_assignees.build(assignee: reviewer)
+ assignee.update!(state: :reviewed)
+ end
+
+ it { is_expected.to have_attributes(state: 'reviewed') }
+ end
+
describe 'associations' do
it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) }
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 5618fb06157..e1db1b3cf3e 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -178,6 +178,13 @@ RSpec.describe MergeRequest, factory_default: :keep do
it 'returns the merge request title' do
expect(subject.default_squash_commit_message).to eq(subject.title)
end
+
+ it 'uses template from target project' do
+ subject.target_project.squash_commit_template = 'Squashed branch %{source_branch} into %{target_branch}'
+
+ expect(subject.default_squash_commit_message)
+ .to eq('Squashed branch master into feature')
+ end
end
describe 'modules' do
@@ -1132,7 +1139,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it 'returns the correct overflow count' do
- allow(Commit).to receive(:max_diff_options).and_return(max_files: 2)
+ allow(Commit).to receive(:diff_max_files).and_return(2)
set_compare(merge_request)
expect(merge_request.diff_size).to eq('2+')
@@ -1641,6 +1648,9 @@ RSpec.describe MergeRequest, factory_default: :keep do
it 'uses template from target project' do
request = build(:merge_request, title: 'Fix everything')
+ request.compare_commits = [
+ double(safe_message: 'Commit message', gitaly_commit?: true, merge_commit?: false, description?: false)
+ ]
subject.target_project.merge_commit_template = '%{title}'
expect(request.default_merge_commit_message)
@@ -3953,7 +3963,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
create_build(source_pipeline, 60.2, 'test:1')
create_build(target_pipeline, 50, 'test:2')
- expect(merge_request.pipeline_coverage_delta).to eq('10.20')
+ expect(merge_request.pipeline_coverage_delta).to be_within(0.001).of(10.2)
end
end
@@ -5032,4 +5042,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(described_class.from_fork).to eq([fork_mr])
end
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :merge_request }
+ end
end
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index d7b0ee888c0..51932ab943c 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
end
context 'with group outside of hierarchy' do
- let(:group) { create(:namespace) }
+ let(:group) { create(:group) }
it { expect(hierarchy.root).not_to eq root }
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 8f5860c799c..54327fc70d9 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -107,34 +107,6 @@ RSpec.describe Namespace do
end
end
end
-
- context 'when the feature flag `validate_namespace_parent_type` is disabled' do
- before do
- stub_feature_flags(validate_namespace_parent_type: false)
- end
-
- context 'when the namespace has no parent' do
- it 'allows a namespace to have no parent associated with it' do
- namespace = build(:namespace)
-
- expect(namespace).to be_valid
- end
- end
-
- context 'when the namespace has a parent' do
- it 'allows a namespace to have a group as its parent' do
- namespace = build(:namespace, parent: build(:group))
-
- expect(namespace).to be_valid
- end
-
- it 'allows a namespace to have another namespace as its parent' do
- namespace = build(:namespace, parent: build(:namespace))
-
- expect(namespace).to be_valid
- end
- end
- end
end
describe '#nesting_level_allowed' do
@@ -287,13 +259,12 @@ RSpec.describe Namespace do
end
end
- context 'creating a Namespace with nil type' do
+ context 'unable to create a Namespace with nil type' do
+ let(:namespace) { nil }
let(:namespace_type) { nil }
- it 'is the correct type of namespace' do
- expect(Namespace.find(namespace.id)).to be_a(Namespace)
- expect(namespace.kind).to eq('user')
- expect(namespace.user_namespace?).to be_truthy
+ it 'raises ActiveRecord::NotNullViolation' do
+ expect { create(:namespace, type: namespace_type, parent: parent) }.to raise_error(ActiveRecord::NotNullViolation)
end
end
@@ -700,20 +671,6 @@ RSpec.describe Namespace do
end
end
- describe '#ancestors_upto' do
- let(:parent) { create(:group) }
- let(:child) { create(:group, parent: parent) }
- let(:child2) { create(:group, parent: child) }
-
- it 'returns all ancestors when no namespace is given' do
- expect(child2.ancestors_upto).to contain_exactly(child, parent)
- end
-
- it 'includes ancestors upto but excluding the given ancestor' do
- expect(child2.ancestors_upto(parent)).to contain_exactly(child)
- end
- end
-
describe '#move_dir', :request_store do
shared_examples "namespace restrictions" do
context "when any project has container images" do
@@ -1274,6 +1231,38 @@ RSpec.describe Namespace do
end
end
+ describe '#use_traversal_ids_for_ancestors_upto?' do
+ let_it_be(:namespace, reload: true) { create(:namespace) }
+
+ subject { namespace.use_traversal_ids_for_ancestors_upto? }
+
+ context 'when use_traversal_ids_for_ancestors_upto feature flag is true' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestors_upto: true)
+ end
+
+ it { is_expected.to eq true }
+
+ it_behaves_like 'disabled feature flag when traversal_ids is blank'
+ end
+
+ context 'when use_traversal_ids_for_ancestors_upto feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestors_upto: false)
+ end
+
+ it { is_expected.to eq false }
+ end
+
+ context 'when use_traversal_ids? feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it { is_expected.to eq false }
+ end
+ end
+
describe '#users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
@@ -2066,4 +2055,79 @@ RSpec.describe Namespace do
it { is_expected.to be(true) }
end
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :group }
+ end
+
+ context 'Namespaces::SyncEvent' do
+ let!(:namespace) { create(:group) }
+
+ let_it_be(:new_namespace1) { create(:group) }
+ let_it_be(:new_namespace2) { create(:group) }
+
+ context 'when creating the namespace' do
+ it 'creates a namespaces_sync_event record' do
+ expect(namespace.sync_events.count).to eq(1)
+ end
+
+ it 'enqueues ProcessSyncEventsWorker' do
+ expect(Namespaces::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ create(:namespace)
+ end
+ end
+
+ context 'when updating namespace parent_id' do
+ it 'creates a namespaces_sync_event record' do
+ expect do
+ namespace.update!(parent_id: new_namespace1.id)
+ end.to change(Namespaces::SyncEvent, :count).by(1)
+
+ expect(namespace.sync_events.count).to eq(2)
+ end
+
+ it 'enqueues ProcessSyncEventsWorker' do
+ expect(Namespaces::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ namespace.update!(parent_id: new_namespace1.id)
+ end
+ end
+
+ context 'when updating namespace other attribute' do
+ it 'does not create a namespaces_sync_event record' do
+ expect do
+ namespace.update!(name: 'hello')
+ end.not_to change(Namespaces::SyncEvent, :count)
+ end
+ end
+
+ context 'in the same transaction' do
+ context 'when updating different parent_id' do
+ it 'creates two namespaces_sync_event records' do
+ expect do
+ Namespace.transaction do
+ namespace.update!(parent_id: new_namespace1.id)
+ namespace.update!(parent_id: new_namespace2.id)
+ end
+ end.to change(Namespaces::SyncEvent, :count).by(2)
+
+ expect(namespace.sync_events.count).to eq(3)
+ end
+ end
+
+ context 'when updating the same parent_id' do
+ it 'creates one namespaces_sync_event record' do
+ expect do
+ Namespace.transaction do
+ namespace.update!(parent_id: new_namespace1.id)
+ namespace.update!(parent_id: new_namespace1.id)
+ end
+ end.to change(Namespaces::SyncEvent, :count).by(1)
+
+ expect(namespace.sync_events.count).to eq(2)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/packages/build_info_spec.rb b/spec/models/packages/build_info_spec.rb
index a4369c56fe2..db8ac605d72 100644
--- a/spec/models/packages/build_info_spec.rb
+++ b/spec/models/packages/build_info_spec.rb
@@ -6,4 +6,46 @@ RSpec.describe Packages::BuildInfo, type: :model do
it { is_expected.to belong_to(:package) }
it { is_expected.to belong_to(:pipeline) }
end
+
+ context 'with some build infos' do
+ let_it_be(:package) { create(:package) }
+ let_it_be(:build_infos) { create_list(:package_build_info, 3, :with_pipeline, package: package) }
+ let_it_be(:build_info_with_no_pipeline) { create(:package_build_info) }
+
+ describe '.pluck_pipeline_ids' do
+ subject { package.build_infos.pluck_pipeline_ids.sort }
+
+ it { is_expected.to eq(build_infos.map(&:pipeline_id).sort) }
+ end
+
+ describe '.without_empty_pipelines' do
+ subject { package.build_infos.without_empty_pipelines }
+
+ it { is_expected.to contain_exactly(*build_infos) }
+ end
+
+ describe '.order_by_pipeline_id asc' do
+ subject { package.build_infos.order_by_pipeline_id(:asc) }
+
+ it { is_expected.to eq(build_infos) }
+ end
+
+ describe '.order_by_pipeline_id desc' do
+ subject { package.build_infos.order_by_pipeline_id(:desc) }
+
+ it { is_expected.to eq(build_infos.reverse) }
+ end
+
+ describe '.with_pipeline_id_less_than' do
+ subject { package.build_infos.with_pipeline_id_less_than(build_infos[1].pipeline_id) }
+
+ it { is_expected.to contain_exactly(build_infos[0]) }
+ end
+
+ describe '.with_pipeline_id_greater_than' do
+ subject { package.build_infos.with_pipeline_id_greater_than(build_infos[1].pipeline_id) }
+
+ it { is_expected.to contain_exactly(build_infos[2]) }
+ end
+ end
end
diff --git a/spec/models/packages/conan/metadatum_spec.rb b/spec/models/packages/conan/metadatum_spec.rb
index 112f395818b..d00723e8e43 100644
--- a/spec/models/packages/conan/metadatum_spec.rb
+++ b/spec/models/packages/conan/metadatum_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Packages::Conan::Metadatum, type: :model do
+ using RSpec::Parameterized::TableSyntax
+
describe 'relationships' do
it { is_expected.to belong_to(:package) }
end
@@ -45,6 +47,30 @@ RSpec.describe Packages::Conan::Metadatum, type: :model do
it { is_expected.not_to allow_value("my@channel").for(:package_channel) }
end
+ describe '#username_channel_none_values' do
+ let_it_be(:package) { create(:conan_package) }
+
+ let(:metadatum) { package.conan_metadatum }
+
+ subject { metadatum.valid? }
+
+ where(:username, :channel, :valid) do
+ 'username' | 'channel' | true
+ 'username' | '_' | false
+ '_' | 'channel' | false
+ '_' | '_' | true
+ end
+
+ with_them do
+ before do
+ metadatum.package_username = username
+ metadatum.package_channel = channel
+ end
+
+ it { is_expected.to eq(valid) }
+ end
+ end
+
describe '#conan_package_type' do
it 'will not allow a package with a different package_type' do
package = build('package')
@@ -87,4 +113,27 @@ RSpec.describe Packages::Conan::Metadatum, type: :model do
expect(described_class.full_path_from(package_username: username)).to eq('foo/bar/baz-buz')
end
end
+
+ describe '.validate_username_and_channel' do
+ where(:username, :channel, :error_field) do
+ 'username' | 'channel' | nil
+ 'username' | '_' | :channel
+ '_' | 'channel' | :username
+ '_' | '_' | nil
+ end
+
+ with_them do
+ if params[:error_field]
+ it 'yields the block when there is an error' do
+ described_class.validate_username_and_channel(username, channel) do |none_field|
+ expect(none_field).to eq(error_field)
+ end
+ end
+ else
+ it 'does not yield the block when there is no error' do
+ expect { |b| described_class.validate_username_and_channel(username, channel, &b) }.not_to yield_control
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/postgresql/replication_slot_spec.rb b/spec/models/postgresql/replication_slot_spec.rb
index c3b67a2e7b8..63a19541ab5 100644
--- a/spec/models/postgresql/replication_slot_spec.rb
+++ b/spec/models/postgresql/replication_slot_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Postgresql::ReplicationSlot do
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe '.in_use?' do
it 'returns true when replication slots are present' do
expect(described_class).to receive(:exists?).and_return(true)
@@ -73,28 +75,22 @@ RSpec.describe Postgresql::ReplicationSlot do
before(:all) do
skip('max_replication_slots too small') if skip_examples
- @current_slot_count = ApplicationRecord
+ @current_slot_count = described_class
.connection
- .execute("SELECT COUNT(*) FROM pg_replication_slots;")
- .first
- .fetch('count')
- .to_i
+ .select_value("SELECT COUNT(*) FROM pg_replication_slots")
- @current_unused_count = ApplicationRecord
+ @current_unused_count = described_class
.connection
- .execute("SELECT COUNT(*) FROM pg_replication_slots WHERE active = 'f';")
- .first
- .fetch('count')
- .to_i
+ .select_value("SELECT COUNT(*) FROM pg_replication_slots WHERE active = 'f';")
- ApplicationRecord
+ described_class
.connection
.execute("SELECT * FROM pg_create_physical_replication_slot('test_slot');")
end
after(:all) do
unless skip_examples
- ApplicationRecord
+ described_class
.connection
.execute("SELECT pg_drop_replication_slot('test_slot');")
end
diff --git a/spec/models/preloaders/group_root_ancestor_preloader_spec.rb b/spec/models/preloaders/group_root_ancestor_preloader_spec.rb
deleted file mode 100644
index 0d622e84ef1..00000000000
--- a/spec/models/preloaders/group_root_ancestor_preloader_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Preloaders::GroupRootAncestorPreloader do
- let_it_be(:user) { create(:user) }
- let_it_be(:root_parent1) { create(:group, :private, name: 'root-1', path: 'root-1') }
- let_it_be(:root_parent2) { create(:group, :private, name: 'root-2', path: 'root-2') }
- let_it_be(:guest_group) { create(:group, name: 'public guest', path: 'public-guest') }
- let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer', parent: root_parent1) }
- let_it_be(:private_developer_group) { create(:group, :private, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
- let_it_be(:public_maintainer_group) { create(:group, :private, name: 'a public maintainer', path: 'a-public-maintainer', parent: root_parent2) }
-
- let(:root_query_regex) { /\ASELECT.+FROM "namespaces" WHERE "namespaces"."id" = \d+/ }
- let(:additional_preloads) { [] }
- let(:groups) { [guest_group, private_maintainer_group, private_developer_group, public_maintainer_group] }
- let(:pristine_groups) { Group.where(id: groups) }
-
- shared_examples 'executes N matching DB queries' do |expected_query_count, query_method = nil|
- it 'executes the specified root_ancestor queries' do
- expect do
- pristine_groups.each do |group|
- root_ancestor = group.root_ancestor
-
- root_ancestor.public_send(query_method) if query_method.present?
- end
- end.to make_queries_matching(root_query_regex, expected_query_count)
- end
-
- it 'strong_memoizes the correct root_ancestor' do
- pristine_groups.each do |group|
- expected_parent_id = group.root_ancestor.id == group.id ? nil : group.root_ancestor.id
-
- expect(group.parent_id).to eq(expected_parent_id)
- end
- end
- end
-
- context 'when the preloader is used' do
- before do
- preload_ancestors
- end
-
- context 'when no additional preloads are provided' do
- it_behaves_like 'executes N matching DB queries', 0
- end
-
- context 'when additional preloads are provided' do
- let(:additional_preloads) { [:route] }
- let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ }
-
- it_behaves_like 'executes N matching DB queries', 0, :full_path
- end
- end
-
- context 'when the preloader is not used' do
- it_behaves_like 'executes N matching DB queries', 2
- end
-
- def preload_ancestors
- described_class.new(pristine_groups, additional_preloads).execute
- end
-end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 58c0ff48b46..37da30fb54c 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -3,40 +3,59 @@
require 'spec_helper'
RSpec.describe ProjectAuthorization do
- let_it_be(:user) { create(:user) }
- let_it_be(:project1) { create(:project) }
- let_it_be(:project2) { create(:project) }
- let_it_be(:project3) { create(:project) }
+ describe 'relations' do
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:project) }
+ end
- describe '.insert_authorizations' do
- it 'inserts the authorizations' do
- described_class
- .insert_authorizations([[user.id, project1.id, Gitlab::Access::MAINTAINER]])
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to validate_presence_of(:access_level) }
+ it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.all_values) }
+ end
- expect(user.project_authorizations.count).to eq(1)
- end
+ describe '.insert_all' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
- it 'inserts rows in batches' do
- described_class.insert_authorizations([
- [user.id, project1.id, Gitlab::Access::MAINTAINER],
- [user.id, project2.id, Gitlab::Access::MAINTAINER]
- ], 1)
+ it 'skips duplicates and inserts the remaining rows without error' do
+ create(:project_authorization, user: user, project: project_1, access_level: Gitlab::Access::MAINTAINER)
+
+ attributes = [
+ { user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER },
+ { user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER },
+ { user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
- expect(user.project_authorizations.count).to eq(2)
+ described_class.insert_all(attributes)
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
end
+ end
- it 'skips duplicates and inserts the remaining rows without error' do
- create(:project_authorization, user: user, project: project1, access_level: Gitlab::Access::MAINTAINER)
+ describe '.insert_all_in_batches' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
- rows = [
- [user.id, project1.id, Gitlab::Access::MAINTAINER],
- [user.id, project2.id, Gitlab::Access::MAINTAINER],
- [user.id, project3.id, Gitlab::Access::MAINTAINER]
+ let(:per_batch_size) { 2 }
+
+ it 'inserts the rows in batches, as per the `per_batch` size' do
+ attributes = [
+ { user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER },
+ { user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER },
+ { user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER }
]
- described_class.insert_authorizations(rows)
+ expect(described_class).to receive(:insert_all).twice.and_call_original
+
+ described_class.insert_all_in_batches(attributes, per_batch_size)
- expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(rows)
+ expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 3a8768ff463..4e38bf7d3e3 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -261,7 +261,49 @@ RSpec.describe Project, factory_default: :keep do
end
context 'updating a project' do
- context 'with project namespaces' do
+ shared_examples 'project update' do
+ let_it_be(:project_namespace) { create(:project_namespace) }
+ let_it_be(:project) { project_namespace.project }
+
+ context 'when project namespace is not set' do
+ before do
+ project.update_column(:project_namespace_id, nil)
+ project.reload
+ end
+
+ it 'updates the project successfully' do
+ # pre-check that project does not have a project namespace
+ expect(project.project_namespace).to be_nil
+
+ project.update!(path: 'hopefully-valid-path2')
+
+ expect(project).to be_persisted
+ expect(project).to be_valid
+ expect(project.path).to eq('hopefully-valid-path2')
+ expect(project.project_namespace).to be_nil
+ end
+ end
+
+ context 'when project has an associated project namespace' do
+ # when FF is disabled creating a project does not create a project_namespace, so we create one
+ it 'project is INVALID when trying to remove project namespace' do
+ project.reload
+ # check that project actually has an associated project namespace
+ expect(project.project_namespace_id).to eq(project_namespace.id)
+
+ expect do
+ project.update!(project_namespace_id: nil, path: 'hopefully-valid-path1')
+ end.to raise_error(ActiveRecord::RecordInvalid)
+ expect(project).to be_invalid
+ expect(project.errors.full_messages).to include("Project namespace can't be blank")
+ expect(project.reload.project_namespace).to be_in_sync_with_project(project)
+ end
+ end
+ end
+
+ context 'with create_project_namespace_on_project_create FF enabled' do
+ it_behaves_like 'project update'
+
it 'keeps project namespace in sync with project' do
project = create(:project)
project.update!(path: 'hopefully-valid-path1')
@@ -270,19 +312,21 @@ RSpec.describe Project, factory_default: :keep do
expect(project.project_namespace).to be_persisted
expect(project.project_namespace).to be_in_sync_with_project(project)
end
+ end
- context 'with FF disabled' do
- before do
- stub_feature_flags(create_project_namespace_on_project_create: false)
- end
+ context 'with create_project_namespace_on_project_create FF disabled' do
+ before do
+ stub_feature_flags(create_project_namespace_on_project_create: false)
+ end
- it 'does not create a project namespace when project is updated' do
- project = create(:project)
- project.update!(path: 'hopefully-valid-path1')
+ it_behaves_like 'project update'
- expect(project).to be_persisted
- expect(project.project_namespace).to be_nil
- end
+ it 'does not create a project namespace when project is updated' do
+ project = create(:project)
+ project.update!(path: 'hopefully-valid-path1')
+
+ expect(project).to be_persisted
+ expect(project.project_namespace).to be_nil
end
end
end
@@ -807,6 +851,23 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#remove_project_authorizations' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+
+ it 'removes the project authorizations of the specified users in the current project' do
+ create(:project_authorization, user: user_1, project: project)
+ create(:project_authorization, user: user_2, project: project)
+ create(:project_authorization, user: user_3, project: project)
+
+ project.remove_project_authorizations([user_1.id, user_2.id])
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(user_1.id, user_2.id)
+ end
+ end
+
describe 'reference methods' do
let_it_be(:owner) { create(:user, name: 'Gitlab') }
let_it_be(:namespace) { create(:namespace, name: 'Sample namespace', path: 'sample-namespace', owner: owner) }
@@ -3520,6 +3581,29 @@ RSpec.describe Project, factory_default: :keep do
expect(project.forks).to contain_exactly(forked_project)
end
end
+
+ describe '#lfs_objects_oids_from_fork_source' do
+ let_it_be(:lfs_object) { create(:lfs_object) }
+ let_it_be(:another_lfs_object) { create(:lfs_object) }
+
+ let(:oids) { [lfs_object.oid, another_lfs_object.oid] }
+
+ context 'when fork has one of two LFS objects' do
+ before do
+ create(:lfs_objects_project, lfs_object: lfs_object, project: project)
+ create(:lfs_objects_project, lfs_object: another_lfs_object, project: forked_project)
+ end
+
+ it 'returns OIDs of owned LFS objects', :aggregate_failures do
+ expect(forked_project.lfs_objects_oids_from_fork_source(oids: oids)).to eq([lfs_object.oid])
+ expect(forked_project.lfs_objects_oids(oids: oids)).to eq([another_lfs_object.oid])
+ end
+
+ it 'returns empty when project is not a fork' do
+ expect(project.lfs_objects_oids_from_fork_source(oids: oids)).to eq([])
+ end
+ end
+ end
end
it_behaves_like 'can housekeep repository' do
@@ -7392,6 +7476,83 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :project }
+ end
+
+ context 'Projects::SyncEvent' do
+ let!(:project) { create(:project) }
+
+ let_it_be(:new_namespace1) { create(:namespace) }
+ let_it_be(:new_namespace2) { create(:namespace) }
+
+ context 'when creating the project' do
+ it 'creates a projects_sync_event record' do
+ expect(project.sync_events.count).to eq(1)
+ end
+
+ it 'enqueues ProcessProjectSyncEventsWorker' do
+ expect(Projects::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ create(:project)
+ end
+ end
+
+ context 'when updating project namespace_id' do
+ it 'creates a projects_sync_event record' do
+ expect do
+ project.update!(namespace_id: new_namespace1.id)
+ end.to change(Projects::SyncEvent, :count).by(1)
+
+ expect(project.sync_events.count).to eq(2)
+ end
+
+ it 'enqueues ProcessProjectSyncEventsWorker' do
+ expect(Projects::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ project.update!(namespace_id: new_namespace1.id)
+ end
+ end
+
+ context 'when updating project other attribute' do
+ it 'does not create a projects_sync_event record' do
+ expect do
+ project.update!(name: 'hello')
+ end.not_to change(Projects::SyncEvent, :count)
+ end
+ end
+
+ context 'in the same transaction' do
+ context 'when updating different namespace_id' do
+ it 'creates two projects_sync_event records' do
+ expect do
+ Project.transaction do
+ project.update!(namespace_id: new_namespace1.id)
+ project.update!(namespace_id: new_namespace2.id)
+ end
+ end.to change(Projects::SyncEvent, :count).by(2)
+
+ expect(project.sync_events.count).to eq(3)
+ end
+ end
+
+ context 'when updating the same namespace_id' do
+ it 'creates one projects_sync_event record' do
+ expect do
+ Project.transaction do
+ project.update!(namespace_id: new_namespace1.id)
+ project.update!(namespace_id: new_namespace1.id)
+ end
+ end.to change(Projects::SyncEvent, :count).by(1)
+
+ expect(project.sync_events.count).to eq(2)
+ end
+ end
+ end
+ end
+
+ private
+
def finish_job(export_job)
export_job.start
export_job.finish
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index a6a56180ce1..c0bad96effc 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -237,7 +237,6 @@ RSpec.describe ProjectTeam do
context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
before do
- stub_experiments(invite_members_for_task: true)
project.team.add_users([user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id)
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index d50c60774b4..96cbdb468aa 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1679,6 +1679,16 @@ RSpec.describe Repository do
expect(blobs.first.name).to eq('foobar')
expect(blobs.size).to eq(1)
end
+
+ context 'when Gitaly returns NoRepository' do
+ before do
+ allow(repository.raw_repository).to receive(:batch_blobs).and_raise(Gitlab::Git::Repository::NoRepository)
+ end
+
+ it 'returns empty array' do
+ expect(repository.blobs_at([%w[master foobar]])).to match_array([])
+ end
+ end
end
describe '#root_ref' do
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index e24dd910c39..5d4a78bb15f 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -403,6 +403,51 @@ RSpec.describe Snippet do
end
end
+ describe '.find_by_project_title_trunc_created_at' do
+ let_it_be(:snippet) { create(:snippet) }
+ let_it_be(:created_at_without_ms) { snippet.created_at.change(usec: 0) }
+
+ it 'returns a record if arguments match' do
+ result = described_class.find_by_project_title_trunc_created_at(
+ snippet.project,
+ snippet.title,
+ created_at_without_ms
+ )
+
+ expect(result).to eq(snippet)
+ end
+
+ it 'returns nil if project does not match' do
+ result = described_class.find_by_project_title_trunc_created_at(
+ 'unmatched project',
+ snippet.title,
+ created_at_without_ms # to_s truncates ms of the argument
+ )
+
+ expect(result).to be(nil)
+ end
+
+ it 'returns nil if title does not match' do
+ result = described_class.find_by_project_title_trunc_created_at(
+ snippet.project,
+ 'unmatched title',
+ created_at_without_ms # to_s truncates ms of the argument
+ )
+
+ expect(result).to be(nil)
+ end
+
+ it 'returns nil if created_at does not match' do
+ result = described_class.find_by_project_title_trunc_created_at(
+ snippet.project,
+ snippet.title,
+ snippet.created_at # fails match by milliseconds
+ )
+
+ expect(result).to be(nil)
+ end
+ end
+
describe '#participants' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:snippet) { create(:snippet, content: 'foo', project: project) }
diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb
index ac2e8d167b3..7af9b7897ff 100644
--- a/spec/models/terraform/state_version_spec.rb
+++ b/spec/models/terraform/state_version_spec.rb
@@ -92,4 +92,9 @@ RSpec.describe Terraform::StateVersion do
end
end
end
+
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:model) { create(:terraform_state_version) }
+ let!(:parent) { model.build }
+ end
end
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index d6c31307e30..f96d02e6a82 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Timelog do
it { expect(subject.project_id).not_to be_nil }
- describe 'Issuable validation' do
+ describe 'validation' do
it 'is invalid if issue_id and merge_request_id are missing' do
subject.attributes = { issue: nil, merge_request: nil }
@@ -139,4 +139,14 @@ RSpec.describe Timelog do
time + 1.day
end
end
+
+ describe 'hooks' do
+ describe '.set_project' do
+ it 'populates project with issuable project' do
+ timelog = create(:issue_timelog, issue: issue)
+
+ expect(timelog.project_id).to be(timelog.issue.project_id)
+ end
+ end
+ end
end
diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb
index 7a70cf69566..6bb9ccfcf35 100644
--- a/spec/models/u2f_registration_spec.rb
+++ b/spec/models/u2f_registration_spec.rb
@@ -20,9 +20,9 @@ RSpec.describe U2fRegistration do
describe '#create_webauthn_registration' do
shared_examples_for 'creates webauthn registration' do
it 'creates webauthn registration' do
- u2f_registration.save!
+ created_record = u2f_registration
- webauthn_registration = WebauthnRegistration.where(u2f_registration_id: u2f_registration.id)
+ webauthn_registration = WebauthnRegistration.where(u2f_registration_id: created_record.id)
expect(webauthn_registration).to exist
end
end
@@ -43,13 +43,16 @@ RSpec.describe U2fRegistration do
it 'logs error' do
allow(Gitlab::Auth::U2fWebauthnConverter).to receive(:new).and_raise('boom!')
- expect(Gitlab::AppJsonLogger).to(
- receive(:error).with(a_hash_including(event: 'u2f_migration',
- error: 'RuntimeError',
- message: 'U2F to WebAuthn conversion failed'))
- )
- u2f_registration.save!
+ allow_next_instance_of(U2fRegistration) do |u2f_registration|
+ allow(u2f_registration).to receive(:id).and_return(123)
+ end
+
+ expect(Gitlab::ErrorTracking).to(
+ receive(:track_exception).with(kind_of(StandardError),
+ u2f_registration_id: 123))
+
+ u2f_registration
end
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index b5d4614d206..f8cea619233 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -110,8 +110,8 @@ RSpec.describe User do
it { is_expected.to have_many(:spam_logs).dependent(:destroy) }
it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:award_emoji).dependent(:destroy) }
- it { is_expected.to have_many(:builds).dependent(:nullify) }
- it { is_expected.to have_many(:pipelines).dependent(:nullify) }
+ it { is_expected.to have_many(:builds) }
+ it { is_expected.to have_many(:pipelines) }
it { is_expected.to have_many(:chat_names).dependent(:destroy) }
it { is_expected.to have_many(:uploads) }
it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') }
@@ -124,7 +124,7 @@ RSpec.describe User do
it { is_expected.to have_many(:created_custom_emoji).inverse_of(:creator) }
it { is_expected.to have_many(:in_product_marketing_emails) }
it { is_expected.to have_many(:timelogs) }
- it { is_expected.to have_many(:callouts).class_name('UserCallout') }
+ it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
describe '#user_detail' do
@@ -1080,6 +1080,16 @@ RSpec.describe User do
end
end
+ context 'strip attributes' do
+ context 'name' do
+ let(:user) { User.new(name: ' John Smith ') }
+
+ it 'strips whitespaces on validation' do
+ expect { user.valid? }.to change { user.name }.to('John Smith')
+ end
+ end
+ end
+
describe 'Respond to' do
it { is_expected.to respond_to(:admin?) }
it { is_expected.to respond_to(:name) }
@@ -1540,7 +1550,11 @@ RSpec.describe User do
allow(user).to receive(:update_highest_role)
end
- expect(SecureRandom).to receive(:hex).and_return('3b8ca303')
+ allow_next_instance_of(Namespaces::UserNamespace) do |namespace|
+ allow(namespace).to receive(:schedule_sync_event_worker)
+ end
+
+ expect(SecureRandom).to receive(:hex).with(no_args).and_return('3b8ca303')
user = create(:user)
@@ -1612,6 +1626,46 @@ RSpec.describe User do
end
end
+ describe 'enabled_static_object_token' do
+ let_it_be(:static_object_token) { 'ilqx6jm1u945macft4eff0nw' }
+
+ it 'returns static object token when supported' do
+ allow(Gitlab::CurrentSettings).to receive(:static_objects_external_storage_enabled?).and_return(true)
+
+ user = create(:user, static_object_token: static_object_token)
+
+ expect(user.enabled_static_object_token).to eq(static_object_token)
+ end
+
+ it 'returns `nil` when not supported' do
+ allow(Gitlab::CurrentSettings).to receive(:static_objects_external_storage_enabled?).and_return(false)
+
+ user = create(:user, static_object_token: static_object_token)
+
+ expect(user.enabled_static_object_token).to be_nil
+ end
+ end
+
+ describe 'enabled_incoming_email_token' do
+ let_it_be(:incoming_email_token) { 'ilqx6jm1u945macft4eff0nw' }
+
+ it 'returns incoming email token when supported' do
+ allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(true)
+
+ user = create(:user, incoming_email_token: incoming_email_token)
+
+ expect(user.enabled_incoming_email_token).to eq(incoming_email_token)
+ end
+
+ it 'returns `nil` when not supported' do
+ allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(false)
+
+ user = create(:user, incoming_email_token: incoming_email_token)
+
+ expect(user.enabled_incoming_email_token).to be_nil
+ end
+ end
+
describe '#recently_sent_password_reset?' do
it 'is false when reset_password_sent_at is nil' do
user = build_stubbed(:user, reset_password_sent_at: nil)
@@ -1726,6 +1780,52 @@ RSpec.describe User do
end
end
+ context 'two_factor_u2f_enabled?' do
+ let_it_be(:user) { create(:user, :two_factor) }
+
+ context 'when webauthn feature flag is enabled' do
+ context 'user has no U2F registration' do
+ it { expect(user.two_factor_u2f_enabled?).to eq(false) }
+ end
+
+ context 'user has existing U2F registration' do
+ it 'returns false' do
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
+ create(:u2f_registration, name: 'my u2f device',
+ user: user,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw))
+
+ expect(user.two_factor_u2f_enabled?).to eq(false)
+ end
+ end
+ end
+
+ context 'when webauthn feature flag is disabled' do
+ before do
+ stub_feature_flags(webauthn: false)
+ end
+
+ context 'user has no U2F registration' do
+ it { expect(user.two_factor_u2f_enabled?).to eq(false) }
+ end
+
+ context 'user has existing U2F registration' do
+ it 'returns true' do
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
+ create(:u2f_registration, name: 'my u2f device',
+ user: user,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw))
+
+ expect(user.two_factor_u2f_enabled?).to eq(true)
+ end
+ end
+ end
+ end
+
describe 'projects' do
before do
@user = create(:user)
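
The four contexts added for two_factor_u2f_enabled? pin it down to: always false while the webauthn feature flag is enabled, otherwise true only when a U2F registration exists. A sketch consistent with that truth table; the Feature.enabled? call and the association query are assumptions, not the actual implementation.

def two_factor_u2f_enabled?
  # Once the :webauthn feature flag is on, U2F devices are represented as
  # WebauthnRegistration records, so this predicate is always false.
  return false if Feature.enabled?(:webauthn)

  u2f_registrations.exists?
end
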
@@ -1856,15 +1956,31 @@ RSpec.describe User do
end
context 'when user has running CI pipelines' do
- let(:service) { double }
let(:pipelines) { build_list(:ci_pipeline, 3, :running) }
- it 'aborts all running pipelines and related jobs' do
+ it 'drops all running pipelines and related jobs' do
+ drop_service = double
+ disable_service = double
+
expect(user).to receive(:pipelines).and_return(pipelines)
- expect(Ci::DropPipelineService).to receive(:new).and_return(service)
- expect(service).to receive(:execute_async_for_all).with(pipelines, :user_blocked, user)
+ expect(Ci::DropPipelineService).to receive(:new).and_return(drop_service)
+ expect(drop_service).to receive(:execute_async_for_all).with(pipelines, :user_blocked, user)
- user.block
+ expect(Ci::DisableUserPipelineSchedulesService).to receive(:new).and_return(disable_service)
+ expect(disable_service).to receive(:execute).with(user)
+
+ user.block!
+ end
+
+ it 'does not drop running pipelines if the transaction rolls back' do
+ expect(Ci::DropPipelineService).not_to receive(:new)
+ expect(Ci::DisableUserPipelineSchedulesService).not_to receive(:new)
+
+ User.transaction do
+ user.block
+
+ raise ActiveRecord::Rollback
+ end
end
end
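
The rollback example above indicates the pipeline clean-up is deferred until the surrounding transaction commits (for example via run_after_commit from the AfterCommitQueue concern) rather than firing inline from the state transition. The service wiring is implied by the expectations; the hook placement in this sketch is an assumption.

# Inside the User state machine, on the transition into the blocked state:
after_transition any => :blocked do |user|
  user.run_after_commit do
    # Skipped entirely if the enclosing transaction rolls back,
    # which is exactly what the second example asserts.
    Ci::DropPipelineService.new.execute_async_for_all(user.pipelines, :user_blocked, user)
    Ci::DisableUserPipelineSchedulesService.new.execute(user)
  end
end
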
@@ -2540,26 +2656,18 @@ RSpec.describe User do
end
describe '.find_by_full_path' do
- using RSpec::Parameterized::TableSyntax
-
- # TODO: this `where/when` can be removed in issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070
- # At that point we only need to check `user_namespace`
- where(namespace_type: [:namespace, :user_namespace])
+ let!(:user) { create(:user, namespace: create(:user_namespace)) }
- with_them do
- let!(:user) { create(:user, namespace: create(namespace_type)) }
-
- context 'with a route matching the given path' do
- let!(:route) { user.namespace.route }
+ context 'with a route matching the given path' do
+ let!(:route) { user.namespace.route }
- it 'returns the user' do
- expect(described_class.find_by_full_path(route.path)).to eq(user)
- end
+ it 'returns the user' do
+ expect(described_class.find_by_full_path(route.path)).to eq(user)
+ end
- it 'is case-insensitive' do
- expect(described_class.find_by_full_path(route.path.upcase)).to eq(user)
- expect(described_class.find_by_full_path(route.path.downcase)).to eq(user)
- end
+ it 'is case-insensitive' do
+ expect(described_class.find_by_full_path(route.path.upcase)).to eq(user)
+ expect(described_class.find_by_full_path(route.path.downcase)).to eq(user)
end
context 'with a redirect route matching the given path' do
@@ -3463,19 +3571,7 @@ RSpec.describe User do
subject { user.membership_groups }
- shared_examples 'returns groups where the user is a member' do
- specify { is_expected.to contain_exactly(parent_group, child_group) }
- end
-
- it_behaves_like 'returns groups where the user is a member'
-
- context 'when feature flag :linear_user_membership_groups is disabled' do
- before do
- stub_feature_flags(linear_user_membership_groups: false)
- end
-
- it_behaves_like 'returns groups where the user is a member'
- end
+ specify { is_expected.to contain_exactly(parent_group, child_group) }
end
describe '#authorizations_for_projects' do
@@ -5543,7 +5639,7 @@ RSpec.describe User do
describe '#dismissed_callout?' do
let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:feature_name) { UserCallout.feature_names.each_key.first }
+ let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first }
context 'when no callout dismissal record exists' do
it 'returns false when no ignore_dismissal_earlier_than provided' do
@@ -5553,7 +5649,7 @@ RSpec.describe User do
context 'when dismissed callout exists' do
before_all do
- create(:user_callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago)
+ create(:callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago)
end
it 'returns true when no ignore_dismissal_earlier_than provided' do
@@ -5572,12 +5668,12 @@ RSpec.describe User do
describe '#find_or_initialize_callout' do
let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:feature_name) { UserCallout.feature_names.each_key.first }
+ let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first }
subject(:find_or_initialize_callout) { user.find_or_initialize_callout(feature_name) }
context 'when callout exists' do
- let!(:callout) { create(:user_callout, user: user, feature_name: feature_name) }
+ let!(:callout) { create(:callout, user: user, feature_name: feature_name) }
it 'returns existing callout' do
expect(find_or_initialize_callout).to eq(callout)
@@ -5587,7 +5683,7 @@ RSpec.describe User do
context 'when callout does not exist' do
context 'when feature name is valid' do
it 'initializes a new callout' do
- expect(find_or_initialize_callout).to be_a_new(UserCallout)
+ expect(find_or_initialize_callout).to be_a_new(Users::Callout)
end
it 'is valid' do
@@ -5599,7 +5695,7 @@ RSpec.describe User do
let(:feature_name) { 'notvalid' }
it 'initializes a new callout' do
- expect(find_or_initialize_callout).to be_a_new(UserCallout)
+ expect(find_or_initialize_callout).to be_a_new(Users::Callout)
end
it 'is not valid' do
@@ -6092,20 +6188,6 @@ RSpec.describe User do
end
end
- describe '#default_dashboard?' do
- it 'is the default dashboard' do
- user = build(:user)
-
- expect(user.default_dashboard?).to be true
- end
-
- it 'is not the default dashboard' do
- user = build(:user, dashboard: 'stars')
-
- expect(user.default_dashboard?).to be false
- end
- end
-
describe '.dormant' do
it 'returns dormant users' do
freeze_time do
@@ -6218,19 +6300,7 @@ RSpec.describe User do
subject { user.send(:groups_with_developer_maintainer_project_access) }
- shared_examples 'groups_with_developer_maintainer_project_access examples' do
- specify { is_expected.to contain_exactly(developer_group2) }
- end
-
- it_behaves_like 'groups_with_developer_maintainer_project_access examples'
-
- context 'when feature flag :linear_user_groups_with_developer_maintainer_project_access is disabled' do
- before do
- stub_feature_flags(linear_user_groups_with_developer_maintainer_project_access: false)
- end
-
- it_behaves_like 'groups_with_developer_maintainer_project_access examples'
- end
+ specify { is_expected.to contain_exactly(developer_group2) }
end
describe '.get_ids_by_username' do
@@ -6269,4 +6339,8 @@ RSpec.describe User do
expect(user.user_readme).to be(nil)
end
end
+
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :user }
+ end
end
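
Several hunks above follow the UserCallout -> Users::Callout rename. On the model side, the association matchers near the top of the file correspond to roughly the declarations below (a sketch; any scope or dependent option is not visible from the matchers). The `it_behaves_like 'it has loose foreign keys'` added at the end reuses a shared example defined elsewhere in the suite, so no further setup is needed in this file.

class User < ApplicationRecord
  has_many :callouts,       class_name: 'Users::Callout'
  has_many :group_callouts, class_name: 'Users::GroupCallout'
end
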
diff --git a/spec/models/user_callout_spec.rb b/spec/models/users/callout_spec.rb
index 5b36c8450ea..293f0279e79 100644
--- a/spec/models/user_callout_spec.rb
+++ b/spec/models/users/callout_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe UserCallout do
- let_it_be(:callout) { create(:user_callout) }
+RSpec.describe Users::Callout do
+ let_it_be(:callout) { create(:callout) }
it_behaves_like 'having unique enum values'
diff --git a/spec/models/concerns/calloutable_spec.rb b/spec/models/users/calloutable_spec.rb
index d847413de88..01603d8bbd6 100644
--- a/spec/models/concerns/calloutable_spec.rb
+++ b/spec/models/users/calloutable_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Calloutable do
- subject { build(:user_callout) }
+RSpec.describe Users::Calloutable do
+ subject { build(:callout) }
describe "Associations" do
it { is_expected.to belong_to(:user) }
@@ -14,9 +14,9 @@ RSpec.describe Calloutable do
end
describe '#dismissed_after?' do
- let(:some_feature_name) { UserCallout.feature_names.keys.second }
- let(:callout_dismissed_month_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )}
- let(:callout_dismissed_day_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.day.ago )}
+ let(:some_feature_name) { Users::Callout.feature_names.keys.second }
+ let(:callout_dismissed_month_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )}
+ let(:callout_dismissed_day_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.day.ago )}
it 'returns whether a callout was dismissed after the specified date' do
expect(callout_dismissed_month_ago.dismissed_after?(15.days.ago)).to eq(false)
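
The '#dismissed_after?' examples (a month-old dismissal returning false for a 15-day cut-off, a day-old one returning true) pin the method down to a simple timestamp comparison. A sketch of the concern consistent with those expectations; the module layout is assumed.

module Users
  module Calloutable
    extend ActiveSupport::Concern

    included do
      belongs_to :user
    end

    # True when the callout was dismissed more recently than the given time.
    def dismissed_after?(dismissed_after)
      dismissed_at > dismissed_after
    end
  end
end
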
diff --git a/spec/models/work_item/type_spec.rb b/spec/models/work_item/type_spec.rb
index dd5324d63a0..cc18558975b 100644
--- a/spec/models/work_item/type_spec.rb
+++ b/spec/models/work_item/type_spec.rb
@@ -19,10 +19,10 @@ RSpec.describe WorkItem::Type do
it 'deletes type but not unrelated issues' do
type = create(:work_item_type)
- expect(WorkItem::Type.count).to eq(5)
+ expect(WorkItem::Type.count).to eq(6)
expect { type.destroy! }.not_to change(Issue, :count)
- expect(WorkItem::Type.count).to eq(4)
+ expect(WorkItem::Type.count).to eq(5)
end
end