gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2023-01-18 22:00:14 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2023-01-18 22:00:14 +0300
commit     05f0ebba3a2c8ddf39e436f412dc2ab5bf1353b2 (patch)
tree       11d0f2a6ec31c7793c184106cedc2ded3d9a2cc5 /spec/services
parent     ec73467c23693d0db63a797d10194da9e72a74af (diff)

Add latest changes from gitlab-org/gitlab@15-8-stable-ee (tag: v15.8.0-rc42)
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/achievements/create_service_spec.rb | 46
-rw-r--r--  spec/services/audit_event_service_spec.rb | 2
-rw-r--r--  spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb | 2
-rw-r--r--  spec/services/bulk_imports/create_service_spec.rb | 295
-rw-r--r--  spec/services/bulk_imports/get_importable_data_service_spec.rb | 14
-rw-r--r--  spec/services/chat_names/authorize_user_service_spec.rb | 5
-rw-r--r--  spec/services/ci/create_downstream_pipeline_service_spec.rb | 77
-rw-r--r--  spec/services/ci/create_pipeline_service/cache_spec.rb | 13
-rw-r--r--  spec/services/ci/create_pipeline_service/include_spec.rb | 22
-rw-r--r--  spec/services/ci/create_pipeline_service/logger_spec.rb | 69
-rw-r--r--  spec/services/ci/create_pipeline_service/rules_spec.rb | 5
-rw-r--r--  spec/services/ci/create_pipeline_service/variables_spec.rb | 37
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 33
-rw-r--r--  spec/services/ci/job_artifacts/create_service_spec.rb | 76
-rw-r--r--  spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb | 9
-rw-r--r--  spec/services/ci/job_artifacts/destroy_associations_service_spec.rb | 35
-rw-r--r--  spec/services/ci/job_artifacts/destroy_batch_service_spec.rb | 64
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb | 23
-rw-r--r--  spec/services/clusters/aws/authorize_role_service_spec.rb | 102
-rw-r--r--  spec/services/clusters/aws/fetch_credentials_service_spec.rb | 139
-rw-r--r--  spec/services/clusters/aws/finalize_creation_service_spec.rb | 124
-rw-r--r--  spec/services/clusters/aws/provision_service_spec.rb | 130
-rw-r--r--  spec/services/clusters/aws/verify_provision_status_service_spec.rb | 76
-rw-r--r--  spec/services/clusters/create_service_spec.rb | 1
-rw-r--r--  spec/services/clusters/gcp/fetch_operation_service_spec.rb | 45
-rw-r--r--  spec/services/clusters/gcp/finalize_creation_service_spec.rb | 161
-rw-r--r--  spec/services/clusters/gcp/provision_service_spec.rb | 71
-rw-r--r--  spec/services/clusters/gcp/verify_provision_status_service_spec.rb | 111
-rw-r--r--  spec/services/database/consistency_check_service_spec.rb | 2
-rw-r--r--  spec/services/design_management/save_designs_service_spec.rb | 23
-rw-r--r--  spec/services/discussions/resolve_service_spec.rb | 14
-rw-r--r--  spec/services/discussions/unresolve_service_spec.rb | 29
-rw-r--r--  spec/services/draft_notes/publish_service_spec.rb | 8
-rw-r--r--  spec/services/environments/stop_stale_service_spec.rb | 49
-rw-r--r--  spec/services/feature_flags/create_service_spec.rb | 2
-rw-r--r--  spec/services/feature_flags/destroy_service_spec.rb | 2
-rw-r--r--  spec/services/feature_flags/update_service_spec.rb | 2
-rw-r--r--  spec/services/files/base_service_spec.rb | 59
-rw-r--r--  spec/services/groups/import_export/export_service_spec.rb | 10
-rw-r--r--  spec/services/groups/import_export/import_service_spec.rb | 388
-rw-r--r--  spec/services/groups/transfer_service_spec.rb | 37
-rw-r--r--  spec/services/groups/update_shared_runners_service_spec.rb | 28
-rw-r--r--  spec/services/ide/schemas_config_service_spec.rb | 34
-rw-r--r--  spec/services/import/github/gists_import_service_spec.rb | 26
-rw-r--r--  spec/services/import/github_service_spec.rb | 56
-rw-r--r--  spec/services/issue_links/create_service_spec.rb | 4
-rw-r--r--  spec/services/issues/close_service_spec.rb | 2
-rw-r--r--  spec/services/issues/export_csv_service_spec.rb | 2
-rw-r--r--  spec/services/issues/update_service_spec.rb | 2
-rw-r--r--  spec/services/lfs/file_transformer_spec.rb | 38
-rw-r--r--  spec/services/members/destroy_service_spec.rb | 102
-rw-r--r--  spec/services/members/update_service_spec.rb | 44
-rw-r--r--  spec/services/merge_requests/base_service_spec.rb | 68
-rw-r--r--  spec/services/merge_requests/rebase_service_spec.rb | 39
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 26
-rw-r--r--  spec/services/ml/experiment_tracking/candidate_repository_spec.rb | 22
-rw-r--r--  spec/services/notes/create_service_spec.rb | 18
-rw-r--r--  spec/services/notification_service_spec.rb | 9
-rw-r--r--  spec/services/packages/conan/search_service_spec.rb | 2
-rw-r--r--  spec/services/pages_domains/create_service_spec.rb | 3
-rw-r--r--  spec/services/pages_domains/delete_service_spec.rb | 3
-rw-r--r--  spec/services/pages_domains/retry_acme_order_service_spec.rb | 2
-rw-r--r--  spec/services/pages_domains/update_service_spec.rb | 3
-rw-r--r--  spec/services/personal_access_tokens/revoke_service_spec.rb | 14
-rw-r--r--  spec/services/projects/create_service_spec.rb | 5
-rw-r--r--  spec/services/projects/import_service_spec.rb | 22
-rw-r--r--  spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb | 20
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 4
-rw-r--r--  spec/services/repositories/changelog_service_spec.rb | 2
-rw-r--r--  spec/services/search_service_spec.rb | 28
-rw-r--r--  spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb | 20
-rw-r--r--  spec/services/security/ci_configuration/sast_create_service_spec.rb | 2
-rw-r--r--  spec/services/service_ping/submit_service_ping_service_spec.rb | 13
-rw-r--r--  spec/services/service_response_spec.rb | 36
-rw-r--r--  spec/services/test_hooks/project_service_spec.rb | 18
-rw-r--r--  spec/services/test_hooks/system_service_spec.rb | 4
-rw-r--r--  spec/services/todo_service_spec.rb | 129
-rw-r--r--  spec/services/users/block_service_spec.rb | 11
-rw-r--r--  spec/services/users/signup_service_spec.rb | 20
-rw-r--r--  spec/services/users/unblock_service_spec.rb | 45
-rw-r--r--  spec/services/work_items/create_service_spec.rb | 2
-rw-r--r--  spec/services/work_items/parent_links/create_service_spec.rb | 2
-rw-r--r--  spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb | 4
84 files changed, 1585 insertions(+), 1767 deletions(-)
diff --git a/spec/services/achievements/create_service_spec.rb b/spec/services/achievements/create_service_spec.rb
new file mode 100644
index 00000000000..f62a45deb50
--- /dev/null
+++ b/spec/services/achievements/create_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Achievements::CreateService, feature_category: :users do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { attributes_for(:achievement, namespace: group) }
+
+ subject(:response) { described_class.new(namespace: group, current_user: user, params: params).execute }
+
+ context 'when user does not have permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.message).to match_array(
+ ['You have insufficient permissions to create achievements for this namespace'])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_maintainer(user)
+ end
+
+ it 'creates an achievement' do
+ expect(response).to be_success
+ end
+
+ it 'returns an error when the achievement is not persisted' do
+ params[:name] = nil
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["Name can't be blank"])
+ end
+ end
+ end
+end
diff --git a/spec/services/audit_event_service_spec.rb b/spec/services/audit_event_service_spec.rb
index 063d250f22b..1d079adc0be 100644
--- a/spec/services/audit_event_service_spec.rb
+++ b/spec/services/audit_event_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuditEventService do
+RSpec.describe AuditEventService, :with_license do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, :with_sign_ins) }
let_it_be(:project_member) { create(:project_member, user: user) }
diff --git a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
index 5a7852fc32f..9a74f5ca07a 100644
--- a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
+++ b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::CreatePipelineTrackersService do
+RSpec.describe BulkImports::CreatePipelineTrackersService, feature_category: :importers do
describe '#execute!' do
context 'when entity is group' do
it 'creates trackers for group entity' do
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index f1e5533139e..75f88e3989c 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe BulkImports::CreateService do
+RSpec.describe BulkImports::CreateService, feature_category: :importers do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
let(:destination_group) { create(:group, path: 'destination1') }
+ let(:migrate_projects) { true }
let_it_be(:parent_group) { create(:group, path: 'parent-group') }
let(:params) do
[
@@ -13,19 +14,23 @@ RSpec.describe BulkImports::CreateService do
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination group 1',
- destination_namespace: 'full/path/to/destination1'
+ destination_namespace: 'parent-group',
+ migrate_projects: migrate_projects
+
},
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group2',
destination_slug: 'destination group 2',
- destination_namespace: 'full/path/to/destination2'
+ destination_namespace: 'parent-group',
+ migrate_projects: migrate_projects
},
{
source_type: 'project_entity',
source_full_path: 'full/path/to/project1',
destination_slug: 'destination project 1',
- destination_namespace: 'full/path/to/destination1'
+ destination_namespace: 'parent-group',
+ migrate_projects: migrate_projects
}
]
end
@@ -33,113 +38,223 @@ RSpec.describe BulkImports::CreateService do
subject { described_class.new(user, params, credentials) }
describe '#execute' do
- let_it_be(:source_version) do
- Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
- ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
- end
-
- before do
- allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
- allow(instance).to receive(:instance_version).and_return(source_version)
- allow(instance).to receive(:instance_enterprise).and_return(false)
- end
- end
+ context 'when gitlab version is 15.5 or higher' do
+ let(:source_version) { { version: "15.6.0", enterprise: false } }
- it 'creates bulk import' do
- parent_group.add_owner(user)
- expect { subject.execute }.to change { BulkImport.count }.by(1)
-
- last_bulk_import = BulkImport.last
-
- expect(last_bulk_import.user).to eq(user)
- expect(last_bulk_import.source_version).to eq(source_version.to_s)
- expect(last_bulk_import.user).to eq(user)
- expect(last_bulk_import.source_enterprise).to eq(false)
-
- expect_snowplow_event(
- category: 'BulkImports::CreateService',
- action: 'create',
- label: 'bulk_import_group'
- )
-
- expect_snowplow_event(
- category: 'BulkImports::CreateService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
- )
- end
-
- it 'creates bulk import entities' do
- expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
- end
+ context 'when a BulkImports::Error is raised while validating the instance version' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client)
+ .to receive(:validate_instance_version!)
+ .and_raise(BulkImports::Error, "This is a BulkImports error.")
+ end
+ end
- it 'creates bulk import configuration' do
- expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
- end
+ it 'rescues the error and raises a ServiceResponse::Error' do
+ result = subject.execute
- it 'enqueues BulkImportWorker' do
- expect(BulkImportWorker).to receive(:perform_async)
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("This is a BulkImports error.")
+ end
+ end
- subject.execute
- end
+ context 'when required scopes are not present' do
+ it 'returns ServiceResponse with error if token does not have api scope' do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(
+ status: 200,
+ body: source_version.to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
- it 'returns success ServiceResponse' do
- result = subject.execute
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:validate_instance_version!).and_raise(BulkImports::Error.scope_validation_failure)
+ end
- expect(result).to be_a(ServiceResponse)
- expect(result).to be_success
- end
+ result = subject.execute
- it 'returns ServiceResponse with error if validation fails' do
- params[0][:source_full_path] = nil
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message)
+ .to eq(
+ "Import aborted as the provided personal access token does not have the required 'api' scope or is " \
+ "no longer valid."
+ )
+ end
+ end
- result = subject.execute
+ context 'when token validation succeeds' do
+ before do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 200, body: source_version.to_json, headers: { 'Content-Type' => 'application/json' })
+ stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token')
+ .to_return(
+ status: 200,
+ body: { 'scopes' => ['api'] }.to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
- expect(result).to be_a(ServiceResponse)
- expect(result).to be_error
- expect(result.message).to eq("Validation failed: Source full path can't be blank")
- end
+ it 'creates bulk import' do
+ parent_group.add_owner(user)
+ expect { subject.execute }.to change { BulkImport.count }.by(1)
- describe '#user-role' do
- context 'when there is a parent_namespace and the user is a member' do
- let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
- let(:params) do
- [
- {
- source_type: 'group_entity',
- source_full_path: 'full/path/to/group1',
- destination_slug: 'destination200',
- destination_namespace: 'parent-group'
- }
- ]
- end
+ last_bulk_import = BulkImport.last
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_version).to eq(source_version[:version])
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_enterprise).to eq(false)
- it 'defines access_level from parent namespace membership' do
- parent_group.add_guest(user)
- subject.execute
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'bulk_import_group'
+ )
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
- extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
+ extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
end
+
+ describe 'projects migration flag' do
+ let(:import) { BulkImport.last }
+
+ context 'when false' do
+ let(:migrate_projects) { false }
+
+ it 'sets false' do
+ subject.execute
+
+ expect(import.entities.pluck(:migrate_projects)).to contain_exactly(false, false, false)
+ end
+ end
+
+ context 'when true' do
+ let(:migrate_projects) { true }
+
+ it 'sets true' do
+ subject.execute
+
+ expect(import.entities.pluck(:migrate_projects)).to contain_exactly(true, true, true)
+ end
+ end
+
+ context 'when nil' do
+ let(:migrate_projects) { nil }
+
+ it 'sets true' do
+ subject.execute
+
+ expect(import.entities.pluck(:migrate_projects)).to contain_exactly(true, true, true)
+ end
+ end
+ end
end
+ end
- context 'when there is a parent_namespace and the user is not a member' do
- let(:params) do
- [
- {
- source_type: 'group_entity',
- source_full_path: 'full/path/to/group1',
- destination_slug: 'destination-group-1',
- destination_namespace: 'parent-group'
- }
- ]
+ context 'when gitlab version is lower than 15.5' do
+ let(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
+ end
+ end
+
+ it 'creates bulk import' do
+ parent_group.add_owner(user)
+ expect { subject.execute }.to change { BulkImport.count }.by(1)
+
+ last_bulk_import = BulkImport.last
+
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_version).to eq(source_version.to_s)
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_enterprise).to eq(false)
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'bulk_import_group'
+ )
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
+ )
+ end
+
+ it 'creates bulk import entities' do
+ expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
+ end
+
+ it 'creates bulk import configuration' do
+ expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
+ end
+
+ it 'enqueues BulkImportWorker' do
+ expect(BulkImportWorker).to receive(:perform_async)
+
+ subject.execute
+ end
+
+ it 'returns success ServiceResponse' do
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_success
+ end
+
+ it 'returns ServiceResponse with error if validation fails' do
+ params[0][:source_full_path] = nil
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("Validation failed: Source full path can't be blank")
+ end
+
+ describe '#user-role' do
+ context 'when there is a parent_namespace and the user is a member' do
+ let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
+ let(:params) do
+ [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full/path/to/group1',
+ destination_slug: 'destination200',
+ destination_namespace: 'parent-group'
+ }
+ ]
+ end
+
+ it 'defines access_level from parent namespace membership' do
+ parent_group.add_guest(user)
+ subject.execute
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
+ )
+ end
end
it 'defines access_level as not a member' do
diff --git a/spec/services/bulk_imports/get_importable_data_service_spec.rb b/spec/services/bulk_imports/get_importable_data_service_spec.rb
index eccd3e5f49d..570f5199f01 100644
--- a/spec/services/bulk_imports/get_importable_data_service_spec.rb
+++ b/spec/services/bulk_imports/get_importable_data_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::GetImportableDataService do
+RSpec.describe BulkImports::GetImportableDataService, feature_category: :importers do
describe '#execute' do
include_context 'bulk imports requests context', 'https://gitlab.example.com'
@@ -34,6 +34,18 @@ RSpec.describe BulkImports::GetImportableDataService do
]
end
+ let(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
+ end
+ end
+
subject do
described_class.new(params, query_params, credentials).execute
end
diff --git a/spec/services/chat_names/authorize_user_service_spec.rb b/spec/services/chat_names/authorize_user_service_spec.rb
index 53d90c7f100..4c261ece504 100644
--- a/spec/services/chat_names/authorize_user_service_spec.rb
+++ b/spec/services/chat_names/authorize_user_service_spec.rb
@@ -2,12 +2,11 @@
require 'spec_helper'
-RSpec.describe ChatNames::AuthorizeUserService do
+RSpec.describe ChatNames::AuthorizeUserService, feature_category: :users do
describe '#execute' do
- let(:integration) { create(:integration) }
let(:result) { subject.execute }
- subject { described_class.new(integration, params) }
+ subject { described_class.new(params) }
context 'when all parameters are valid' do
let(:params) { { team_id: 'T0001', team_domain: 'myteam', user_id: 'U0001', user_name: 'user' } }
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index bcdb2b4f796..fd978bffacb 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -41,12 +41,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
subject { service.execute(bridge) }
- shared_context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- before do
- stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)
- end
- end
-
context 'when downstream project has not been found' do
let(:trigger) do
{ trigger: { project: 'unknown/project' } }
@@ -128,19 +122,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- it 'creates a new pipeline in a downstream project' do
- expect(pipeline.user).to eq bridge.user
- expect(pipeline.project).to eq downstream_project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
- expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
- expect(pipeline.source_bridge).to eq bridge
- expect(pipeline.source_bridge).to be_a ::Ci::Bridge
- end
- end
-
it_behaves_like 'logs downstream pipeline creation' do
let(:downstream_pipeline) { pipeline }
let(:expected_root_pipeline) { upstream_pipeline }
@@ -179,31 +160,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(subject).to be_error
expect(subject.message).to eq("Already has a downstream pipeline")
end
-
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- before do
- bridge.sourced_pipelines.create!(
- source_pipeline: bridge.pipeline,
- source_project: bridge.project,
- project: bridge.project,
- pipeline: create(:ci_pipeline, project: bridge.project)
- )
- end
-
- it 'logs an error and exits' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- instance_of(described_class::DuplicateDownstreamPipelineError),
- bridge_id: bridge.id, project_id: bridge.project.id)
- .and_call_original
- expect(Ci::CreatePipelineService).not_to receive(:new)
- expect(subject).to be_error
- expect(subject.message).to eq("Already has a downstream pipeline")
- end
- end
end
context 'when target ref is not specified' do
@@ -237,19 +193,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- it 'creates a new pipeline in a downstream project' do
- expect(pipeline.user).to eq bridge.user
- expect(pipeline.project).to eq downstream_project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
- expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
- expect(pipeline.source_bridge).to eq bridge
- expect(pipeline.source_bridge).to be_a ::Ci::Bridge
- end
- end
-
it 'updates the bridge status when downstream pipeline gets processed' do
expect(pipeline.reload).to be_failed
expect(bridge.reload).to be_failed
@@ -301,20 +244,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- it 'creates a child pipeline in the same project' do
- expect(pipeline.builds.map(&:name)).to match_array(%w[rspec echo])
- expect(pipeline.user).to eq bridge.user
- expect(pipeline.project).to eq bridge.project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
- expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
- expect(pipeline.source_bridge).to eq bridge
- expect(pipeline.source_bridge).to be_a ::Ci::Bridge
- end
- end
-
it 'updates bridge status when downstream pipeline gets processed' do
expect(pipeline.reload).to be_created
expect(bridge.reload).to be_success
@@ -825,11 +754,13 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
it 'does not create a pipeline and drops the bridge' do
expect { subject }.not_to change(downstream_project.ci_pipelines, :count)
expect(subject).to be_error
- expect(subject.message).to match_array(["No stages / jobs for this pipeline."])
+ expect(subject.message).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
expect(bridge.reload).to be_failed
expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
- expect(bridge.options[:downstream_errors]).to eq(['No stages / jobs for this pipeline.'])
+ expect(bridge.options[:downstream_errors]).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
index 82c3d374636..f9640f99031 100644
--- a/spec/services/ci/create_pipeline_service/cache_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
paths: ['logs/', 'binaries/'],
policy: 'pull-push',
untracked: true,
+ unprotect: false,
when: 'on_success'
}
@@ -69,7 +70,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /[a-f0-9]{40}/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
@@ -85,7 +87,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /default/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
@@ -118,7 +121,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /\$ENV_VAR-[a-f0-9]{40}/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
@@ -134,7 +138,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /\$ENV_VAR-default/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 3764663fd74..f18b4883aaf 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -2,7 +2,10 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
+RSpec.describe Ci::CreatePipelineService,
+:yaml_processor_feature_flag_corectness, feature_category: :pipeline_authoring do
+ include RepoHelpers
+
context 'include:' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -16,14 +19,17 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
- before do
- allow(project.repository)
- .to receive(:blob_data_at).with(project.commit.id, '.gitlab-ci.yml')
- .and_return(config)
+ let(:project_files) do
+ {
+ '.gitlab-ci.yml' => config,
+ file_location => File.read(Rails.root.join(file_location))
+ }
+ end
- allow(project.repository)
- .to receive(:blob_data_at).with(project.commit.id, file_location)
- .and_return(File.read(Rails.root.join(file_location)))
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
+ end
end
shared_examples 'not including the file' do
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index ccb15bfa684..ecb24a61075 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -139,5 +139,74 @@ RSpec.describe Ci::CreatePipelineService, # rubocop: disable RSpec/FilePath
expect(pipeline).to be_created_successfully
end
end
+
+ describe 'pipeline includes count' do
+ before do
+ stub_const('Gitlab::Ci::Config::External::Context::MAX_INCLUDES', 2)
+ end
+
+ context 'when the includes count exceeds the maximum' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return({ includes: [{ file: 1 }, { file: 2 }, { file: 3 }] })
+ end
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including({ 'pipeline_includes_count' => 3 }))
+ .and_call_original
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+
+ context 'when the includes count does not exceed the maximum' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return({ includes: [{ file: 1 }, { file: 2 }] })
+ end
+ end
+
+ it 'does not create a log entry but it collects the data' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+
+ expect(service.logger.observations_hash)
+ .to match(a_hash_including({ 'pipeline_includes_count' => 2 }))
+ end
+ end
+
+ context 'when the includes data is nil' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return({})
+ end
+ end
+
+ it 'does not create a log entry' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+ end
+ end
+
+ context 'when the pipeline config_metadata is nil' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return(nil)
+ end
+ end
+
+ it 'does not create a log entry but it collects the data' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index b866293393b..26bb8b7d006 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, feature_category: :pipeline_authoring do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
let(:ref) { 'refs/heads/master' }
@@ -1166,7 +1166,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:ref) { 'refs/heads/master' }
it 'invalidates the pipeline with an empty jobs error' do
- expect(pipeline.errors[:base]).to include('No stages / jobs for this pipeline.')
+ expect(pipeline.errors[:base]).to include('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
diff --git a/spec/services/ci/create_pipeline_service/variables_spec.rb b/spec/services/ci/create_pipeline_service/variables_spec.rb
index e9e0cf2c6e0..fd138bde656 100644
--- a/spec/services/ci/create_pipeline_service/variables_spec.rb
+++ b/spec/services/ci/create_pipeline_service/variables_spec.rb
@@ -60,27 +60,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
{ key: 'VAR8', value: "value 8 $CI_PIPELINE_ID", public: true, masked: false, raw: true }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'creates the pipeline with a job that has all variables expanded' do
- expect(pipeline).to be_created_successfully
-
- expect(Ci::BuildRunnerPresenter.new(rspec).runner_variables).to include(
- { key: 'VAR1', value: "JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR2', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR3', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR4', value: "JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR5', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR6', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR7', value: "overridden value 7 #{pipeline.id}", public: true, masked: false },
- { key: 'VAR8', value: "value 8 #{pipeline.id}", public: true, masked: false }
- )
- end
- end
end
context 'when trigger variables have expand: true/false' do
@@ -109,22 +88,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
{ key: 'VAR3', value: "PIPELINEID-$CI_PIPELINE_ID and $VAR1", raw: true }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'creates the pipeline with a job that has all variables expanded' do
- expect(pipeline).to be_created_successfully
-
- expect(child.downstream_variables).to include(
- { key: 'VAR1', value: "PROJECTID-#{project.id}" },
- { key: 'VAR2', value: "PIPELINEID-#{pipeline.id} and PROJECTID-$CI_PROJECT_ID" },
- { key: 'VAR3', value: "PIPELINEID-#{pipeline.id} and PROJECTID-$CI_PROJECT_ID" }
- )
- end
- end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 8628e95ba80..b0ba07ea295 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :clean_gitlab_redis_cache do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
include ProjectForksHelper
let_it_be_with_refind(:project) { create(:project, :repository) }
@@ -684,7 +684,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
result = execute_service
expect(result).to be_error
- expect(result.message).to eq('No stages / jobs for this pipeline.')
+ expect(result.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(result.payload).not_to be_persisted
expect(Ci::Build.all).to be_empty
expect(Ci::Pipeline.count).to eq(0)
@@ -759,7 +760,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
stub_ci_pipeline_yaml_file(config)
end
- it 'creates the environment with tags' do
+ it 'creates the environment with tags', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
@@ -862,7 +863,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
stub_ci_pipeline_yaml_file(YAML.dump(ci_yaml))
end
- it 'creates a pipeline with the environment' do
+ it 'creates a pipeline with the environment', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
@@ -1311,9 +1312,10 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
}
end
- it 'has a job with environment' do
+ it 'has a job with environment', :sidekiq_inline do
expect(pipeline.builds.count).to eq(1)
expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
+ expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
expect(pipeline.builds.first.deployment).to be_created
end
end
@@ -1423,9 +1425,11 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:base]).to eq(['No stages / jobs for this pipeline.'])
+ expect(pipeline.errors[:base]).to eq(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
@@ -1633,7 +1637,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1669,7 +1674,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1697,7 +1703,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1727,7 +1734,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1755,7 +1763,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 5df590a1b78..711002e28af 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -61,6 +61,49 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
+ it 'sets accessibility level by default to public' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+
+ context 'when accessibility level passed as private' do
+ before do
+ params.merge!('accessibility' => 'private')
+ end
+
+ it 'sets accessibility level to private' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_private_accessibility
+ end
+ end
+
+ context 'when accessibility passed as public' do
+ before do
+ params.merge!('accessibility' => 'public')
+ end
+
+ it 'sets accessibility to public level' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+ end
+
+ context 'when accessibility passed as invalid value' do
+ before do
+ params.merge!('accessibility' => 'invalid_value')
+ end
+
+ it 'fails with argument error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
context 'when metadata file is also uploaded' do
let(:metadata_file) do
file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
@@ -82,6 +125,39 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
+ it 'sets accessibility by default to public' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+
+ context 'when accessibility level passed as private' do
+ before do
+ params.merge!('accessibility' => 'private')
+ end
+
+ it 'sets accessibility to private level' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_private_accessibility
+ end
+ end
+
+ context 'when accessibility passed as public' do
+ before do
+ params.merge!('accessibility' => 'public')
+ end
+
+ it 'sets accessibility level to public' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+ end
+
it 'sets expiration date according to application settings' do
expected_expire_at = 1.day.from_now
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 4f7663d7996..dd10c0df374 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -87,12 +87,9 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
end
- it 'resets project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact.project, :build_artifacts_size, -artifact.file.size)
- .and_call_original
-
- subject
+ it 'resets project statistics', :sidekiq_inline do
+ expect { subject }
+ .to change { artifact.project.statistics.reload.build_artifacts_size }.by(-artifact.file.size)
end
it 'does not remove the files' do
diff --git a/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb b/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
index b1a4741851b..ca36c923dcf 100644
--- a/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
@@ -3,23 +3,23 @@
require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyAssociationsService do
- let(:artifacts) { Ci::JobArtifact.all }
- let(:service) { described_class.new(artifacts) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
- let_it_be(:artifact, refind: true) do
- create(:ci_job_artifact)
- end
+ let_it_be(:artifact_1, refind: true) { create(:ci_job_artifact, :zip, project: project_1) }
+ let_it_be(:artifact_2, refind: true) { create(:ci_job_artifact, :zip, project: project_2) }
+ let_it_be(:artifact_3, refind: true) { create(:ci_job_artifact, :zip, project: project_1) }
- before do
- artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
- artifact.save!
- end
+ let(:artifacts) { Ci::JobArtifact.where(id: [artifact_1.id, artifact_2.id, artifact_3.id]) }
+ let(:service) { described_class.new(artifacts) }
describe '#destroy_records' do
it 'removes artifacts without updating statistics' do
- expect(ProjectStatistics).not_to receive(:increment_statistic)
+ expect_next_instance_of(Ci::JobArtifacts::DestroyBatchService) do |service|
+ expect(service).to receive(:execute).with(update_stats: false).and_call_original
+ end
- expect { service.destroy_records }.to change { Ci::JobArtifact.count }
+ expect { service.destroy_records }.to change { Ci::JobArtifact.count }.by(-3)
end
context 'when there are no artifacts' do
@@ -33,12 +33,21 @@ RSpec.describe Ci::JobArtifacts::DestroyAssociationsService do
describe '#update_statistics' do
before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
service.destroy_records
end
it 'updates project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact.project, :build_artifacts_size, -artifact.file.size)
+ project1_increments = [
+ have_attributes(amount: -artifact_1.size, ref: artifact_1.id),
+ have_attributes(amount: -artifact_3.size, ref: artifact_3.id)
+ ]
+ project2_increments = [have_attributes(amount: -artifact_2.size, ref: artifact_2.id)]
+
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).once
+ .with(project_1, :build_artifacts_size, match_array(project1_increments))
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).once
+ .with(project_2, :build_artifacts_size, match_array(project2_increments))
service.update_statistics
end
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 79920dcb2c7..cde42783d8c 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
create(:ci_job_artifact, :trace, :expired)
end
- describe '.execute' do
+ describe '#execute' do
subject(:execute) { service.execute }
it 'creates a deleted object for artifact with attached file' do
@@ -207,44 +207,58 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
end
end
- context 'ProjectStatistics' do
- it 'resets project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact_with_file.project, :build_artifacts_size, -artifact_with_file.file.size)
- .and_call_original
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact_without_file.project, :build_artifacts_size, 0)
- .and_call_original
+ context 'ProjectStatistics', :sidekiq_inline do
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+
+ let(:artifact_with_file) { create(:ci_job_artifact, :zip, project: project_1) }
+ let(:artifact_with_file_2) { create(:ci_job_artifact, :zip, project: project_1) }
+ let(:artifact_without_file) { create(:ci_job_artifact, project: project_2) }
+ let!(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id, artifact_with_file_2.id]) }
+
+ it 'updates project statistics by the relevant amount' do
+ expected_amount = -(artifact_with_file.size + artifact_with_file_2.size)
+
+ expect { execute }
+ .to change { project_1.statistics.reload.build_artifacts_size }.by(expected_amount)
+ .and change { project_2.statistics.reload.build_artifacts_size }.by(0)
+ end
+
+ it 'increments project statistics with artifact size as amount and job artifact id as ref' do
+ project_1_increments = [
+ have_attributes(amount: -artifact_with_file.size, ref: artifact_with_file.id),
+ have_attributes(amount: -artifact_with_file_2.file.size, ref: artifact_with_file_2.id)
+ ]
+ project_2_increments = [have_attributes(amount: 0, ref: artifact_without_file.id)]
+
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).with(project_1, :build_artifacts_size, match_array(project_1_increments))
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).with(project_2, :build_artifacts_size, match_array(project_2_increments))
execute
end
context 'with update_stats: false' do
- let_it_be(:extra_artifact_with_file) do
- create(:ci_job_artifact, :zip, project: artifact_with_file.project)
- end
-
- let(:artifacts) do
- Ci::JobArtifact.where(id: [artifact_with_file.id, extra_artifact_with_file.id,
- artifact_without_file.id, trace_artifact.id])
- end
+ subject(:execute) { service.execute(update_stats: false) }
it 'does not update project statistics' do
- expect(ProjectStatistics).not_to receive(:increment_statistic)
-
- service.execute(update_stats: false)
+ expect { execute }.not_to change { [project_1.statistics.reload.build_artifacts_size, project_2.statistics.reload.build_artifacts_size] }
end
- it 'returns size statistics' do
+ it 'returns statistic updates per project' do
+ project_1_updates = [
+ have_attributes(amount: -artifact_with_file.size, ref: artifact_with_file.id),
+ have_attributes(amount: -artifact_with_file_2.file.size, ref: artifact_with_file_2.id)
+ ]
+ project_2_updates = [have_attributes(amount: 0, ref: artifact_without_file.id)]
+
expected_updates = {
statistics_updates: {
- artifact_with_file.project => -(artifact_with_file.file.size + extra_artifact_with_file.file.size),
- artifact_without_file.project => 0
+ project_1 => match_array(project_1_updates),
+ project_2 => project_2_updates
}
}
- expect(service.execute(update_stats: false)).to match(
- a_hash_including(expected_updates))
+ expect(execute).to match(a_hash_including(expected_updates))
end
end
end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 2f2af9f6c85..c1669e0424a 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category: :continuous_integration do
+ include RepoHelpers
+
describe 'Pipeline Processing Service Tests With Yaml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -956,17 +958,16 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
end
- before do
- allow_next_instance_of(Repository) do |repository|
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.gitlab-ci.yml')
- .and_return(parent_config)
-
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.child.yml')
- .and_return(child_config)
+ let(:project_files) do
+ {
+ '.gitlab-ci.yml' => parent_config,
+ '.child.yml' => child_config
+ }
+ end
+
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
end
end
diff --git a/spec/services/clusters/aws/authorize_role_service_spec.rb b/spec/services/clusters/aws/authorize_role_service_spec.rb
deleted file mode 100644
index 17bbc372675..00000000000
--- a/spec/services/clusters/aws/authorize_role_service_spec.rb
+++ /dev/null
@@ -1,102 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::AuthorizeRoleService do
- subject { described_class.new(user, params: params).execute }
-
- let(:role) { create(:aws_role) }
- let(:user) { role.user }
- let(:credentials) { instance_double(Aws::Credentials) }
- let(:credentials_service) { instance_double(Clusters::Aws::FetchCredentialsService, execute: credentials) }
-
- let(:role_arn) { 'arn:my-role' }
- let(:region) { 'region' }
- let(:params) do
- params = ActionController::Parameters.new({
- cluster: {
- role_arn: role_arn,
- region: region
- }
- })
-
- params.require(:cluster).permit(:role_arn, :region)
- end
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .with(instance_of(Aws::Role)).and_return(credentials_service)
- end
-
- context 'role exists' do
- it 'updates the existing Aws::Role record and returns a set of credentials' do
- expect(subject.status).to eq(:ok)
- expect(subject.body).to eq(credentials)
- expect(role.reload.role_arn).to eq(role_arn)
- end
- end
-
- context 'errors' do
- shared_examples 'bad request' do
- it 'returns an empty hash' do
- expect(subject.status).to eq(:unprocessable_entity)
- expect(subject.body).to eq({ message: message })
- end
-
- it 'logs the error' do
- expect(::Gitlab::ErrorTracking).to receive(:track_exception)
-
- subject
- end
- end
-
- context 'role does not exist' do
- let(:user) { create(:user) }
- let(:message) { 'Error: Unable to find AWS role for current user' }
-
- include_examples 'bad request'
- end
-
- context 'supplied ARN is invalid' do
- let(:role_arn) { 'invalid' }
- let(:message) { 'Validation failed: Role arn must be a valid Amazon Resource Name' }
-
- include_examples 'bad request'
- end
-
- context 'client errors' do
- before do
- allow(credentials_service).to receive(:execute).and_raise(error)
- end
-
- context 'error fetching credentials' do
- let(:error) { Aws::STS::Errors::ServiceError.new(nil, 'error message') }
- let(:message) { 'AWS service error: error message' }
-
- include_examples 'bad request'
- end
-
- context 'error in assuming role' do
- let(:raw_message) { "User foo is not authorized to perform: sts:AssumeRole on resource bar" }
- let(:error) { Aws::STS::Errors::AccessDenied.new(nil, raw_message) }
- let(:message) { "Access denied: #{raw_message}" }
-
- include_examples 'bad request'
- end
-
- context 'credentials not configured' do
- let(:error) { Aws::Errors::MissingCredentialsError.new('error message') }
- let(:message) { "Error: No AWS credentials were supplied" }
-
- include_examples 'bad request'
- end
-
- context 'role not configured' do
- let(:error) { Clusters::Aws::FetchCredentialsService::MissingRoleError.new('error message') }
- let(:message) { "Error: No AWS provision role found for user" }
-
- include_examples 'bad request'
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/fetch_credentials_service_spec.rb b/spec/services/clusters/aws/fetch_credentials_service_spec.rb
deleted file mode 100644
index 0358ca1f535..00000000000
--- a/spec/services/clusters/aws/fetch_credentials_service_spec.rb
+++ /dev/null
@@ -1,139 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::FetchCredentialsService do
- describe '#execute' do
- let(:user) { create(:user) }
- let(:provider) { create(:cluster_provider_aws, region: 'ap-southeast-2') }
-
- let(:gitlab_access_key_id) { 'gitlab-access-key-id' }
- let(:gitlab_secret_access_key) { 'gitlab-secret-access-key' }
-
- let(:gitlab_credentials) { Aws::Credentials.new(gitlab_access_key_id, gitlab_secret_access_key) }
- let(:sts_client) { Aws::STS::Client.new(credentials: gitlab_credentials, region: region) }
- let(:assumed_role) { instance_double(Aws::AssumeRoleCredentials, credentials: assumed_role_credentials) }
-
- let(:assumed_role_credentials) { double }
-
- subject { described_class.new(provision_role, provider: provider).execute }
-
- context 'provision role is configured' do
- let(:provision_role) { create(:aws_role, user: user, region: 'custom-region') }
-
- before do
- stub_application_setting(eks_access_key_id: gitlab_access_key_id)
- stub_application_setting(eks_secret_access_key: gitlab_secret_access_key)
-
- expect(Aws::Credentials).to receive(:new)
- .with(gitlab_access_key_id, gitlab_secret_access_key)
- .and_return(gitlab_credentials)
-
- expect(Aws::STS::Client).to receive(:new)
- .with(credentials: gitlab_credentials, region: region)
- .and_return(sts_client)
-
- expect(Aws::AssumeRoleCredentials).to receive(:new)
- .with(
- client: sts_client,
- role_arn: provision_role.role_arn,
- role_session_name: session_name,
- external_id: provision_role.role_external_id,
- policy: session_policy
- ).and_return(assumed_role)
- end
-
- context 'provider is specified' do
- let(:region) { provider.region }
- let(:session_name) { "gitlab-eks-cluster-#{provider.cluster_id}-user-#{user.id}" }
- let(:session_policy) { nil }
-
- it { is_expected.to eq assumed_role_credentials }
- end
-
- context 'provider is not specified' do
- let(:provider) { nil }
- let(:region) { provision_role.region }
- let(:session_name) { "gitlab-eks-autofill-user-#{user.id}" }
- let(:session_policy) { 'policy-document' }
-
- subject { described_class.new(provision_role, provider: provider).execute }
-
- before do
- stub_file_read(Rails.root.join('vendor', 'aws', 'iam', 'eks_cluster_read_only_policy.json'), content: session_policy)
- end
-
- it { is_expected.to eq assumed_role_credentials }
-
- context 'region is not specified' do
- let(:region) { Clusters::Providers::Aws::DEFAULT_REGION }
- let(:provision_role) { create(:aws_role, user: user, region: nil) }
-
- it { is_expected.to eq assumed_role_credentials }
- end
- end
- end
-
- context 'provision role is not configured' do
- let(:provision_role) { nil }
-
- it 'raises an error' do
- expect { subject }.to raise_error(described_class::MissingRoleError, 'AWS provisioning role not configured')
- end
- end
-
- context 'with an instance profile attached to an IAM role' do
- let(:sts_client) { Aws::STS::Client.new(region: region, stub_responses: true) }
- let(:provision_role) { create(:aws_role, user: user, region: 'custom-region') }
-
- before do
- stub_application_setting(eks_access_key_id: nil)
- stub_application_setting(eks_secret_access_key: nil)
-
- expect(Aws::STS::Client).to receive(:new)
- .with(region: region)
- .and_return(sts_client)
-
- expect(Aws::AssumeRoleCredentials).to receive(:new)
- .with(
- client: sts_client,
- role_arn: provision_role.role_arn,
- role_session_name: session_name,
- external_id: provision_role.role_external_id,
- policy: session_policy
- ).and_call_original
- end
-
- context 'provider is specified' do
- let(:region) { provider.region }
- let(:session_name) { "gitlab-eks-cluster-#{provider.cluster_id}-user-#{user.id}" }
- let(:session_policy) { nil }
-
- it 'returns credentials', :aggregate_failures do
- expect(subject.access_key_id).to be_present
- expect(subject.secret_access_key).to be_present
- expect(subject.session_token).to be_present
- end
- end
-
- context 'provider is not specified' do
- let(:provider) { nil }
- let(:region) { provision_role.region }
- let(:session_name) { "gitlab-eks-autofill-user-#{user.id}" }
- let(:session_policy) { 'policy-document' }
-
- before do
- stub_file_read(Rails.root.join('vendor', 'aws', 'iam', 'eks_cluster_read_only_policy.json'), content: session_policy)
- end
-
- subject { described_class.new(provision_role, provider: provider).execute }
-
- it 'returns credentials', :aggregate_failures do
- expect(subject.access_key_id).to be_present
- expect(subject.secret_access_key).to be_present
- expect(subject.session_token).to be_present
- end
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/finalize_creation_service_spec.rb b/spec/services/clusters/aws/finalize_creation_service_spec.rb
deleted file mode 100644
index 6b0cb86eff0..00000000000
--- a/spec/services/clusters/aws/finalize_creation_service_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::FinalizeCreationService do
- describe '#execute' do
- let(:provider) { create(:cluster_provider_aws, :creating) }
- let(:platform) { provider.cluster.platform_kubernetes }
-
- let(:create_service_account_service) { double(execute: true) }
- let(:fetch_token_service) { double(execute: gitlab_token) }
- let(:kube_client) { double(create_config_map: true) }
- let(:cluster_stack) { double(outputs: [endpoint_output, cert_output, node_role_output]) }
- let(:node_auth_config_map) { double }
-
- let(:endpoint_output) { double(output_key: 'ClusterEndpoint', output_value: api_url) }
- let(:cert_output) { double(output_key: 'ClusterCertificate', output_value: Base64.encode64(ca_pem)) }
- let(:node_role_output) { double(output_key: 'NodeInstanceRole', output_value: node_role) }
-
- let(:api_url) { 'https://kubernetes.example.com' }
- let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem')) }
- let(:gitlab_token) { 'gitlab-token' }
- let(:iam_token) { 'iam-token' }
- let(:node_role) { 'arn::aws::iam::123456789012:role/node-role' }
-
- subject { described_class.new.execute(provider) }
-
- before do
- allow(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:gitlab_creator)
- .with(kube_client, rbac: true)
- .and_return(create_service_account_service)
-
- allow(Clusters::Kubernetes::FetchKubernetesTokenService).to receive(:new)
- .with(
- kube_client,
- Clusters::Kubernetes::GITLAB_ADMIN_TOKEN_NAME,
- Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAMESPACE)
- .and_return(fetch_token_service)
-
- allow(Gitlab::Kubernetes::KubeClient).to receive(:new)
- .with(
- api_url,
- auth_options: { bearer_token: iam_token },
- ssl_options: {
- verify_ssl: OpenSSL::SSL::VERIFY_PEER,
- cert_store: instance_of(OpenSSL::X509::Store)
- },
- http_proxy_uri: nil
- )
- .and_return(kube_client)
-
- allow(provider.api_client).to receive(:describe_stacks)
- .with(stack_name: provider.cluster.name)
- .and_return(double(stacks: [cluster_stack]))
-
- allow(Kubeclient::AmazonEksCredentials).to receive(:token)
- .with(provider.credentials, provider.cluster.name)
- .and_return(iam_token)
-
- allow(Gitlab::Kubernetes::ConfigMaps::AwsNodeAuth).to receive(:new)
- .with(node_role).and_return(double(generate: node_auth_config_map))
- end
-
- it 'configures the provider and platform' do
- subject
-
- expect(provider).to be_created
- expect(platform.api_url).to eq(api_url)
- expect(platform.ca_pem).to eq(ca_pem)
- expect(platform.token).to eq(gitlab_token)
- expect(platform).to be_rbac
- end
-
- it 'calls the create_service_account_service' do
- expect(create_service_account_service).to receive(:execute).once
-
- subject
- end
-
- it 'configures cluster node authentication' do
- expect(kube_client).to receive(:create_config_map).with(node_auth_config_map).once
-
- subject
- end
-
- describe 'error handling' do
- shared_examples 'provision error' do |message|
- it "sets the status to :errored with an appropriate error message" do
- subject
-
- expect(provider).to be_errored
- expect(provider.status_reason).to include(message)
- end
- end
-
- context 'failed to request stack details from AWS' do
- before do
- allow(provider.api_client).to receive(:describe_stacks)
- .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, "Error message"))
- end
-
- include_examples 'provision error', 'Failed to fetch CloudFormation stack'
- end
-
- context 'failed to create auth config map' do
- before do
- allow(kube_client).to receive(:create_config_map)
- .and_raise(Kubeclient::HttpError.new(500, 'Error', nil))
- end
-
- include_examples 'provision error', 'Failed to run Kubeclient'
- end
-
- context 'failed to save records' do
- before do
- allow(provider.cluster).to receive(:save!)
- .and_raise(ActiveRecord::RecordInvalid)
- end
-
- include_examples 'provision error', 'Failed to configure EKS provider'
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/provision_service_spec.rb b/spec/services/clusters/aws/provision_service_spec.rb
deleted file mode 100644
index 5efac29ec1e..00000000000
--- a/spec/services/clusters/aws/provision_service_spec.rb
+++ /dev/null
@@ -1,130 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::ProvisionService do
- describe '#execute' do
- let(:provider) { create(:cluster_provider_aws) }
-
- let(:provision_role) { create(:aws_role, user: provider.created_by_user) }
- let(:client) { instance_double(Aws::CloudFormation::Client, create_stack: true) }
- let(:cloudformation_template) { double }
- let(:credentials) do
- instance_double(
- Aws::Credentials,
- access_key_id: 'key',
- secret_access_key: 'secret',
- session_token: 'token'
- )
- end
-
- let(:parameters) do
- [
- { parameter_key: 'ClusterName', parameter_value: provider.cluster.name },
- { parameter_key: 'ClusterRole', parameter_value: provider.role_arn },
- { parameter_key: 'KubernetesVersion', parameter_value: provider.kubernetes_version },
- { parameter_key: 'ClusterControlPlaneSecurityGroup', parameter_value: provider.security_group_id },
- { parameter_key: 'VpcId', parameter_value: provider.vpc_id },
- { parameter_key: 'Subnets', parameter_value: provider.subnet_ids.join(',') },
- { parameter_key: 'NodeAutoScalingGroupDesiredCapacity', parameter_value: provider.num_nodes.to_s },
- { parameter_key: 'NodeInstanceType', parameter_value: provider.instance_type },
- { parameter_key: 'KeyName', parameter_value: provider.key_name }
- ]
- end
-
- subject { described_class.new.execute(provider) }
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .with(provision_role, provider: provider)
- .and_return(double(execute: credentials))
-
- allow(provider).to receive(:api_client)
- .and_return(client)
-
- stub_file_read(Rails.root.join('vendor', 'aws', 'cloudformation', 'eks_cluster.yaml'), content: cloudformation_template)
- end
-
- it 'updates the provider status to :creating and configures the provider with credentials' do
- subject
-
- expect(provider).to be_creating
- expect(provider.access_key_id).to eq 'key'
- expect(provider.secret_access_key).to eq 'secret'
- expect(provider.session_token).to eq 'token'
- end
-
- it 'creates a CloudFormation stack' do
- expect(client).to receive(:create_stack).with(
- stack_name: provider.cluster.name,
- template_body: cloudformation_template,
- parameters: parameters,
- capabilities: ["CAPABILITY_IAM"]
- )
-
- subject
- end
-
- it 'schedules a worker to monitor creation status' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
- .with(Clusters::Aws::VerifyProvisionStatusService::INITIAL_INTERVAL, provider.cluster_id)
-
- subject
- end
-
- describe 'error handling' do
- shared_examples 'provision error' do |message|
- it "sets the status to :errored with an appropriate error message" do
- subject
-
- expect(provider).to be_errored
- expect(provider.status_reason).to include(message)
- end
- end
-
- context 'invalid state transition' do
- before do
- allow(provider).to receive(:make_creating).and_return(false)
- end
-
- include_examples 'provision error', 'Failed to update provider record'
- end
-
- context 'AWS role is not configured' do
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_raise(Clusters::Aws::FetchCredentialsService::MissingRoleError)
- end
-
- include_examples 'provision error', 'Amazon role is not configured'
- end
-
- context 'AWS credentials are not configured' do
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_raise(Aws::Errors::MissingCredentialsError)
- end
-
- include_examples 'provision error', 'Amazon credentials are not configured'
- end
-
- context 'Authentication failure' do
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_raise(Aws::STS::Errors::ServiceError.new(double, 'Error message'))
- end
-
- include_examples 'provision error', 'Amazon authentication failed'
- end
-
- context 'CloudFormation failure' do
- before do
- allow(client).to receive(:create_stack)
- .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, 'Error message'))
- end
-
- include_examples 'provision error', 'Amazon CloudFormation request failed'
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/verify_provision_status_service_spec.rb b/spec/services/clusters/aws/verify_provision_status_service_spec.rb
deleted file mode 100644
index b9a58b97842..00000000000
--- a/spec/services/clusters/aws/verify_provision_status_service_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::VerifyProvisionStatusService do
- describe '#execute' do
- let(:provider) { create(:cluster_provider_aws) }
-
- let(:stack) { double(stack_status: stack_status, creation_time: creation_time) }
- let(:creation_time) { 1.minute.ago }
-
- subject { described_class.new.execute(provider) }
-
- before do
- allow(provider.api_client).to receive(:describe_stacks)
- .with(stack_name: provider.cluster.name)
- .and_return(double(stacks: [stack]))
- end
-
- shared_examples 'provision error' do |message|
- it "sets the status to :errored with an appropriate error message" do
- subject
-
- expect(provider).to be_errored
- expect(provider.status_reason).to include(message)
- end
- end
-
- context 'stack creation is still in progress' do
- let(:stack_status) { 'CREATE_IN_PROGRESS' }
- let(:verify_service) { double(execute: true) }
-
- it 'schedules a worker to check again later' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
- .with(described_class::POLL_INTERVAL, provider.cluster_id)
-
- subject
- end
-
- context 'stack creation is taking too long' do
- let(:creation_time) { 1.hour.ago }
-
- include_examples 'provision error', 'Kubernetes cluster creation time exceeds timeout'
- end
- end
-
- context 'stack creation is complete' do
- let(:stack_status) { 'CREATE_COMPLETE' }
- let(:finalize_service) { double(execute: true) }
-
- it 'finalizes creation' do
- expect(Clusters::Aws::FinalizeCreationService).to receive(:new).and_return(finalize_service)
- expect(finalize_service).to receive(:execute).with(provider).once
-
- subject
- end
- end
-
- context 'stack creation failed' do
- let(:stack_status) { 'CREATE_FAILED' }
-
- include_examples 'provision error', 'Unexpected status'
- end
-
- context 'error communicating with CloudFormation API' do
- let(:stack_status) { 'CREATE_IN_PROGRESS' }
-
- before do
- allow(provider.api_client).to receive(:describe_stacks)
- .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, 'Error message'))
- end
-
- include_examples 'provision error', 'Amazon CloudFormation request failed'
- end
- end
-end
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index 6e252bee7c0..95f10cdbd80 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -54,7 +54,6 @@ RSpec.describe Clusters::CreateService do
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
it 'creates another cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
expect { subject }.to change { Clusters::Cluster.count }.by(1)
end
end
diff --git a/spec/services/clusters/gcp/fetch_operation_service_spec.rb b/spec/services/clusters/gcp/fetch_operation_service_spec.rb
deleted file mode 100644
index 990cc745382..00000000000
--- a/spec/services/clusters/gcp/fetch_operation_service_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::FetchOperationService do
- include GoogleApi::CloudPlatformHelpers
-
- describe '#execute' do
- let(:provider) { create(:cluster_provider_gcp, :creating) }
- let(:gcp_project_id) { provider.gcp_project_id }
- let(:zone) { provider.zone }
- let(:operation_id) { provider.operation_id }
-
- shared_examples 'success' do
- it 'yields' do
- expect { |b| described_class.new.execute(provider, &b) }
- .to yield_with_args
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- expect { |b| described_class.new.execute(provider, &b) }
- .not_to yield_with_args
- expect(provider.reload).to be_errored
- end
- end
-
- context 'when the operation is fetched successfully' do
- before do
- stub_cloud_platform_get_zone_operation(gcp_project_id, zone, operation_id)
- end
-
- it_behaves_like 'success'
- end
-
- context 'when an Internal Server Error occurs' do
- before do
- stub_cloud_platform_get_zone_operation_error(gcp_project_id, zone, operation_id)
- end
-
- it_behaves_like 'error'
- end
- end
-end
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
deleted file mode 100644
index 9c553d0eec2..00000000000
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
- include GoogleApi::CloudPlatformHelpers
- include KubernetesHelpers
-
- let(:cluster) { create(:cluster, :project, :providing_by_gcp) }
- let(:provider) { cluster.provider }
- let(:platform) { cluster.platform }
- let(:endpoint) { '111.111.111.111' }
- let(:api_url) { 'https://' + endpoint }
- let(:secret_name) { 'gitlab-token' }
- let(:token) { 'sample-token' }
- let(:namespace) { "#{cluster.project.path}-#{cluster.project.id}" }
-
- subject { described_class.new.execute(provider) }
-
- shared_examples 'success' do
- it 'configures provider and kubernetes' do
- subject
-
- expect(provider).to be_created
- end
-
- it 'properly configures database models' do
- subject
-
- cluster.reload
-
- expect(provider.endpoint).to eq(endpoint)
- expect(platform.api_url).to eq(api_url)
- expect(platform.ca_cert).to eq(Base64.decode64(load_sample_cert).strip)
- expect(platform.token).to eq(token)
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- subject
-
- expect(provider.reload).to be_errored
- end
- end
-
- shared_examples 'kubernetes information not successfully fetched' do
- context 'when fetching the GKE cluster info fails' do
- before do
- stub_cloud_platform_get_zone_cluster_error(provider.gcp_project_id, provider.zone, cluster.name)
- end
-
- it_behaves_like 'error'
- end
-
- context 'when token is empty' do
- let(:token) { '' }
-
- it_behaves_like 'error'
- end
-
- context 'when fetching the kubernetes token fails' do
- before do
- stub_kubeclient_get_secret_error(api_url, secret_name, namespace: 'default')
- end
-
- it_behaves_like 'error'
- end
-
- context 'when service account creation fails' do
- before do
- stub_kubeclient_create_service_account_error(api_url, namespace: 'default')
- end
-
- it_behaves_like 'error'
- end
- end
-
- shared_context 'kubernetes information successfully fetched' do
- before do
- stub_cloud_platform_get_zone_cluster(
- provider.gcp_project_id, provider.zone, cluster.name, { endpoint: endpoint }
- )
-
- stub_kubeclient_discover(api_url)
- stub_kubeclient_get_namespace(api_url)
- stub_kubeclient_create_namespace(api_url)
- stub_kubeclient_get_service_account_error(api_url, 'gitlab')
- stub_kubeclient_create_service_account(api_url)
- stub_kubeclient_create_secret(api_url)
- stub_kubeclient_put_secret(api_url, 'gitlab-token')
-
- stub_kubeclient_get_secret(
- api_url,
- metadata_name: secret_name,
- token: Base64.encode64(token),
- namespace: 'default'
- )
-
- stub_kubeclient_put_cluster_role_binding(api_url, 'gitlab-admin')
- end
- end
-
- context 'With a legacy ABAC cluster' do
- before do
- provider.legacy_abac = true
- end
-
- include_context 'kubernetes information successfully fetched'
-
- it_behaves_like 'success'
-
- it 'uses ABAC authorization type' do
- subject
- cluster.reload
-
- expect(platform).to be_abac
- expect(platform.authorization_type).to eq('abac')
- end
-
- it_behaves_like 'kubernetes information not successfully fetched'
- end
-
- context 'With an RBAC cluster' do
- before do
- provider.legacy_abac = false
- end
-
- include_context 'kubernetes information successfully fetched'
-
- it_behaves_like 'success'
-
- it 'uses RBAC authorization type' do
- subject
- cluster.reload
-
- expect(platform).to be_rbac
- expect(platform.authorization_type).to eq('rbac')
- end
-
- it_behaves_like 'kubernetes information not successfully fetched'
- end
-
- context 'With a Cloud Run cluster' do
- before do
- provider.cloud_run = true
- end
-
- include_context 'kubernetes information successfully fetched'
-
- it_behaves_like 'success'
-
- it 'has knative pre-installed' do
- subject
- cluster.reload
-
- expect(cluster.application_knative).to be_present
- expect(cluster.application_knative).to be_pre_installed
- end
- end
-end
diff --git a/spec/services/clusters/gcp/provision_service_spec.rb b/spec/services/clusters/gcp/provision_service_spec.rb
deleted file mode 100644
index c8b7f628e5b..00000000000
--- a/spec/services/clusters/gcp/provision_service_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::ProvisionService do
- include GoogleApi::CloudPlatformHelpers
-
- describe '#execute' do
- let(:provider) { create(:cluster_provider_gcp, :scheduled) }
- let(:gcp_project_id) { provider.gcp_project_id }
- let(:zone) { provider.zone }
-
- shared_examples 'success' do
- it 'schedules a worker for status monitoring' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
-
- described_class.new.execute(provider)
-
- expect(provider.reload).to be_creating
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- described_class.new.execute(provider)
-
- expect(provider.reload).to be_errored
- end
- end
-
- context 'when the provision request succeeds' do
- before do
- stub_cloud_platform_create_cluster(gcp_project_id, zone)
- end
-
- it_behaves_like 'success'
- end
-
- context 'when operation status is unexpected' do
- before do
- stub_cloud_platform_create_cluster(
- gcp_project_id, zone,
- {
- "status": 'unexpected'
- })
- end
-
- it_behaves_like 'error'
- end
-
- context 'when selfLink is unexpected' do
- before do
- stub_cloud_platform_create_cluster(
- gcp_project_id, zone,
- {
- "selfLink": 'unexpected'
- })
- end
-
- it_behaves_like 'error'
- end
-
- context 'when an Internal Server Error occurs' do
- before do
- stub_cloud_platform_create_cluster_error(gcp_project_id, zone)
- end
-
- it_behaves_like 'error'
- end
- end
-end
diff --git a/spec/services/clusters/gcp/verify_provision_status_service_spec.rb b/spec/services/clusters/gcp/verify_provision_status_service_spec.rb
deleted file mode 100644
index ffe4516c02b..00000000000
--- a/spec/services/clusters/gcp/verify_provision_status_service_spec.rb
+++ /dev/null
@@ -1,111 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
- include GoogleApi::CloudPlatformHelpers
-
- describe '#execute' do
- let(:provider) { create(:cluster_provider_gcp, :creating) }
- let(:gcp_project_id) { provider.gcp_project_id }
- let(:zone) { provider.zone }
- let(:operation_id) { provider.operation_id }
-
- shared_examples 'continue_creation' do
- it 'schedules a worker for status monitoring' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
-
- described_class.new.execute(provider)
- end
- end
-
- shared_examples 'finalize_creation' do
- it 'finalizes cluster creation' do
- expect_next_instance_of(Clusters::Gcp::FinalizeCreationService) do |instance|
- expect(instance).to receive(:execute)
- end
-
- described_class.new.execute(provider)
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- described_class.new.execute(provider)
-
- expect(provider.reload).to be_errored
- end
- end
-
- context 'when operation status is RUNNING' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'RUNNING',
- "startTime": 1.minute.ago.strftime("%FT%TZ")
- })
- end
-
- it_behaves_like 'continue_creation'
-
- context 'when cluster creation time exceeds timeout' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'RUNNING',
- "startTime": 30.minutes.ago.strftime("%FT%TZ")
- })
- end
-
- it_behaves_like 'error'
- end
- end
-
- context 'when operation status is PENDING' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'PENDING',
- "startTime": 1.minute.ago.strftime("%FT%TZ")
- })
- end
-
- it_behaves_like 'continue_creation'
- end
-
- context 'when operation status is DONE' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'DONE'
- })
- end
-
- it_behaves_like 'finalize_creation'
- end
-
- context 'when operation status is unexpected' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'unexpected'
- })
- end
-
- it_behaves_like 'error'
- end
-
- context 'when fetching the operation status fails' do
- before do
- stub_cloud_platform_get_zone_operation_error(gcp_project_id, zone, operation_id)
- end
-
- it_behaves_like 'error'
- end
- end
-end
diff --git a/spec/services/database/consistency_check_service_spec.rb b/spec/services/database/consistency_check_service_spec.rb
index d7dee50f7c2..6288fedfb59 100644
--- a/spec/services/database/consistency_check_service_spec.rb
+++ b/spec/services/database/consistency_check_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Database::ConsistencyCheckService, feature_category: :database do
+RSpec.describe Database::ConsistencyCheckService, feature_category: :pods do
let(:batch_size) { 5 }
let(:max_batches) { 2 }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index c69df5f2eb9..a87494d87f7 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe DesignManagement::SaveDesignsService do
+RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_management do
include DesignManagementTestHelpers
include ConcurrentHelpers
@@ -242,6 +242,27 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(updated_designs.first.versions.size).to eq(1)
end
end
+
+ context 'when detecting content type' do
+ it 'detects content type when feature flag is enabled' do
+ expect_next_instance_of(::Lfs::FileTransformer) do |file_transformer|
+ expect(file_transformer).to receive(:new_file)
+ .with(anything, anything, hash_including(detect_content_type: true)).and_call_original
+ end
+
+ run_service
+ end
+
+ it 'skips content type detection when feature flag is disabled' do
+ stub_feature_flags(design_management_allow_dangerous_images: false)
+ expect_next_instance_of(::Lfs::FileTransformer) do |file_transformer|
+ expect(file_transformer).to receive(:new_file)
+ .with(anything, anything, hash_including(detect_content_type: false)).and_call_original
+ end
+
+ run_service
+ end
+ end
end
context 'when a design has not changed since its previous version' do
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index 9cc27973bcb..a6e1bad30ce 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Discussions::ResolveService do
+RSpec.describe Discussions::ResolveService, feature_category: :code_review_workflow do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
@@ -46,6 +46,12 @@ RSpec.describe Discussions::ResolveService do
service.execute
end
+ it 'sends GraphQL triggers' do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(discussion.noteable)
+
+ service.execute
+ end
+
context 'with a project that requires all discussion to be resolved' do
before do
project.update!(only_allow_merge_if_all_discussions_are_resolved: true)
@@ -122,6 +128,12 @@ RSpec.describe Discussions::ResolveService do
service.execute
end
+
+ it 'does not send GraphQL triggers' do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated).with(discussion.noteable)
+
+ service.execute
+ end
end
context 'when resolving a discussion' do
diff --git a/spec/services/discussions/unresolve_service_spec.rb b/spec/services/discussions/unresolve_service_spec.rb
index 0009239232c..e9f58e4e10e 100644
--- a/spec/services/discussions/unresolve_service_spec.rb
+++ b/spec/services/discussions/unresolve_service_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Discussions::UnresolveService do
+RSpec.describe Discussions::UnresolveService, feature_category: :code_review_workflow do
describe "#execute" do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
@@ -29,5 +29,32 @@ RSpec.describe Discussions::UnresolveService do
service.execute
end
+
+ it "sends GraphQL triggers" do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(discussion.noteable)
+
+ service.execute
+ end
+
+ context "when there are existing unresolved discussions" do
+ before do
+ create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion
+ end
+
+ it "does not send a GraphQL triggers" do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ service.execute
+ end
+ end
+
+ context "when the noteable is not a merge request" do
+ it "does not send a GraphQL triggers" do
+ expect(discussion).to receive(:for_merge_request?).and_return(false)
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ service.execute
+ end
+ end
end
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 81443eed7d3..44fe9063ac9 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -78,6 +78,10 @@ RSpec.describe DraftNotes::PublishService do
end
end
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { publish }
+ end
+
it 'does not publish any draft note' do
expect { publish }.not_to change { DraftNote.count }
end
@@ -97,6 +101,10 @@ RSpec.describe DraftNotes::PublishService do
end
end
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { publish }
+ end
+
it 'returns success' do
result = publish
diff --git a/spec/services/environments/stop_stale_service_spec.rb b/spec/services/environments/stop_stale_service_spec.rb
new file mode 100644
index 00000000000..46d770c30cc
--- /dev/null
+++ b/spec/services/environments/stop_stale_service_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::StopStaleService,
+ :clean_gitlab_redis_shared_state,
+ :sidekiq_inline,
+ feature_category: :continuous_delivery do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { { after: nil } }
+ let(:service) { described_class.new(project, user, params) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:stale_environment) { create(:environment, project: project, updated_at: 2.weeks.ago) }
+ let_it_be(:stale_environment2) { create(:environment, project: project, updated_at: 2.weeks.ago) }
+ let_it_be(:recent_environment) { create(:environment, project: project, updated_at: Date.today) }
+
+ let_it_be(:params) { { before: 1.week.ago } }
+
+ before do
+ allow(service).to receive(:can?).with(user, :stop_environment, project).and_return(true)
+ end
+
+ it 'only stops stale environments' do
+ spy_service = Environments::AutoStopWorker.new
+
+ allow(Environments::AutoStopWorker).to receive(:new) { spy_service }
+
+ expect(spy_service).to receive(:perform).with(stale_environment.id).and_call_original
+ expect(spy_service).to receive(:perform).with(stale_environment2.id).and_call_original
+ expect(spy_service).not_to receive(:perform).with(recent_environment.id)
+
+ expect(Environment).to receive(:deployed_and_updated_before).with(project.id, params[:before]).and_call_original
+ expect(Environment).to receive(:without_protected).with(project).and_call_original
+
+ expect(subject.success?).to be_truthy
+
+ expect(stale_environment.reload).to be_stopped
+ expect(stale_environment2.reload).to be_stopped
+ expect(recent_environment.reload).to be_available
+ end
+ end
+end
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index 1c9bde70af3..1a32faad948 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe FeatureFlags::CreateService do
end
end
- it 'creates audit event' do
+ it 'creates audit event', :with_license do
expect { subject }.to change { AuditEvent.count }.by(1)
expect(AuditEvent.last.details[:custom_message]).to start_with('Created feature flag feature_flag with description "description".')
expect(AuditEvent.last.details[:custom_message]).to include('Created strategy "default" with scopes "*".')
diff --git a/spec/services/feature_flags/destroy_service_spec.rb b/spec/services/feature_flags/destroy_service_spec.rb
index 740923db9b6..b2793dc0560 100644
--- a/spec/services/feature_flags/destroy_service_spec.rb
+++ b/spec/services/feature_flags/destroy_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe FeatureFlags::DestroyService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
end
- it 'creates audit log' do
+ it 'creates audit log', :with_license do
expect { subject }.to change { AuditEvent.count }.by(1)
expect(audit_event_message).to eq("Deleted feature flag #{feature_flag.name}.")
end
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 8f985d34961..1c5af71a50a 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe FeatureFlags::UpdateService do
+RSpec.describe FeatureFlags::UpdateService, :with_license do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
diff --git a/spec/services/files/base_service_spec.rb b/spec/services/files/base_service_spec.rb
new file mode 100644
index 00000000000..57fb378f1a0
--- /dev/null
+++ b/spec/services/files/base_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Files::BaseService, feature_category: :source_code_management do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+ let(:params) { {} }
+
+ subject(:author_email) { described_class.new(project, user, params).instance_variable_get(:@author_email) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ context 'with no namespace_commit_emails' do
+ it 'sets @author_email to user default email' do
+ expect(author_email).to eq(user.email)
+ end
+ end
+
+ context 'with an author_email in params and namespace_commit_email' do
+ let(:params) { { author_email: 'email_from_params@example.com' } }
+
+ before do
+ create(:namespace_commit_email, user: user, namespace: group)
+ end
+
+ it 'gives precedence to the parameter value for @author_email' do
+ expect(author_email).to eq('email_from_params@example.com')
+ end
+ end
+
+ context 'with a project namespace_commit_email' do
+ it 'sets @author_email to the project namespace_commit_email' do
+ namespace_commit_email = create(:namespace_commit_email, user: user, namespace: project.project_namespace)
+
+ expect(author_email).to eq(namespace_commit_email.email.email)
+ end
+ end
+
+ context 'with a group namespace_commit_email' do
+ it 'sets @author_email to the group namespace_commit_email' do
+ namespace_commit_email = create(:namespace_commit_email, user: user, namespace: group)
+
+ expect(author_email).to eq(namespace_commit_email.email.email)
+ end
+ end
+
+ context 'with a project and group namespace_commit_email' do
+ it 'sets @author_email to the project namespace_commit_email' do
+ namespace_commit_email = create(:namespace_commit_email, user: user, namespace: project.project_namespace)
+ create(:namespace_commit_email, user: user, namespace: group)
+
+ expect(author_email).to eq(namespace_commit_email.email.email)
+ end
+ end
+end
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
index d6ce40f413b..ec42a728409 100644
--- a/spec/services/groups/import_export/export_service_spec.rb
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -56,21 +56,11 @@ RSpec.describe Groups::ImportExport::ExportService do
end
it 'saves the models using ndjson tree saver' do
- stub_feature_flags(group_export_ndjson: true)
-
expect(Gitlab::ImportExport::Group::TreeSaver).to receive(:new).and_call_original
service.execute
end
- it 'saves the models using legacy tree saver' do
- stub_feature_flags(group_export_ndjson: false)
-
- expect(Gitlab::ImportExport::Group::LegacyTreeSaver).to receive(:new).and_call_original
-
- service.execute
- end
-
it 'compresses and removes tmp files' do
expect(group.import_export_upload).to be_nil
expect(Gitlab::ImportExport::Saver).to receive(:new).and_call_original
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index d41acbcc2de..972b12d7ee5 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -59,32 +59,32 @@ RSpec.describe Groups::ImportExport::ImportService do
end
end
- context 'with group_import_ndjson feature flag disabled' do
+ context 'when importing a ndjson export' do
let(:user) { create(:user) }
let(:group) { create(:group) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
+
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
subject(:service) { described_class.new(group: group, user: user) }
before do
- stub_feature_flags(group_import_ndjson: false)
-
- group.add_owner(user)
-
ImportExportUpload.create!(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
allow(import_logger).to receive(:error)
allow(import_logger).to receive(:info)
+ allow(import_logger).to receive(:warn)
+ allow(FileUtils).to receive(:rm_rf).and_call_original
end
- context 'with a json file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
-
- it 'uses LegacyTreeRestorer to import the file' do
- expect(Gitlab::ImportExport::Group::LegacyTreeRestorer).to receive(:new).and_call_original
+ context 'when user has correct permissions' do
+ before do
+ group.add_owner(user)
+ end
- service.execute
+ it 'imports group structure successfully' do
+ expect(service.execute).to be_truthy
end
it 'tracks the event' do
@@ -95,317 +95,151 @@ RSpec.describe Groups::ImportExport::ImportService do
action: 'create',
label: 'import_group_from_file'
)
- end
- end
-
- context 'with a ndjson file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
- it 'fails to import' do
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error, 'Incorrect JSON format')
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
- end
- end
-
- context 'with group_import_ndjson feature flag enabled' do
- before do
- stub_feature_flags(group_import_ndjson: true)
- end
-
- context 'when importing a ndjson export' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
- let(:import_logger) { instance_double(Gitlab::Import::Logger) }
-
- subject(:service) { described_class.new(group: group, user: user) }
-
- before do
- ImportExportUpload.create!(group: group, import_file: import_file)
+ it 'removes import file' do
+ service.execute
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
- allow(import_logger).to receive(:error)
- allow(import_logger).to receive(:info)
- allow(import_logger).to receive(:warn)
- allow(FileUtils).to receive(:rm_rf).and_call_original
+ expect(group.import_export_upload.import_file.file).to be_nil
end
- context 'when user has correct permissions' do
- before do
- group.add_owner(user)
- end
+ it 'removes tmp files' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- it 'imports group structure successfully' do
- expect(service.execute).to be_truthy
- end
-
- it 'tracks the event' do
- service.execute
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_group_from_file'
- )
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
-
- it 'removes import file' do
- service.execute
-
- expect(group.import_export_upload.import_file.file).to be_nil
- end
-
- it 'removes tmp files' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
-
- service.execute
-
- expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
- expect(Dir.exist?(shared.base_path)).to eq(false)
- end
-
- it 'logs the import success' do
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- ).once
+ service.execute
- service.execute
- end
+ expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
+ expect(Dir.exist?(shared.base_path)).to eq(false)
end
- context 'when user does not have correct permissions' do
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- )
+ it 'logs the import success' do
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ ).once
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
-
- it 'tracks the error' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
-
- expect(shared).to receive(:error) do |param|
- expect(param.message).to include 'does not have required permissions for'
- end
-
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ service.execute
end
+ end
- context 'when there are errors with the import file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
-
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- ).once
+ context 'when user does not have correct permissions' do
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ )
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
- context 'when there are errors with the sub-relations' do
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- before do
- group.add_owner(user)
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'does not have required permissions for'
end
- it 'successfully imports the group' do
- expect(service.execute).to be_truthy
- end
-
- it 'logs the import success' do
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- )
+ context 'when there are errors with the import file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
- service.execute
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ ).once
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
- context 'when importing a json export' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
-
- let(:import_logger) { instance_double(Gitlab::Import::Logger) }
-
- subject(:service) { described_class.new(group: group, user: user) }
+ context 'when there are errors with the sub-relations' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
before do
- ImportExportUpload.create!(group: group, import_file: import_file)
-
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
- allow(import_logger).to receive(:error)
- allow(import_logger).to receive(:warn)
- allow(import_logger).to receive(:info)
- allow(FileUtils).to receive(:rm_rf).and_call_original
+ group.add_owner(user)
end
- context 'when user has correct permissions' do
- before do
- group.add_owner(user)
- end
-
- it 'imports group structure successfully' do
- expect(service.execute).to be_truthy
- end
-
- it 'tracks the event' do
- service.execute
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_group_from_file'
- )
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
-
- it 'removes import file' do
- service.execute
-
- expect(group.import_export_upload.import_file.file).to be_nil
- end
-
- it 'removes tmp files' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
-
- service.execute
-
- expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
- expect(Dir.exist?(shared.base_path)).to eq(false)
- end
-
- it 'logs the import success' do
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- ).once
-
- service.execute
- end
+ it 'successfully imports the group' do
+ expect(service.execute).to be_truthy
end
- context 'when user does not have correct permissions' do
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- )
-
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ it 'logs the import success' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
- it 'tracks the error' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
- expect(shared).to receive(:error) do |param|
- expect(param.message).to include 'does not have required permissions for'
- end
+ service.execute
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
+ end
+ end
- context 'when there are errors with the import file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_symlink_export.tar.gz') }
-
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- ).once
+ context 'when importing a json export' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
- end
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
- context 'when there are errors with the sub-relations' do
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export_invalid_subrelations.tar.gz') }
+ subject(:service) { described_class.new(group: group, user: user) }
- before do
- group.add_owner(user)
- end
+ before do
+ group.add_owner(user)
+ ImportExportUpload.create!(group: group, import_file: import_file)
- it 'successfully imports the group' do
- expect(service.execute).to be_truthy
- end
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ allow(import_logger).to receive(:error)
+ allow(import_logger).to receive(:warn)
+ allow(import_logger).to receive(:info)
+ end
- it 'tracks the event' do
- service.execute
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_group_from_file'
- )
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ ).once
- it 'logs the import success' do
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- )
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- service.execute
- end
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'The import file is incompatible'
end
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 3cf2c875341..10399bed655 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::TransferService, :sidekiq_inline do
+RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :subgroups do
shared_examples 'project namespace path is in sync with project path' do
it 'keeps project and project namespace attributes in sync' do
projects_with_project_namespace.each do |project|
@@ -364,7 +364,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
it 'calls update service' do
- expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE }).and_call_original
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_AND_OVERRIDABLE }).and_call_original
transfer_service.execute(new_parent_group)
end
@@ -1005,5 +1005,38 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
end
end
end
+
+ context 'with namespace_commit_emails concerns' do
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:target) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ target.add_owner(user)
+ end
+
+ context 'when origin is a root group' do
+ before do
+ create_list(:namespace_commit_email, 2, namespace: group)
+ end
+
+ it 'deletes all namespace_commit_emails' do
+ expect { transfer_service.execute(target) }
+ .to change { group.namespace_commit_emails.count }.by(-2)
+ end
+
+ it_behaves_like 'publishes a GroupTransferedEvent'
+ end
+
+ context 'when origin is not a root group' do
+ let(:group) { create(:group, parent: create(:group)) }
+
+ it 'does not attempt to delete namespace_commit_emails' do
+ expect(Users::NamespaceCommitEmail).not_to receive(:delete_for_namespace)
+
+ transfer_service.execute(target)
+ end
+ end
+ end
end
end
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 98eccedeace..a29f73a71c2 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -114,13 +114,13 @@ RSpec.describe Groups::UpdateSharedRunnersService do
end
context 'allow descendants to override' do
- let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_AND_OVERRIDABLE } }
context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
it 'receives correct method and succeeds' do
- expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_WITH_OVERRIDE)
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_AND_OVERRIDABLE)
expect(subject[:status]).to eq(:success)
end
@@ -135,6 +135,30 @@ RSpec.describe Groups::UpdateSharedRunnersService do
expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
end
end
+
+ context 'when using DISABLED_WITH_OVERRIDE (deprecated)' do
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }
+
+ context 'top level group' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+
+ it 'receives correct method and succeeds' do
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_WITH_OVERRIDE)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+
+ context 'when parent does not allow' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
+ end
+ end
+ end
end
end
end
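
These hunks keep the old SR_DISABLED_WITH_OVERRIDE value working while introducing SR_DISABLED_AND_OVERRIDABLE as the preferred name. A sketch of the deprecation they exercise; only the constant names come from the diff, the string values assigned below are assumptions:

SR_DISABLED_AND_OVERRIDABLE = 'disabled_and_overridable'  # new canonical setting
SR_DISABLED_WITH_OVERRIDE   = 'disabled_with_override'    # deprecated, still accepted by the service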
diff --git a/spec/services/ide/schemas_config_service_spec.rb b/spec/services/ide/schemas_config_service_spec.rb
index 69ad9b5cbea..f277b8e9954 100644
--- a/spec/services/ide/schemas_config_service_spec.rb
+++ b/spec/services/ide/schemas_config_service_spec.rb
@@ -20,35 +20,21 @@ RSpec.describe Ide::SchemasConfigService do
subject { described_class.new(project, user, filename: filename).execute }
- context 'feature flag schema_linting is enabled', unless: Gitlab.ee? do
- before do
- stub_feature_flags(schema_linting: true)
- end
-
- context 'when no predefined schema exists for the given filename' do
- it 'returns an empty object' do
- is_expected.to include(
- status: :success,
- schema: {})
- end
- end
-
- context 'when a predefined schema exists for the given filename' do
- let(:filename) { '.gitlab-ci.yml' }
-
- it 'uses predefined schema matches' do
- expect(Gitlab::HTTP).to receive(:get).with('https://json.schemastore.org/gitlab-ci')
- expect(subject[:schema]['title']).to eq "Sample schema"
- end
- end
- end
-
- context 'feature flag schema_linting is disabled', unless: Gitlab.ee? do
+ context 'when no predefined schema exists for the given filename', unless: Gitlab.ee? do
it 'returns an empty object' do
is_expected.to include(
status: :success,
schema: {})
end
end
+
+ context 'when a predefined schema exists for the given filename' do
+ let(:filename) { '.gitlab-ci.yml' }
+
+ it 'uses predefined schema matches' do
+ expect(Gitlab::HTTP).to receive(:get).with('https://json.schemastore.org/gitlab-ci')
+ expect(subject[:schema]['title']).to eq "Sample schema"
+ end
+ end
end
end
diff --git a/spec/services/import/github/gists_import_service_spec.rb b/spec/services/import/github/gists_import_service_spec.rb
index c5d73e6479d..32d04a580da 100644
--- a/spec/services/import/github/gists_import_service_spec.rb
+++ b/spec/services/import/github/gists_import_service_spec.rb
@@ -2,16 +2,19 @@
require 'spec_helper'
-RSpec.describe Import::Github::GistsImportService, feature_category: :importer do
- subject(:import) { described_class.new(user, params) }
+RSpec.describe Import::Github::GistsImportService, feature_category: :importers do
+ subject(:import) { described_class.new(user, client, params) }
let_it_be(:user) { create(:user) }
let(:params) { { github_access_token: 'token' } }
let(:import_status) { instance_double('Gitlab::GithubGistsImport::Status') }
+ let(:client) { Gitlab::GithubImport::Client.new(params[:github_access_token]) }
+ let(:octokit_user) { { login: 'user_login' } }
describe '#execute', :aggregate_failures do
before do
allow(Gitlab::GithubGistsImport::Status).to receive(:new).and_return(import_status)
+ allow(client.octokit).to receive(:user).and_return(octokit_user)
end
context 'when import in progress' do
@@ -43,5 +46,24 @@ RSpec.describe Import::Github::GistsImportService, feature_category: :importer d
expect(import.execute).to eq({ status: :success })
end
end
+
+ context 'when user token is invalid' do
+ before do
+ allow(client.octokit).to receive(:user).and_raise(Octokit::Unauthorized)
+ allow(import_status).to receive(:started?).and_return(false)
+ end
+
+ let(:expected_result) do
+ {
+ http_status: 401,
+ message: 'Access denied to the GitHub account.',
+ status: :error
+ }
+ end
+
+ it 'returns 401 error' do
+ expect(import.execute).to eq(expected_result)
+ end
+ end
end
end
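
The new context verifies that an invalid token surfaces as a 401 error before the import starts. A sketch of the token check the examples imply, assuming a small private helper; the client.octokit.user call and the error payload are copied from the spec, the method name is not:

# Sketch of the access check; returns nil when the token is valid.
def verify_github_access
  client.octokit.user # Octokit raises Octokit::Unauthorized for a revoked or invalid token
  nil
rescue Octokit::Unauthorized
  { status: :error, http_status: 401, message: 'Access denied to the GitHub account.' }
end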
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index d1b372c5e87..293e247c140 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -7,22 +7,19 @@ RSpec.describe Import::GithubService do
let_it_be(:token) { 'complex-token' }
let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
+ let(:user_namespace_path) { user.namespace_path }
let(:optional_stages) { nil }
let(:params) do
{
repo_id: 123,
new_name: 'new_repo',
- target_namespace: 'root',
+ target_namespace: user_namespace_path,
optional_stages: optional_stages
}
end
subject(:github_importer) { described_class.new(client, user, params) }
- before do
- allow(subject).to receive(:authorized?).and_return(true)
- end
-
shared_examples 'handles errors' do |klass|
let(:client) { klass.new(token) }
let(:project_double) { instance_double(Project, persisted?: true) }
@@ -74,6 +71,7 @@ RSpec.describe Import::GithubService do
let(:repository_double) { { name: 'repository', size: 99 } }
before do
+ allow(subject).to receive(:authorized?).and_return(true)
expect(client).to receive(:repository).and_return(repository_double)
allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
@@ -215,6 +213,38 @@ RSpec.describe Import::GithubService do
end
end
end
+
+ context 'when target_namespace is blank' do
+ before do
+ params[:target_namespace] = ''
+ end
+
+ it 'raises an exception' do
+ expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required')
+ end
+ end
+
+ context 'when namespace to import repository into does not exist' do
+ before do
+ params[:target_namespace] = 'unknown_path'
+ end
+
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error)
+ end
+ end
+
+ context 'when user has no permissions to import repository into the specified namespace' do
+ let_it_be(:group) { create(:group) }
+
+ before do
+ params[:target_namespace] = group.full_path
+ end
+
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error)
+ end
+ end
end
context 'when remove_legacy_github_client feature flag is enabled' do
@@ -248,4 +278,20 @@ RSpec.describe Import::GithubService do
message: "Invalid URL: #{url}"
}
end
+
+ def not_existed_namespace_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: 'Namespace or group to import repository into does not exist.'
+ }
+ end
+
+ def taken_namespace_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: 'This namespace has already been taken. Choose a different one.'
+ }
+ end
end
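
The added contexts pin down three target-namespace outcomes: blank raises ArgumentError, an unknown path returns an unprocessable-entity error, and a namespace the user cannot import into returns the "already been taken" error. A sketch of that flow under stated assumptions; the messages are copied from the spec, while the helper name, the error builder, and the :create_projects ability check are illustrative:

def validate_target_namespace!(target_namespace_path, user)
  raise ArgumentError, 'Target namespace is required' if target_namespace_path.blank?

  namespace = Namespace.find_by_full_path(target_namespace_path)
  return error('Namespace or group to import repository into does not exist.', :unprocessable_entity) unless namespace
  return error('This namespace has already been taken. Choose a different one.', :unprocessable_entity) unless user.can?(:create_projects, namespace)

  namespace
end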
diff --git a/spec/services/issue_links/create_service_spec.rb b/spec/services/issue_links/create_service_spec.rb
index 88e8470658d..0629b8b091b 100644
--- a/spec/services/issue_links/create_service_spec.rb
+++ b/spec/services/issue_links/create_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe IssueLinks::CreateService do
let_it_be(:project) { create :project, namespace: namespace }
let_it_be(:issuable) { create :issue, project: project }
let_it_be(:issuable2) { create :issue, project: project }
- let_it_be(:guest_issuable) { create :issue }
+ let_it_be(:restricted_issuable) { create :issue }
let_it_be(:another_project) { create :project, namespace: project.namespace }
let_it_be(:issuable3) { create :issue, project: another_project }
let_it_be(:issuable_a) { create :issue, project: project }
@@ -23,7 +23,7 @@ RSpec.describe IssueLinks::CreateService do
before do
project.add_developer(user)
- guest_issuable.project.add_guest(user)
+ restricted_issuable.project.add_guest(user)
another_project.add_developer(user)
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index e6ad755f911..ef24d1e940e 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe Issues::CloseService do
end
context 'when the escalation status did not change to resolved' do
- let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false) }
+ let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false, status_name: 'acknowledged') }
before do
allow(issue).to receive(:incident_management_issuable_escalation_status).and_return(escalation_status)
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 66d017464bf..d3359447fd8 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Issues::ExportCsvService do
+RSpec.describe Issues::ExportCsvService, :with_license do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :public, group: group) }
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 70fc6ffc38f..930766c520b 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1168,6 +1168,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'triggers webhooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks)
update_issue(opts)
end
@@ -1281,6 +1282,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'triggers webhooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks)
update_issue(opts)
end
diff --git a/spec/services/lfs/file_transformer_spec.rb b/spec/services/lfs/file_transformer_spec.rb
index e87c80b4c6c..9d4d8851c2d 100644
--- a/spec/services/lfs/file_transformer_spec.rb
+++ b/spec/services/lfs/file_transformer_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Lfs::FileTransformer do
+RSpec.describe Lfs::FileTransformer, feature_category: :git_lfs do
let(:project) { create(:project, :repository, :wiki_repo) }
let(:repository) { project.repository }
let(:file_content) { 'Test file content' }
@@ -13,6 +13,10 @@ RSpec.describe Lfs::FileTransformer do
describe '#new_file' do
context 'with lfs disabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ end
+
it 'skips gitattributes check' do
expect(repository.raw).not_to receive(:blob_at)
@@ -98,6 +102,38 @@ RSpec.describe Lfs::FileTransformer do
expect(project.lfs_objects_projects.first.repository_type).to eq('design')
end
end
+
+ context 'when content type detection enabled' do
+ let(:detect_content_type) { true }
+
+ before do
+ allow(Gitlab::Utils::MimeType).to receive(:from_string).with(file_content).and_return(mime_type)
+ end
+
+ context 'when mime type detected' do
+ let(:mime_type) { 'image/tiff' }
+
+ it 'creates a file with custom content type' do
+ expect(CarrierWaveStringFile).to receive(:new_file).with({
+ file_content: file_content,
+ filename: anything,
+ content_type: mime_type
+ })
+
+ subject.new_file(file_path, file, detect_content_type: detect_content_type)
+ end
+ end
+
+ context 'when mime type not detected' do
+ let(:mime_type) { nil }
+
+ it 'creates a file with default content type' do
+ expect(CarrierWaveStringFile).to receive(:new).with(file_content)
+
+ subject.new_file(file_path, file, detect_content_type: detect_content_type)
+ end
+ end
+ end
end
context "when doesn't use LFS" do
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index d0f009f1321..d8a8d5881bf 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::DestroyService do
+RSpec.describe Members::DestroyService, feature_category: :subgroups do
let(:current_user) { create(:user) }
let(:member_user) { create(:user) }
let(:group) { create(:group, :public) }
@@ -100,32 +100,104 @@ RSpec.describe Members::DestroyService do
end
context 'With ExclusiveLeaseHelpers' do
+ include ExclusiveLeaseHelpers
+
+ let(:lock_key) do
+ "delete_members:#{member_to_delete.source.class}:#{member_to_delete.source.id}"
+ end
+
+ let(:timeout) { 1.minute }
let(:service_object) { described_class.new(current_user) }
- let!(:member) { group_project.add_developer(member_user) }
- subject(:destroy_member) { service_object.execute(member, **opts) }
+ subject(:destroy_member) { service_object.execute(member_to_delete, **opts) }
- before do
- group_project.add_maintainer(current_user)
+ shared_examples_for 'deletes the member without using a lock' do
+ it 'does not try to perform the delete within a lock' do
+        # `UpdateHighestRole` concern also uses locks to perform work
+ # whenever a Member is committed, so that needs to be accounted for.
+ lock_key_for_update_highest_role = "update_highest_role:#{member_to_delete.user_id}"
+ expect(Gitlab::ExclusiveLease)
+ .to receive(:new).with(lock_key_for_update_highest_role, timeout: 10.minutes.to_i).and_call_original
+
+ # We do not use any locks for member deletion process.
+ expect(Gitlab::ExclusiveLease)
+ .not_to receive(:new).with(lock_key, timeout: timeout)
- allow(service_object).to receive(:in_lock) do |_, &block|
- block.call if lock_obtained
+ destroy_member
+ end
+
+ it 'destroys the membership' do
+ expect { destroy_member }.to change { entity.members.count }.by(-1)
end
end
- context 'when lock is obtained' do
- let(:lock_obtained) { true }
+ context 'for group members' do
+ before do
+ group.add_owner(current_user)
+ end
+
+ context 'deleting group owners' do
+ let!(:member_to_delete) { group.add_owner(member_user) }
- it 'destroys the membership' do
- expect { destroy_member }.to change { group_project.members.count }.by(-1)
+ context 'locking to avoid race conditions' do
+ it 'tries to perform the delete within a lock' do
+ expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
+
+ destroy_member
+ end
+
+ context 'based on status of the lock' do
+ context 'when lock is obtained' do
+ it 'destroys the membership' do
+ expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
+
+ expect { destroy_member }.to change { group.members.count }.by(-1)
+ end
+ end
+
+ context 'when the lock cannot be obtained' do
+ before do
+ stub_exclusive_lease_taken(lock_key, timeout: timeout)
+ end
+
+ it 'raises error' do
+ expect { destroy_member }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+ end
+ end
+ end
+
+ context 'deleting group members that are not owners' do
+ let!(:member_to_delete) { group.add_developer(member_user) }
+
+ it_behaves_like 'deletes the member without using a lock' do
+ let(:entity) { group }
+ end
end
end
- context 'when the lock can not be obtained' do
- let(:lock_obtained) { false }
+ context 'for project members' do
+ before do
+ group_project.add_owner(current_user)
+ end
+
+    context 'deleting project owners' do
+ let!(:member_to_delete) { entity.add_owner(member_user) }
- it 'does not destroy the membership' do
- expect { destroy_member }.not_to change { group_project.members.count }
+ it_behaves_like 'deletes the member without using a lock' do
+ let(:entity) { group_project }
+ end
+      end
+
+    context 'deleting project members that are not owners' do
+ let!(:member_to_delete) { group_project.add_developer(member_user) }
+
+ it_behaves_like 'deletes the member without using a lock' do
+ let(:entity) { group_project }
+ end
end
end
end
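
The rewritten examples assert that only group-owner deletions take an exclusive lease (keyed "delete_members:<SourceClass>:<source_id>" with a one-minute timeout), while project members and non-owner group members are deleted without a lock. A sketch of that rule, assuming the in_lock helper from Gitlab::ExclusiveLeaseHelpers; everything beyond the lock key and timeout is illustrative:

def destroy_member(member)
  if member.source.is_a?(Group) && member.access_level == Gitlab::Access::OWNER
    in_lock("delete_members:#{member.source.class}:#{member.source.id}", ttl: 1.minute) do
      member.destroy!
    end
  else
    member.destroy! # project members and non-owner group members skip the lock
  end
end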
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index eb8fae03c39..8a7f9a84c77 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -14,10 +14,7 @@ RSpec.describe Members::UpdateService do
let(:members) { source.members_and_requesters.where(user_id: member_users).to_a }
let(:update_service) { described_class.new(current_user, params) }
let(:params) { { access_level: access_level } }
- let(:updated_members) do
- result = subject
- Array.wrap(result[:members] || result[:member])
- end
+ let(:updated_members) { subject[:members] }
before do
member_users.first.tap do |member_user|
@@ -255,40 +252,6 @@ RSpec.describe Members::UpdateService do
end
end
- context 'when :bulk_update_membership_roles feature flag is disabled' do
- let(:member) { source.members_and_requesters.find_by!(user_id: member_user1.id) }
- let(:members) { [member] }
-
- subject { update_service.execute(member, permission: permission) }
-
- shared_examples 'a service returning an error' do
- before do
- allow(member).to receive(:save) do
- member.errors.add(:user_id)
- member.errors.add(:access_level)
- end
- .and_return(false)
- end
-
- it_behaves_like 'returns error status when params are invalid'
-
- it 'returns the error' do
- response = subject
-
- expect(response[:status]).to eq(:error)
- expect(response[:message]).to eq('User is invalid and Access level is invalid')
- end
- end
-
- before do
- stub_feature_flags(bulk_update_membership_roles: false)
- end
-
- it_behaves_like 'current user cannot update the given members'
- it_behaves_like 'updating a project'
- it_behaves_like 'updating a group'
- end
-
subject { update_service.execute(members, permission: permission) }
shared_examples 'a service returning an error' do
@@ -326,15 +289,14 @@ RSpec.describe Members::UpdateService do
it_behaves_like 'updating a group'
context 'with a single member' do
- let(:member) { create(:group_member, group: group) }
- let(:members) { member }
+ let(:members) { create(:group_member, group: group) }
before do
group.add_owner(current_user)
end
it 'returns the correct response' do
- expect(subject[:member]).to eq(member)
+ expect(subject[:members]).to contain_exactly(members)
end
end
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index 6eeba3029ae..bd907ba6015 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -2,7 +2,15 @@
require 'spec_helper'
-RSpec.describe MergeRequests::BaseService do
+module MergeRequests
+ class ExampleService < MergeRequests::BaseService
+ def execute(merge_request, async: false, allow_duplicate: false)
+ create_pipeline_for(merge_request, current_user, async: async, allow_duplicate: allow_duplicate)
+ end
+ end
+end
+
+RSpec.describe MergeRequests::BaseService, feature_category: :code_review_workflow do
include ProjectForksHelper
let_it_be(:project) { create(:project, :repository) }
@@ -57,4 +65,62 @@ RSpec.describe MergeRequests::BaseService do
it_behaves_like 'does not enqueue Jira sync worker'
end
end
+
+  describe '#create_pipeline_for' do
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ subject { MergeRequests::ExampleService.new(project: project, current_user: project.first_owner, params: params) }
+
+ context 'async: false' do
+ it 'creates a pipeline directly' do
+ expect(MergeRequests::CreatePipelineService)
+ .to receive(:new)
+ .with(hash_including(project: project, current_user: project.first_owner, params: { allow_duplicate: false }))
+ .and_call_original
+ expect(MergeRequests::CreatePipelineWorker).not_to receive(:perform_async)
+
+ subject.execute(merge_request, async: false)
+ end
+
+ context 'allow_duplicate: true' do
+ it 'passes :allow_duplicate as true' do
+ expect(MergeRequests::CreatePipelineService)
+ .to receive(:new)
+ .with(hash_including(project: project, current_user: project.first_owner, params: { allow_duplicate: true }))
+ .and_call_original
+ expect(MergeRequests::CreatePipelineWorker).not_to receive(:perform_async)
+
+ subject.execute(merge_request, async: false, allow_duplicate: true)
+ end
+ end
+ end
+
+ context 'async: true' do
+      it 'enqueues a CreatePipelineWorker' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, project.first_owner.id, merge_request.id, { "allow_duplicate" => false })
+ .and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute(merge_request, async: true) }.to change(MergeRequests::CreatePipelineWorker.jobs, :size).by(1)
+ end
+ end
+
+ context 'allow_duplicate: true' do
+ it 'passes :allow_duplicate as true' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, project.first_owner.id, merge_request.id, { "allow_duplicate" => true })
+ .and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute(merge_request, async: true, allow_duplicate: true) }.to change(MergeRequests::CreatePipelineWorker.jobs, :size).by(1)
+ end
+ end
+ end
+ end
+ end
end
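
The new #create_pipeline_for examples pin down the dispatch: synchronous calls go straight to MergeRequests::CreatePipelineService, asynchronous ones enqueue MergeRequests::CreatePipelineWorker with the allow_duplicate flag serialized as a string key. A sketch of the method under test, with the argument order taken from the expectations above and the body itself an assumption:

def create_pipeline_for(merge_request, user, async: false, allow_duplicate: false)
  if async
    MergeRequests::CreatePipelineWorker
      .perform_async(project.id, user.id, merge_request.id, 'allow_duplicate' => allow_duplicate)
  else
    MergeRequests::CreatePipelineService
      .new(project: project, current_user: user, params: { allow_duplicate: allow_duplicate })
      .execute(merge_request)
  end
end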
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index e7aa6e74246..316f20d8276 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -24,6 +24,45 @@ RSpec.describe MergeRequests::RebaseService do
project.add_maintainer(user)
end
+ describe '#validate' do
+ subject { service.validate(merge_request) }
+
+ it { is_expected.to be_success }
+
+ context 'when source branch does not exist' do
+ before do
+ merge_request.update!(source_branch: 'does_not_exist')
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Source branch does not exist')
+ end
+ end
+
+ context 'when user has no permissions to rebase' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Cannot push to source branch')
+ end
+ end
+
+ context 'when branch is protected' do
+ before do
+ create(:protected_branch, project: project, name: merge_request.source_branch, allow_force_push: false)
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Source branch is protected from force push')
+ end
+ end
+ end
+
describe '#execute' do
shared_examples 'sequence of failure and success' do
it 'properly clears the error message' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 5174ceaaa82..0814942b6b7 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::RefreshService do
+RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_workflow do
include ProjectForksHelper
include UserHelpers
@@ -138,7 +138,7 @@ RSpec.describe MergeRequests::RefreshService do
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') }.to change {
- refresh_service.instance_variable_get("@source_merge_requests").first.merge_request_diff
+ refresh_service.instance_variable_get(:@source_merge_requests).first.merge_request_diff
}
end
@@ -799,7 +799,7 @@ RSpec.describe MergeRequests::RefreshService do
it 'does not mark as draft based on commits that do not belong to an MR' do
allow(refresh_service).to receive(:find_new_commits)
- refresh_service.instance_variable_set("@commits",
+ refresh_service.instance_variable_set(:@commits,
[
double(
id: 'aaaaaaa',
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index da78f86c7c8..344d93fc5ca 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::UpdateService, :mailer do
+RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_review_workflow do
include ProjectForksHelper
let(:group) { create(:group, :public) }
@@ -479,6 +479,16 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
+ shared_examples_for "creates a new pipeline" do
+ it "creates a new pipeline" do
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, user.id, merge_request.id, { "allow_duplicate" => true })
+
+ update_merge_request(target_branch: new_target_branch)
+ end
+ end
+
shared_examples_for 'correct merge behavior' do
let(:opts) do
{
@@ -784,7 +794,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
- context 'when the target branch change' do
+ context 'when the target branch changes' do
it 'calls MergeRequests::ResolveTodosService#async_execute' do
expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
expect(service).to receive(:async_execute)
@@ -799,6 +809,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request({ target_branch: "target" })
end
+
+ it_behaves_like "creates a new pipeline" do
+ let(:new_target_branch) { "target" }
+ end
end
context 'when auto merge is enabled and target branch changed' do
@@ -813,6 +827,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request({ target_branch: 'target' })
end
+
+ it_behaves_like "creates a new pipeline" do
+ let(:new_target_branch) { "target" }
+ end
end
end
@@ -1237,6 +1255,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect { update_merge_request(target_branch: 'master', target_branch_was_deleted: true) }
.to change { merge_request.reload.target_branch }.from('mr-a').to('master')
end
+
+ it_behaves_like "creates a new pipeline" do
+ let(:new_target_branch) { "target" }
+ end
end
it_behaves_like 'issuable record that supports quick actions' do
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index ff3b295d185..e3c05178025 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -31,17 +31,37 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
describe '#create!' do
- subject { repository.create!(experiment, 1234, [{ key: 'hello', value: 'world' }]) }
+ let(:tags) { [{ key: 'hello', value: 'world' }] }
+ let(:name) { 'some_candidate' }
+
+ subject { repository.create!(experiment, 1234, tags, name) }
it 'creates the candidate' do
expect(subject.start_time).to eq(1234)
expect(subject.iid).not_to be_nil
expect(subject.end_time).to be_nil
+ expect(subject.name).to eq('some_candidate')
end
it 'creates with tag' do
expect(subject.metadata.length).to eq(1)
end
+
+ context 'when name is passed as tag' do
+ let(:tags) { [{ key: 'mlflow.runName', value: 'blah' }] }
+
+      it 'ignores the tag when an explicit name is given' do
+ expect(subject.name).to eq('some_candidate')
+ end
+
+ context 'when name is nil' do
+ let(:name) { nil }
+
+ it 'sets the mlflow.runName as candidate name' do
+ expect(subject.name).to eq('blah')
+ end
+ end
+ end
end
describe '#update' do
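
The #create! changes show an explicit candidate name winning over the mlflow.runName tag, with the tag used only as a fallback when the name is nil. A sketch of that fallback; the helper name is an assumption:

def candidate_name(name, tags)
  name || tags&.find { |tag| tag[:key] == 'mlflow.runName' }&.dig(:value)
end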
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 2f1c5a5b0f3..22606cc2461 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -18,6 +18,10 @@ RSpec.describe Notes::CreateService do
end
context "valid params" do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { note }
+ end
+
it 'returns a valid note' do
expect(note).to be_valid
end
@@ -230,6 +234,10 @@ RSpec.describe Notes::CreateService do
confidential: false)
end
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { described_class.new(project_with_repo, user, new_opts).execute }
+ end
+
it 'note is associated with a note diff file' do
MergeRequests::MergeToRefService.new(project: merge_request.project, current_user: merge_request.author).execute(merge_request)
@@ -248,6 +256,16 @@ RSpec.describe Notes::CreateService do
end
end
+ context 'when skip_merge_status_trigger execute option is set to true' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) do
+ described_class
+ .new(project_with_repo, user, new_opts)
+ .execute(skip_merge_status_trigger: true)
+ end
+ end
+ end
+
it 'does not track ipynb note usage data' do
expect(::Gitlab::UsageDataCounters::IpynbDiffActivityCounter).not_to receive(:note_created)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 1ca14cd430b..1ad9234c939 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NotificationService, :mailer do
+RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
include EmailSpec::Matchers
include ExternalAuthorizationServiceHelpers
include NotificationHelpers
@@ -337,11 +337,12 @@ RSpec.describe NotificationService, :mailer do
describe '#access_token_expired' do
let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
- subject { notification.access_token_expired(user) }
+ subject { notification.access_token_expired(user, pat.name) }
it 'sends email to the token owner' do
- expect { subject }.to have_enqueued_email(user, mail: "access_token_expired_email")
+ expect { subject }.to have_enqueued_email(user, pat.name, mail: "access_token_expired_email")
end
context 'when user is not allowed to receive notifications' do
@@ -350,7 +351,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'does not send email to the token owner' do
- expect { subject }.not_to have_enqueued_email(user, mail: "access_token_expired_email")
+ expect { subject }.not_to have_enqueued_email(user, pat.name, mail: "access_token_expired_email")
end
end
end
diff --git a/spec/services/packages/conan/search_service_spec.rb b/spec/services/packages/conan/search_service_spec.rb
index 55dcdfe646d..9e8be164d8c 100644
--- a/spec/services/packages/conan/search_service_spec.rb
+++ b/spec/services/packages/conan/search_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Conan::SearchService do
+RSpec.describe Packages::Conan::SearchService, feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
diff --git a/spec/services/pages_domains/create_service_spec.rb b/spec/services/pages_domains/create_service_spec.rb
index cac941fb134..4dd9bd8f3bb 100644
--- a/spec/services/pages_domains/create_service_spec.rb
+++ b/spec/services/pages_domains/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::PagesDomains::CreateService do
+RSpec.describe ::PagesDomains::CreateService, feature_category: :pages do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :in_subgroup) }
@@ -37,6 +37,7 @@ RSpec.describe ::PagesDomains::CreateService do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: kind_of(Numeric),
domain: domain
)
diff --git a/spec/services/pages_domains/delete_service_spec.rb b/spec/services/pages_domains/delete_service_spec.rb
index 5f98fe3c7f7..43d59961637 100644
--- a/spec/services/pages_domains/delete_service_spec.rb
+++ b/spec/services/pages_domains/delete_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::PagesDomains::DeleteService do
+RSpec.describe ::PagesDomains::DeleteService, feature_category: :pages do
let_it_be(:user) { create(:user) }
let_it_be(:pages_domain) { create(:pages_domain, :with_project) }
@@ -39,6 +39,7 @@ RSpec.describe ::PagesDomains::DeleteService do
project_id: pages_domain.project.id,
namespace_id: pages_domain.project.namespace.id,
root_namespace_id: pages_domain.project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
end
diff --git a/spec/services/pages_domains/retry_acme_order_service_spec.rb b/spec/services/pages_domains/retry_acme_order_service_spec.rb
index 3152e05f2f1..4860d57475b 100644
--- a/spec/services/pages_domains/retry_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/retry_acme_order_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: domain.id,
domain: domain.domain
)
end
@@ -31,6 +32,7 @@ RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: domain.id,
domain: domain.domain
)
end
diff --git a/spec/services/pages_domains/update_service_spec.rb b/spec/services/pages_domains/update_service_spec.rb
index f6558f56422..c317a2c68f6 100644
--- a/spec/services/pages_domains/update_service_spec.rb
+++ b/spec/services/pages_domains/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PagesDomains::UpdateService do
+RSpec.describe PagesDomains::UpdateService, feature_category: :pages do
let_it_be(:user) { create(:user) }
let_it_be(:pages_domain) { create(:pages_domain, :with_project) }
@@ -40,6 +40,7 @@ RSpec.describe PagesDomains::UpdateService do
project_id: pages_domain.project.id,
namespace_id: pages_domain.project.namespace.id,
root_namespace_id: pages_domain.project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
end
diff --git a/spec/services/personal_access_tokens/revoke_service_spec.rb b/spec/services/personal_access_tokens/revoke_service_spec.rb
index 562d6017405..a9b4df9749f 100644
--- a/spec/services/personal_access_tokens/revoke_service_spec.rb
+++ b/spec/services/personal_access_tokens/revoke_service_spec.rb
@@ -71,26 +71,30 @@ RSpec.describe PersonalAccessTokens::RevokeService do
let_it_be(:current_user) { nil }
context 'when source is valid' do
- let_it_be(:source) { 'secret_detection' }
+ let_it_be(:source) { :secret_detection }
let_it_be(:token) { create(:personal_access_token) }
it_behaves_like 'a successfully revoked token' do
- let(:revoked_by) { 'secret_detection' }
+ let(:revoked_by) { :secret_detection }
end
end
context 'when source is invalid' do
- let_it_be(:source) { 'external_request' }
+ let_it_be(:source) { :external_request }
let_it_be(:token) { create(:personal_access_token) }
- it_behaves_like 'an unsuccessfully revoked token'
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error ArgumentError
+ end
end
context 'when source is missing' do
let_it_be(:source) { nil }
let_it_be(:token) { create(:personal_access_token) }
- it_behaves_like 'an unsuccessfully revoked token'
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error ArgumentError
+ end
end
end
end
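
The updated examples replace the soft "unsuccessfully revoked" behaviour with a hard ArgumentError for unrecognised revocation sources. A sketch of that stricter check; the list of accepted sources beyond :secret_detection and the helper name are assumptions:

VALID_SOURCES = %i[secret_detection].freeze

def validate_source!(source)
  raise ArgumentError, "Invalid source: #{source.inspect}" unless VALID_SOURCES.include?(source)
end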
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index f42ab198a04..f85a8eda7ee 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::CreateService, '#execute' do
+RSpec.describe Projects::CreateService, '#execute', feature_category: :projects do
include ExternalAuthorizationServiceHelpers
let(:user) { create :user }
@@ -995,6 +995,7 @@ RSpec.describe Projects::CreateService, '#execute' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
Namespace::SR_ENABLED | nil | true
Namespace::SR_DISABLED_WITH_OVERRIDE | nil | false
+ Namespace::SR_DISABLED_AND_OVERRIDABLE | nil | false
Namespace::SR_DISABLED_AND_UNOVERRIDABLE | nil | false
end
@@ -1017,6 +1018,8 @@ RSpec.describe Projects::CreateService, '#execute' do
Namespace::SR_ENABLED | false | false
Namespace::SR_DISABLED_WITH_OVERRIDE | false | false
Namespace::SR_DISABLED_WITH_OVERRIDE | true | true
+ Namespace::SR_DISABLED_AND_OVERRIDABLE | false | false
+ Namespace::SR_DISABLED_AND_OVERRIDABLE | true | true
Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false | false
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index bb11b2e617e..38ab7b6e2ee 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -373,6 +373,28 @@ RSpec.describe Projects::ImportService do
expect(result[:status]).to eq(:success)
end
+
+ context 'when host resolves to an IPv6 address' do
+ before do
+ project.import_url = 'https://gitlab.com/gitlab-org/gitlab-development-kit'
+
+ allow(Gitlab::UrlBlocker).to receive(:validate!)
+ .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: true)
+ .and_return([Addressable::URI.parse('https://[2606:4700:90:0:f22e:fbec:5bed:a9b9]/gitlab-org/gitlab-development-kit'), 'gitlab.com'])
+ end
+
+ it 'imports repository with url and additional resolved bare IPv6 address' do
+ expect(project.repository).to receive(:import_repository).with('https://gitlab.com/gitlab-org/gitlab-development-kit', resolved_address: '2606:4700:90:0:f22e:fbec:5bed:a9b9').and_return(true)
+
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
+ expect(service).to receive(:execute).and_return(status: :success)
+ end
+
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
end
context 'when http url is provided' do
diff --git a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
index a3cff345f68..62330441d2f 100644
--- a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
+++ b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
let(:service) { described_class.new }
describe '#execute' do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let_it_be(:artifact_1) { create(:ci_job_artifact, project: project, size: 1, created_at: 14.days.ago) }
let_it_be(:artifact_2) { create(:ci_job_artifact, project: project, size: 2, created_at: 13.days.ago) }
@@ -29,6 +29,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
let(:now) { Time.zone.now }
let(:statistics) { project.statistics }
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 30) }
around do |example|
freeze_time { example.run }
@@ -36,17 +37,19 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
before do
stub_const("#{described_class}::BATCH_SIZE", 3)
+ stub_const("#{described_class}::REFRESH_INTERVAL_SECONDS", 0)
stats = create(:project_statistics, project: project, build_artifacts_size: 120)
- stats.increment_counter(:build_artifacts_size, 30)
+ stats.increment_counter(:build_artifacts_size, increment)
end
it 'resets the build artifacts size stats' do
- expect { service.execute }.to change { project.statistics.reload.build_artifacts_size }.to(0)
+ expect { service.execute }.to change { statistics.reload.build_artifacts_size }.from(120).to(0)
end
- it 'increments the counter attribute by the total size of the current batch of artifacts' do
- expect { service.execute }.to change { statistics.counter(:build_artifacts_size).get }.to(3)
+ it 'resets the buffered counter' do
+ expect { service.execute }
+ .to change { Gitlab::Counters::BufferedCounter.new(statistics, :build_artifacts_size).get }.to(0)
end
it 'updates the last_job_artifact_id to the ID of the last artifact from the batch' do
@@ -56,7 +59,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
it 'updates the last_job_artifact_id to the ID of the last artifact from the project' do
expect { service.execute }
.to change { refresh.reload.last_job_artifact_id_on_refresh_start.to_i }
- .to(project.job_artifacts.last.id)
+ .to(project.job_artifacts.last.id)
end
it 'requeues the refresh job' do
@@ -106,9 +109,10 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
)
end
- it 'deletes the refresh record' do
+ it 'schedules the refresh to be finalized' do
service.execute
- expect(Projects::BuildArtifactsSizeRefresh.where(id: refresh.id)).not_to exist
+
+ expect(refresh.reload.finalizing?).to be(true)
end
end
end
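
The refresh-service hunks switch the spec onto the buffered counter API. A short usage sketch assembled from the calls that appear verbatim above; the amount is the spec's example value:

increment = Gitlab::Counters::Increment.new(amount: 30)
statistics.increment_counter(:build_artifacts_size, increment)

Gitlab::Counters::BufferedCounter.new(statistics, :build_artifacts_size).get # buffered value, reset to 0 by the service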
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 4d75786a4c3..5171836f917 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -535,8 +535,8 @@ RSpec.describe Projects::TransferService do
where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
true | :disabled_and_unoverridable | false
false | :disabled_and_unoverridable | false
- true | :disabled_with_override | true
- false | :disabled_with_override | false
+ true | :disabled_and_overridable | true
+ false | :disabled_and_overridable | false
true | :shared_runners_enabled | true
false | :shared_runners_enabled | false
end
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 47ebd55022f..42b586637ad 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe Repositories::ChangelogService do
recorder = ActiveRecord::QueryRecorder.new { service.execute(commit_to_changelog: commit_to_changelog) }
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
- expect(recorder.count).to eq(10)
+ expect(recorder.count).to eq(12)
expect(changelog).to include('Title 1', 'Title 2')
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 90e80a45515..d11fc377d83 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -471,4 +471,32 @@ RSpec.describe SearchService, feature_category: :global_search do
end
end
end
+
+ describe '.global_search_enabled_for_scope?' do
+ using RSpec::Parameterized::TableSyntax
+ let(:search) { 'foobar' }
+
+ where(:scope, :feature_flag, :enabled, :expected) do
+ 'blobs' | :global_search_code_tab | false | false
+ 'blobs' | :global_search_code_tab | true | true
+ 'commits' | :global_search_commits_tab | false | false
+ 'commits' | :global_search_commits_tab | true | true
+ 'issues' | :global_search_issues_tab | false | false
+ 'issues' | :global_search_issues_tab | true | true
+ 'merge_requests' | :global_search_merge_requests_tab | false | false
+ 'merge_requests' | :global_search_merge_requests_tab | true | true
+ 'wiki_blobs' | :global_search_wiki_tab | false | false
+ 'wiki_blobs' | :global_search_wiki_tab | true | true
+ 'users' | :global_search_users_tab | false | false
+ 'users' | :global_search_users_tab | true | true
+ 'random' | :random | nil | true
+ end
+
+ with_them do
+      it 'returns the expected enabled state for the scope based on its feature flag' do
+ stub_feature_flags(feature_flag => enabled)
+ expect(subject.global_search_enabled_for_scope?).to eq expected
+ end
+ end
+ end
end
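
The table maps each search scope to a gating feature flag, with unmapped scopes staying enabled. A sketch of the predicate it exercises; the scope-to-flag hash is read off the table, while the method body and the Feature.enabled? call shape are assumptions:

SCOPE_FLAGS = {
  'blobs' => :global_search_code_tab,
  'commits' => :global_search_commits_tab,
  'issues' => :global_search_issues_tab,
  'merge_requests' => :global_search_merge_requests_tab,
  'wiki_blobs' => :global_search_wiki_tab,
  'users' => :global_search_users_tab
}.freeze

def global_search_enabled_for_scope?
  flag = SCOPE_FLAGS[params[:scope]]
  flag.nil? || Feature.enabled?(flag, current_user) # unmapped scopes are always enabled
end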
diff --git a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
new file mode 100644
index 00000000000..719a2cf24e9
--- /dev/null
+++ b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::DependencyScanningCreateService, :snowplow,
+ feature_category: :dependency_scanning do
+ subject(:result) { described_class.new(project, user).execute }
+
+ let(:branch_name) { 'set-dependency-scanning-config-1' }
+
+ let(:snowplow_event) do
+ {
+ category: 'Security::CiConfiguration::DependencyScanningCreateService',
+ action: 'create',
+ label: ''
+ }
+ end
+
+ include_examples 'services security ci configuration create service', true
+end
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
index c7e732dc79a..1e6dc367146 100644
--- a/spec/services/security/ci_configuration/sast_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow do
+RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow, feature_category: :sast do
subject(:result) { described_class.new(project, user, params).execute }
let(:branch_name) { 'set-sast-config-1' }
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 37231307156..b02f1e84d25 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -305,13 +305,20 @@ RSpec.describe ServicePing::SubmitService do
stub_response(body: with_conv_index_params)
end
- let(:metric_double) { instance_double(Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator, duration: 123) }
+ let(:metric_double) do
+ instance_double(Gitlab::Usage::ServicePing::LegacyMetricMetadataDecorator, duration: 123, error: nil)
+ end
+
+ let(:metric_double_with_error) do
+ instance_double(Gitlab::Usage::ServicePing::LegacyMetricMetadataDecorator, duration: 123, error: 'Error')
+ end
+
let(:usage_data) do
{
uuid: 'uuid',
metric_a: metric_double,
metric_group: {
- metric_b: metric_double
+ metric_b: metric_double_with_error
},
metric_without_timing: "value",
recorded_at: Time.current
@@ -324,7 +331,7 @@ RSpec.describe ServicePing::SubmitService do
uuid: 'uuid',
metrics: [
{ name: 'metric_a', time_elapsed: 123 },
- { name: 'metric_group.metric_b', time_elapsed: 123 }
+ { name: 'metric_group.metric_b', time_elapsed: 123, error: 'Error' }
]
}
}
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 2d70979dd3a..58dd2fd4c5e 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -178,4 +178,40 @@ RSpec.describe ServiceResponse do
end
end
end
+
+ describe '#log_and_raise_exception' do
+ context 'when successful' do
+ let(:response) { described_class.success }
+
+ it 'returns self' do
+ expect(response.log_and_raise_exception).to be response
+ end
+ end
+
+ context 'when an error' do
+ let(:response) { described_class.error(message: 'bang') }
+
+ it 'logs' do
+ expect(::Gitlab::ErrorTracking).to receive(:log_and_raise_exception)
+ .with(StandardError.new('bang'), {})
+
+ response.log_and_raise_exception
+ end
+
+ it 'allows specification of error class' do
+ error = Class.new(StandardError)
+ expect(::Gitlab::ErrorTracking).to receive(:log_and_raise_exception)
+ .with(error.new('bang'), {})
+
+ response.log_and_raise_exception(as: error)
+ end
+
+ it 'allows extra data for tracking' do
+ expect(::Gitlab::ErrorTracking).to receive(:log_and_raise_exception)
+ .with(StandardError.new('bang'), { foo: 1, bar: 2 })
+
+ response.log_and_raise_exception(foo: 1, bar: 2)
+ end
+ end
+ end
end
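
The new #log_and_raise_exception examples describe a response that returns itself when successful and otherwise forwards a built exception plus any extra data to Gitlab::ErrorTracking. A sketch matching those expectations; ServiceResponse#error? and #message already exist, the body is an assumption:

def log_and_raise_exception(as: StandardError, **extra_data)
  ::Gitlab::ErrorTracking.log_and_raise_exception(as.new(message), extra_data) if error?

  self
end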
diff --git a/spec/services/test_hooks/project_service_spec.rb b/spec/services/test_hooks/project_service_spec.rb
index d97a6f15270..13f863dbbdb 100644
--- a/spec/services/test_hooks/project_service_spec.rb
+++ b/spec/services/test_hooks/project_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe TestHooks::ProjectService do
context 'hook with not implemented test' do
it 'returns error message' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Testing not available for this hook' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Testing not available for this hook')
end
end
@@ -60,7 +60,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has notes.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has notes.')
end
it 'executes hook' do
@@ -79,7 +79,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has issues.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has issues.')
end
it 'executes hook' do
@@ -112,7 +112,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has merge requests.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has merge requests.')
end
it 'executes hook' do
@@ -131,7 +131,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has CI jobs.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has CI jobs.')
end
it 'executes hook' do
@@ -150,7 +150,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has CI pipelines.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has CI pipelines.')
end
it 'executes hook' do
@@ -172,12 +172,12 @@ RSpec.describe TestHooks::ProjectService do
allow(project).to receive(:wiki_enabled?).and_return(false)
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the wiki is enabled and has pages.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the wiki is enabled and has pages.')
end
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the wiki is enabled and has pages.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the wiki is enabled and has pages.')
end
it 'executes hook' do
@@ -196,7 +196,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has releases.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has releases.')
end
it 'executes hook' do
diff --git a/spec/services/test_hooks/system_service_spec.rb b/spec/services/test_hooks/system_service_spec.rb
index 66a1218d123..e94ea4669c6 100644
--- a/spec/services/test_hooks/system_service_spec.rb
+++ b/spec/services/test_hooks/system_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe TestHooks::SystemService do
it 'returns error message' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Testing not available for this hook' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Testing not available for this hook')
end
end
@@ -70,7 +70,7 @@ RSpec.describe TestHooks::SystemService do
it 'returns error message if the user does not have any repository with a merge request' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure one of your projects has merge requests.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure one of your projects has merge requests.')
end
it 'executes hook' do
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index c4ed34a693e..596ca9495ff 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -1259,92 +1259,85 @@ RSpec.describe TodoService do
end
end
- describe '#create_member_access_request' do
- context 'snowplow event tracking' do
- it 'does not track snowplow event when todos are for access request for project', :snowplow do
- user = create(:user)
- project = create(:project)
- requester = create(:project_member, project: project, user: assignee)
- project.add_owner(user)
-
- expect_no_snowplow_event
+ describe '#create_member_access_request_todos' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+
+ shared_examples 'member access request is raised' do
+ context 'when the source has more than 10 owners' do
+ it 'creates todos for 10 recently active source owners' do
+ users = create_list(:user, 12, :with_sign_ins)
+ users.each do |user|
+ source.add_owner(user)
+ end
+ ten_most_recently_active_source_owners = users.sort_by(&:last_sign_in_at).last(10)
+ excluded_source_owners = users - ten_most_recently_active_source_owners
- service.create_member_access_request(requester)
- end
- end
+ service.create_member_access_request_todos(requester1)
- context 'when the group has more than 10 owners' do
- it 'creates todos for 10 recently active group owners' do
- group = create(:group, :public)
+ ten_most_recently_active_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
+ end
- users = create_list(:user, 12, :with_sign_ins)
- users.each do |user|
- group.add_owner(user)
+ excluded_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 0
+ end
end
- ten_most_recently_active_group_owners = users.sort_by(&:last_sign_in_at).last(10)
- excluded_group_owners = users - ten_most_recently_active_group_owners
-
- requester = create(:group_member, group: group, user: assignee)
+ end
- service.create_member_access_request(requester)
+ context 'when total owners are less than 10' do
+ it 'creates todos for all source owners' do
+ users = create_list(:user, 4, :with_sign_ins)
+ users.map do |user|
+ source.add_owner(user)
+ end
- ten_most_recently_active_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
- end
+ service.create_member_access_request_todos(requester1)
- excluded_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 0
+ users.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
+ end
end
end
- end
-
- context 'when total owners are less than 10' do
- it 'creates todos for all group owners' do
- group = create(:group, :public)
- users = create_list(:user, 4, :with_sign_ins)
- users.map do |user|
- group.add_owner(user)
- end
+ context 'when multiple access requests are raised' do
+ it 'creates todos for 10 recently active source owners for multiple requests' do
+ users = create_list(:user, 12, :with_sign_ins)
+ users.each do |user|
+ source.add_owner(user)
+ end
+ ten_most_recently_active_source_owners = users.sort_by(&:last_sign_in_at).last(10)
+ excluded_source_owners = users - ten_most_recently_active_source_owners
- requester = create(:group_member, user: assignee, group: group)
- requester.requested_at = Time.now.utc
- requester.save!
+ service.create_member_access_request_todos(requester1)
+ service.create_member_access_request_todos(requester2)
- service.create_member_access_request(requester)
+ ten_most_recently_active_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester2.user).count).to eq 1
+ end
- users.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
+ excluded_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 0
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester2.user).count).to eq 0
+ end
end
end
end
- context 'when multiple access requests are raised' do
- it 'creates todos for 10 recently active group owners for multiple requests' do
- group = create(:group, :public)
-
- users = create_list(:user, 12, :with_sign_ins)
- users.each do |user|
- group.add_owner(user)
- end
- ten_most_recently_active_group_owners = users.sort_by(&:last_sign_in_at).last(10)
- excluded_group_owners = users - ten_most_recently_active_group_owners
-
- requester1 = create(:group_member, group: group, user: assignee)
- requester2 = create(:group_member, group: group, user: non_member)
-
- service.create_member_access_request(requester1)
- service.create_member_access_request(requester2)
-
- ten_most_recently_active_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: non_member).count).to eq 1
- end
+ context 'when request is raised for group' do
+ it_behaves_like 'member access request is raised' do
+ let_it_be(:source) { create(:group, :public) }
+ let_it_be(:requester1) { create(:group_member, :access_request, group: source, user: assignee) }
+ let_it_be(:requester2) { create(:group_member, :access_request, group: source, user: non_member) }
+ end
+ end
- excluded_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 0
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: non_member).count).to eq 0
- end
+ context 'when request is raised for project' do
+ it_behaves_like 'member access request is raised' do
+ let_it_be(:source) { create(:project, :public) }
+ let_it_be(:requester1) { create(:project_member, :access_request, project: source, user: assignee) }
+ let_it_be(:requester2) { create(:project_member, :access_request, project: source, user: non_member) }
end
end
end
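
A standalone sketch (not part of the patch) of the RSpec technique the hunk above introduces: the previously group-only examples become a source-agnostic shared example group, and it_behaves_like blocks supply source, requester1, and requester2 for both groups and projects. The sketch uses plain hashes and strings in place of GitLab factories, so every name in it is illustrative; it needs only the rspec gem.

require 'rspec/autorun'

RSpec.describe 'member access request notifications (pattern sketch)' do
  shared_examples 'member access request is raised' do
    it 'records one todo per source owner' do
      # Build one todo-like hash per owner, authored by the requester.
      todos = source[:owners].map { |owner| { user: owner, author: requester1 } }

      expect(todos.size).to eq(source[:owners].size)
      expect(todos).to all(include(author: requester1))
    end
  end

  context 'when the request is raised for a group' do
    it_behaves_like 'member access request is raised' do
      let(:source)     { { type: :group, owners: %w[alice bob] } }
      let(:requester1) { 'carol' }
    end
  end

  context 'when the request is raised for a project' do
    it_behaves_like 'member access request is raised' do
      let(:source)     { { type: :project, owners: %w[dave erin] } }
      let(:requester1) { 'frank' }
    end
  end
end
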
diff --git a/spec/services/users/block_service_spec.rb b/spec/services/users/block_service_spec.rb
index 45a5b1e5100..7ff9a887f38 100644
--- a/spec/services/users/block_service_spec.rb
+++ b/spec/services/users/block_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Users::BlockService do
- let(:current_user) { create(:admin) }
+ let_it_be(:current_user) { create(:admin) }
subject(:service) { described_class.new(current_user) }
@@ -18,6 +18,15 @@ RSpec.describe Users::BlockService do
it "change the user's state" do
expect { operation }.to change { user.state }.to('blocked')
end
+
+ it 'saves a custom attribute', :aggregate_failures, :freeze_time, feature_category: :insider_threat do
+ operation
+
+ custom_attribute = user.custom_attributes.last
+
+ expect(custom_attribute.key).to eq(UserCustomAttribute::BLOCKED_BY)
+ expect(custom_attribute.value).to eq("#{current_user.username}/#{current_user.id}+#{Time.current}")
+ end
end
context 'when failed' do
diff --git a/spec/services/users/signup_service_spec.rb b/spec/services/users/signup_service_spec.rb
index 7169401ab34..ef532e01d0b 100644
--- a/spec/services/users/signup_service_spec.rb
+++ b/spec/services/users/signup_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Users::SignupService do
it 'updates the name attribute' do
result = update_user(user, name: 'New Name')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.name).to eq('New Name')
end
@@ -18,8 +18,8 @@ RSpec.describe Users::SignupService do
result = update_user(user, name: '')
expect(user.reload.name).not_to be_blank
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to include("Name can't be blank")
+ expect(result.success?).to be(false)
+ expect(result.message).to include("Name can't be blank")
end
end
@@ -27,7 +27,7 @@ RSpec.describe Users::SignupService do
it 'updates the role attribute' do
result = update_user(user, role: 'development_team_lead')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.role).to eq('development_team_lead')
end
@@ -35,8 +35,8 @@ RSpec.describe Users::SignupService do
result = update_user(user, role: '')
expect(user.reload.role).not_to be_blank
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Role can't be blank")
+ expect(result.success?).to be(false)
+ expect(result.message).to eq("Role can't be blank")
end
end
@@ -44,7 +44,7 @@ RSpec.describe Users::SignupService do
it 'updates the setup_for_company attribute' do
result = update_user(user, setup_for_company: 'false')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.setup_for_company).to be(false)
end
@@ -57,8 +57,8 @@ RSpec.describe Users::SignupService do
result = update_user(user, setup_for_company: '')
expect(user.reload.setup_for_company).not_to be_blank
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Setup for company can't be blank")
+ expect(result.success?).to be(false)
+ expect(result.message).to eq("Setup for company can't be blank")
end
end
@@ -66,7 +66,7 @@ RSpec.describe Users::SignupService do
it 'returns success when setup_for_company is blank' do
result = update_user(user, setup_for_company: '')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.setup_for_company).to be(nil)
end
end
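
The assertions above track a change in the service's return value: a bare { status: :success } / { status: :error, message: ... } hash gives way to a result object that responds to success? and message (GitLab's ServiceResponse). A minimal standalone sketch of that shape, with a hypothetical update_user_sketch standing in for the real service:

# Struct-based stand-in for a response object; only the interface matters here.
Result = Struct.new(:status, :message, keyword_init: true) do
  def success?
    status == :success
  end

  def error?
    status == :error
  end
end

def update_user_sketch(name:)
  # Mirrors the spec's "Name can't be blank" failure path.
  return Result.new(status: :error, message: "Name can't be blank") if name.to_s.strip.empty?

  Result.new(status: :success)
end

result = update_user_sketch(name: '')
result.success?  # => false
result.message   # => "Name can't be blank"
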
diff --git a/spec/services/users/unblock_service_spec.rb b/spec/services/users/unblock_service_spec.rb
new file mode 100644
index 00000000000..25ee99427ab
--- /dev/null
+++ b/spec/services/users/unblock_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UnblockService do
+ let_it_be(:current_user) { create(:admin) }
+
+ subject(:service) { described_class.new(current_user) }
+
+ describe '#execute' do
+ subject(:operation) { service.execute(user) }
+
+ context 'when successful' do
+ let(:user) { create(:user, :blocked) }
+
+ it { expect(operation.success?).to eq(true) }
+
+ it "change the user's state" do
+ expect { operation }.to change { user.active? }.to(true)
+ end
+
+ it 'saves a custom attribute', :aggregate_failures, :freeze_time, feature_category: :insider_threat do
+ operation
+
+ custom_attribute = user.custom_attributes.last
+
+ expect(custom_attribute.key).to eq(UserCustomAttribute::UNBLOCKED_BY)
+ expect(custom_attribute.value).to eq("#{current_user.username}/#{current_user.id}+#{Time.current}")
+ end
+ end
+
+ context 'when failed' do
+ let(:user) { create(:user) }
+
+ it 'returns error result', :aggregate_failures do
+ expect(operation.error?).to eq(true)
+ expect(operation[:message]).to include(/State cannot transition/)
+ end
+
+ it "does not change the user's state" do
+ expect { operation }.not_to change { user.state }
+ end
+ end
+ end
+end
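
Both the block and unblock specs tag the custom-attribute example with :freeze_time because the expected value interpolates Time.current, so the comparison only holds while the clock is pinned. A standalone sketch of that interaction, assuming the rspec and activesupport gems; the "root/1" admin is illustrative, while the "<username>/<id>+<timestamp>" shape mirrors the expectations above.

require 'rspec/autorun'
require 'active_support/testing/time_helpers'
require 'active_support/core_ext/time'

RSpec.describe 'custom attribute value under a frozen clock' do
  include ActiveSupport::Testing::TimeHelpers

  it 'stays assertable because Time.current stops moving' do
    freeze_time do
      admin = Struct.new(:username, :id).new('root', 1)

      # Same "<username>/<id>+<timestamp>" shape the specs expect.
      value = "#{admin.username}/#{admin.id}+#{Time.current}"

      expect(value).to eq("root/1+#{Time.current}")
    end
  end
end
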
diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb
index a952486ee64..049c90f20b0 100644
--- a/spec/services/work_items/create_service_spec.rb
+++ b/spec/services/work_items/create_service_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe WorkItems::CreateService do
end
it_behaves_like 'fails creating work item and returns errors' do
- let(:error_message) { 'No matching task found. Make sure that you are adding a valid task ID.' }
+ let(:error_message) { 'No matching work item found. Make sure that you are adding a valid work item ID.' }
end
end
end
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
index 2f2e830845a..5884847eac3 100644
--- a/spec/services/work_items/parent_links/create_service_spec.rb
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol
shared_examples 'returns not found error' do
it 'returns error' do
- error = "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} ID."
+ error = "No matching work item found. Make sure that you are adding a valid work item ID."
is_expected.to eq(service_error(error))
end
diff --git a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
index 5a5bb8a1674..6285b43311d 100644
--- a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate
let_it_be(:existing_link) { create(:parent_link, work_item: child_work_item, work_item_parent: work_item) }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Hierarchy) } }
- let(:not_found_error) { 'No matching task found. Make sure that you are adding a valid task ID.' }
+ let(:not_found_error) { 'No matching work item found. Make sure that you are adding a valid work item ID.' }
shared_examples 'raises a WidgetError' do
it { expect { subject }.to raise_error(described_class::WidgetError, message) }
@@ -70,7 +70,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate
let(:params) { { children: [child_work_item] } }
it_behaves_like 'raises a WidgetError' do
- let(:message) { 'Task(s) already assigned' }
+ let(:message) { 'Work item(s) already assigned' }
end
end