gitlab.com/gitlab-org/gitlab-foss.git
path: root/spec
author    Douglas Barbosa Alexandre <dbalexandre@gmail.com>  2019-09-10 00:22:40 +0300
committer Douglas Barbosa Alexandre <dbalexandre@gmail.com>  2019-09-10 00:22:40 +0300
commit    58423fa8757726b4840d3b8db334212ed7c705a4 (patch)
tree      7696ba5f1d40c6b9ecad50e6da4b45275cf36d4f /spec
parent    ed1192c511057230dfd90f8b873b7813a48ecd6b (diff)
parent    814d12b8c72b0e8a4f9025ffb1373c11b36a061a (diff)
Merge remote-tracking branch 'origin/master' into camilstaps/gitlab-ce-new-66023-public-private-fork-counts
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/admin/clusters_controller_spec.rb | 19
-rw-r--r--  spec/controllers/groups/clusters_controller_spec.rb | 19
-rw-r--r--  spec/controllers/projects/clusters_controller_spec.rb | 23
-rw-r--r--  spec/controllers/projects/services_controller_spec.rb | 130
-rw-r--r--  spec/controllers/registrations_controller_spec.rb | 20
-rw-r--r--  spec/factories/external_pull_requests.rb | 17
-rw-r--r--  spec/factories/pages_domains.rb | 83
-rw-r--r--  spec/features/admin/clusters/applications_spec.rb | 21
-rw-r--r--  spec/features/admin/dashboard_spec.rb | 2
-rw-r--r--  spec/features/clusters/installing_applications_shared_examples.rb | 228
-rw-r--r--  spec/features/groups/clusters/applications_spec.rb | 23
-rw-r--r--  spec/features/merge_request/user_posts_notes_spec.rb | 11
-rw-r--r--  spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb | 2
-rw-r--r--  spec/features/projects/clusters/applications_spec.rb | 229
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 3
-rw-r--r--  spec/features/projects/clusters_spec.rb | 24
-rw-r--r--  spec/finders/issues_finder_spec.rb | 50
-rw-r--r--  spec/finders/merge_requests_finder_spec.rb | 20
-rw-r--r--  spec/fixtures/api/schemas/entities/merge_request_noteable.json | 4
-rw-r--r--  spec/fixtures/api/schemas/statistics.json | 29
-rw-r--r--  spec/frontend/admin/statistics_panel/components/app_spec.js | 73
-rw-r--r--  spec/frontend/admin/statistics_panel/mock_data.js | 15
-rw-r--r--  spec/frontend/admin/statistics_panel/store/actions_spec.js | 115
-rw-r--r--  spec/frontend/admin/statistics_panel/store/getters_spec.js | 48
-rw-r--r--  spec/frontend/admin/statistics_panel/store/mutations_spec.js | 41
-rw-r--r--  spec/frontend/clusters/components/applications_spec.js | 4
-rw-r--r--  spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js | 115
-rw-r--r--  spec/javascripts/boards/board_new_issue_spec.js | 26
-rw-r--r--  spec/javascripts/boards/mock_data.js | 2
-rw-r--r--  spec/lib/gitlab/ci/build/policy/refs_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/danger/helper_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 5
-rw-r--r--  spec/lib/gitlab/import_export/attribute_configuration_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/attributes_finder_spec.rb | 230
-rw-r--r--  spec/lib/gitlab/import_export/config_spec.rb | 284
-rw-r--r--  spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb | 272
-rw-r--r--  spec/lib/gitlab/import_export/model_configuration_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_restorer_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_saver_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/import_export/reader_spec.rb | 105
-rw-r--r--  spec/lib/gitlab/import_export/relation_rename_service_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 14
-rw-r--r--  spec/lib/gitlab/pages_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/project_search_results_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/search_results_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/snippet_search_results_spec.rb | 4
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 20
-rw-r--r--  spec/models/external_pull_request_spec.rb | 220
-rw-r--r--  spec/models/pages_domain_spec.rb | 18
-rw-r--r--  spec/models/project_spec.rb | 1
-rw-r--r--  spec/models/user_spec.rb | 2
-rw-r--r--  spec/requests/api/internal/pages_spec.rb | 54
-rw-r--r--  spec/requests/api/settings_spec.rb | 38
-rw-r--r--  spec/requests/api/statistics_spec.rb | 91
-rw-r--r--  spec/requests/api/wikis_spec.rb | 13
-rw-r--r--  spec/requests/projects/uploads_spec.rb | 38
-rw-r--r--  spec/serializers/merge_request_serializer_spec.rb | 5
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 489
-rw-r--r--  spec/services/external_pull_requests/create_pipeline_service_spec.rb | 72
-rw-r--r--  spec/services/git/branch_push_service_spec.rb | 14
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 13
-rw-r--r--  spec/support/helpers/search_helpers.rb | 4
-rw-r--r--  spec/support/helpers/workhorse_helpers.rb | 31
-rw-r--r--  spec/support/import_export/import_export.yml | 25
-rw-r--r--  spec/validators/named_ecdsa_key_validator_spec.rb | 54
-rw-r--r--  spec/workers/update_external_pull_requests_worker_spec.rb | 54
69 files changed, 3289 insertions(+), 573 deletions(-)
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index e5501535875..afc059d7561 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -73,8 +73,8 @@ describe Admin::ClustersController do
end
describe 'GET #new' do
- def get_new
- get :new
+ def get_new(provider: 'gke')
+ get :new, params: { provider: provider }
end
describe 'functionality for new cluster' do
@@ -85,6 +85,7 @@ describe Admin::ClustersController do
end
before do
+ stub_feature_flags(create_eks_clusters: false)
allow(SecureRandom).to receive(:hex).and_return(key)
end
@@ -94,6 +95,20 @@ describe Admin::ClustersController do
expect(assigns(:authorize_url)).to include(key)
expect(session[session_key_for_redirect_uri]).to eq(new_admin_cluster_path)
end
+
+ context 'when create_eks_clusters feature flag is enabled' do
+ before do
+ stub_feature_flags(create_eks_clusters: true)
+ end
+
+ context 'when selected provider is gke and no valid gcp token exists' do
+ it 'redirects to gcp authorize_url' do
+ get_new
+
+ expect(response).to redirect_to(assigns(:authorize_url))
+ end
+ end
+ end
end
context 'when omniauth has not configured' do
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 09677b42887..5a3ba51d4df 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -85,8 +85,8 @@ describe Groups::ClustersController do
end
describe 'GET new' do
- def go
- get :new, params: { group_id: group }
+ def go(provider: 'gke')
+ get :new, params: { group_id: group, provider: provider }
end
describe 'functionality for new cluster' do
@@ -97,6 +97,7 @@ describe Groups::ClustersController do
end
before do
+ stub_feature_flags(create_eks_clusters: false)
allow(SecureRandom).to receive(:hex).and_return(key)
end
@@ -106,6 +107,20 @@ describe Groups::ClustersController do
expect(assigns(:authorize_url)).to include(key)
expect(session[session_key_for_redirect_uri]).to eq(new_group_cluster_path(group))
end
+
+ context 'when create_eks_clusters feature flag is enabled' do
+ before do
+ stub_feature_flags(create_eks_clusters: true)
+ end
+
+ context 'when selected provider is gke and no valid gcp token exists' do
+ it 'redirects to gcp authorize_url' do
+ go
+
+ expect(response).to redirect_to(assigns(:authorize_url))
+ end
+ end
+ end
end
context 'when omniauth has not configured' do
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 35cbab57037..8ac72df5d20 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -79,8 +79,12 @@ describe Projects::ClustersController do
end
describe 'GET new' do
- def go
- get :new, params: { namespace_id: project.namespace, project_id: project }
+ def go(provider: 'gke')
+ get :new, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ provider: provider
+ }
end
describe 'functionality for new cluster' do
@@ -91,6 +95,7 @@ describe Projects::ClustersController do
end
before do
+ stub_feature_flags(create_eks_clusters: false)
allow(SecureRandom).to receive(:hex).and_return(key)
end
@@ -100,6 +105,20 @@ describe Projects::ClustersController do
expect(assigns(:authorize_url)).to include(key)
expect(session[session_key_for_redirect_uri]).to eq(new_project_cluster_path(project))
end
+
+ context 'when create_eks_clusters feature flag is enabled' do
+ before do
+ stub_feature_flags(create_eks_clusters: true)
+ end
+
+ context 'when selected provider is gke and no valid gcp token exists' do
+ it 'redirects to gcp authorize_url' do
+ go
+
+ expect(response).to redirect_to(assigns(:authorize_url))
+ end
+ end
+ end
end
context 'when omniauth has not configured' do
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 180d997a8e8..0c074714bf3 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -19,9 +19,9 @@ describe Projects::ServicesController do
it 'renders 404' do
allow_any_instance_of(Service).to receive(:can_test?).and_return(false)
- put :test, params: { namespace_id: project.namespace, project_id: project, id: service.to_param }
+ put :test, params: project_params
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -29,11 +29,11 @@ describe Projects::ServicesController do
let(:service_params) { { active: 'true', url: '' } }
it 'returns error messages in JSON response' do
- put :test, params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params }
+ put :test, params: project_params(service: service_params)
- expect(json_response['message']).to eq "Validations failed."
+ expect(json_response['message']).to eq 'Validations failed.'
expect(json_response['service_response']).to include "Url can't be blank"
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to be_successful
end
end
@@ -47,9 +47,9 @@ describe Projects::ServicesController do
it 'returns success' do
allow_any_instance_of(MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
- put :test, params: { namespace_id: project.namespace, project_id: project, id: service.to_param }
+ put :test, params: project_params
- expect(response.status).to eq(200)
+ expect(response).to be_successful
end
end
@@ -57,11 +57,11 @@ describe Projects::ServicesController do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 200, body: '{}')
- expect(Gitlab::HTTP).to receive(:get).with("/rest/api/2/serverInfo", any_args).and_call_original
+ expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
- put :test, params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params }
+ put :test, params: project_params(service: service_params)
- expect(response.status).to eq(200)
+ expect(response).to be_successful
end
end
@@ -69,14 +69,23 @@ describe Projects::ServicesController do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 200, body: '{}')
- expect(Gitlab::HTTP).to receive(:get).with("/rest/api/2/serverInfo", any_args).and_call_original
+ expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
- put :test, params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params }
+ put :test, params: project_params(service: service_params)
- expect(response.status).to eq(200)
+ expect(response).to be_successful
end
context 'when service is configured for the first time' do
+ let(:service_params) do
+ {
+ 'active' => '1',
+ 'push_events' => '1',
+ 'token' => 'token',
+ 'project_url' => 'http://test.com'
+ }
+ end
+
before do
allow_any_instance_of(ServiceHook).to receive(:execute).and_return(true)
end
@@ -84,7 +93,7 @@ describe Projects::ServicesController do
it 'persist the object' do
do_put
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to be_successful
expect(json_response).to be_empty
expect(BuildkiteService.first).to be_present
end
@@ -92,18 +101,14 @@ describe Projects::ServicesController do
it 'creates the ServiceHook object' do
do_put
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to be_successful
expect(json_response).to be_empty
expect(BuildkiteService.first.service_hook).to be_present
end
def do_put
- put :test, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: 'buildkite',
- service: { 'active' => '1', 'push_events' => '1', token: 'token', 'project_url' => 'http://test.com' }
- }
+ put :test, params: project_params(id: 'buildkite',
+ service: service_params)
end
end
end
@@ -113,9 +118,9 @@ describe Projects::ServicesController do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 404)
- put :test, params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: service_params }
+ put :test, params: project_params(service: service_params)
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
'message' => 'Test failed.',
@@ -127,39 +132,70 @@ describe Projects::ServicesController do
end
describe 'PUT #update' do
- context 'when param `active` is set to true' do
- it 'activates the service and redirects to integrations paths' do
- put :update,
- params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: { active: true } }
+ describe 'as HTML' do
+ let(:service_params) { { active: true } }
- expect(response).to redirect_to(project_settings_integrations_path(project))
- expect(flash[:notice]).to eq 'Jira activated.'
+ before do
+ put :update, params: project_params(service: service_params)
+ end
+
+ context 'when param `active` is set to true' do
+ it 'activates the service and redirects to integrations paths' do
+ expect(response).to redirect_to(project_settings_integrations_path(project))
+ expect(flash[:notice]).to eq 'Jira activated.'
+ end
+ end
+
+ context 'when param `active` is set to false' do
+ let(:service_params) { { active: false } }
+
+ it 'does not activate the service but saves the settings' do
+ expect(flash[:notice]).to eq 'Jira settings saved, but not activated.'
+ end
end
- end
- context 'when param `active` is set to false' do
- it 'does not activate the service but saves the settings' do
- put :update,
- params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: { active: false } }
+ context 'when activating Jira service from a template' do
+ let(:service) do
+ create(:jira_service, project: project, template: true)
+ end
- expect(flash[:notice]).to eq 'Jira settings saved, but not activated.'
+ it 'activate Jira service from template' do
+ expect(flash[:notice]).to eq 'Jira activated.'
+ end
end
end
- context 'when activating Jira service from a template' do
- let(:template_service) { create(:jira_service, project: project, template: true) }
+ describe 'as JSON' do
+ before do
+ put :update, params: project_params(service: service_params, format: :json)
+ end
+
+ context 'when update succeeds' do
+ let(:service_params) { { url: 'http://example.com' } }
+
+ it 'returns JSON response with no errors' do
+ expect(response).to be_successful
+ expect(json_response).to include('active' => true, 'errors' => {})
+ end
+ end
- it 'activate Jira service from template' do
- put :update, params: { namespace_id: project.namespace, project_id: project, id: service.to_param, service: { active: true } }
+ context 'when update fails' do
+ let(:service_params) { { url: '' } }
- expect(flash[:notice]).to eq 'Jira activated.'
+ it 'returns JSON response with errors' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response).to include(
+ 'active' => true,
+ 'errors' => { 'url' => ['must be a valid URL', %{can't be blank}] }
+ )
+ end
end
end
end
- describe "GET #edit" do
+ describe 'GET #edit' do
before do
- get :edit, params: { namespace_id: project.namespace, project_id: project, id: 'jira' }
+ get :edit, params: project_params(id: 'jira')
end
context 'with approved services' do
@@ -168,4 +204,14 @@ describe Projects::ServicesController do
end
end
end
+
+ private
+
+ def project_params(opts = {})
+ opts.reverse_merge(
+ namespace_id: project.namespace,
+ project_id: project,
+ id: service.to_param
+ )
+ end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 35487682462..5d87dbdee8b 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -74,17 +74,19 @@ describe RegistrationsController do
end
context 'when reCAPTCHA is enabled' do
- def fail_recaptcha
- # Without this, `verify_recaptcha` arbitrarily returns true in test env
- Recaptcha.configuration.skip_verify_env.delete('test')
- end
-
before do
stub_application_setting(recaptcha_enabled: true)
end
+ after do
+ # Avoid test ordering issue and ensure `verify_recaptcha` returns true
+ unless Recaptcha.configuration.skip_verify_env.include?('test')
+ Recaptcha.configuration.skip_verify_env << 'test'
+ end
+ end
+
it 'displays an error when the reCAPTCHA is not solved' do
- fail_recaptcha
+ allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
post(:create, params: user_params)
@@ -93,11 +95,6 @@ describe RegistrationsController do
end
it 'redirects to the dashboard when the recaptcha is solved' do
- # Avoid test ordering issue and ensure `verify_recaptcha` returns true
- unless Recaptcha.configuration.skip_verify_env.include?('test')
- Recaptcha.configuration.skip_verify_env << 'test'
- end
-
post(:create, params: user_params)
expect(flash[:notice]).to include 'Welcome! You have signed up successfully.'
@@ -105,7 +102,6 @@ describe RegistrationsController do
it 'does not require reCAPTCHA if disabled by feature flag' do
stub_feature_flags(registrations_recaptcha: false)
- fail_recaptcha
post(:create, params: user_params)
diff --git a/spec/factories/external_pull_requests.rb b/spec/factories/external_pull_requests.rb
new file mode 100644
index 00000000000..08d0fa4d419
--- /dev/null
+++ b/spec/factories/external_pull_requests.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :external_pull_request do
+ sequence(:pull_request_iid)
+ project
+ source_branch 'feature'
+ source_repository 'the-repository'
+ source_sha '97de212e80737a608d939f648d959671fb0a0142'
+ target_branch 'master'
+ target_repository 'the-repository'
+ target_sha 'a09386439ca39abe575675ffd4b89ae824fec22f'
+ status :open
+
+ trait(:closed) { status 'closed' }
+ end
+end
diff --git a/spec/factories/pages_domains.rb b/spec/factories/pages_domains.rb
index ee5be82cd19..ae3988bdd69 100644
--- a/spec/factories/pages_domains.rb
+++ b/spec/factories/pages_domains.rb
@@ -271,5 +271,88 @@ ZDXgrA==
auto_ssl_enabled { true }
certificate_source { :gitlab_provided }
end
+
+ trait :explicit_ecdsa do
+ certificate '-----BEGIN CERTIFICATE-----
+MIID1zCCAzkCCQDatOIwBlktwjAKBggqhkjOPQQDAjBPMQswCQYDVQQGEwJVUzEL
+MAkGA1UECAwCTlkxCzAJBgNVBAcMAk5ZMQswCQYDVQQLDAJJVDEZMBcGA1UEAwwQ
+dGVzdC1jZXJ0aWZpY2F0ZTAeFw0xOTA4MjkxMTE1NDBaFw0yMTA4MjgxMTE1NDBa
+ME8xCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOWTELMAkGA1UEBwwCTlkxCzAJBgNV
+BAsMAklUMRkwFwYDVQQDDBB0ZXN0LWNlcnRpZmljYXRlMIICXDCCAc8GByqGSM49
+AgEwggHCAgEBME0GByqGSM49AQECQgH/////////////////////////////////
+/////////////////////////////////////////////////////zCBngRCAf//
+////////////////////////////////////////////////////////////////
+///////////////////8BEFRlT65YY4cmh+SmiGgtoVA7qLacluZsxXzuLSJkY7x
+CeFWGTlR7H6TexZSwL07sb8HNXPfiD0sNPHvRR/Ua1A/AAMVANCeiAApHLhTlsxn
+FzkyhKqg2mS6BIGFBADGhY4GtwQE6c2ePstmI5W0QpxkgTkFP7Uh+CivYGtNPbqh
+S1537+dZKP4dwSei/6jeM0izwYVqQpv5fn4xwuW9ZgEYOSlqeJo7wARcil+0LH0b
+2Zj1RElXm0RoF6+9Fyc+ZiyX7nKZXvQmQMVQuQE/rQdhNTxwhqJywkCIvpR2n9Fm
+UAJCAf//////////////////////////////////////////+lGGh4O/L5Zrf8wB
+SPcJpdA7tcm4iZxHrrtvtx6ROGQJAgEBA4GGAAQBVG/4c/hgl36toHj+eGL4pqv7
+l7M+ZKQJ4vz0Y9E6xIx+gvfVaZ58krmbBAP53ikwneQbFdcvw3L/ACPEib/qWjkB
+ogykguy3OwHtKLYNnDWIsfiLumEjElhcBMZVXiXhb5txf11uXAWn5n6Qhey5YKPM
+NjLLqDqaG19efCLCd21A0TcwCgYIKoZIzj0EAwIDgYsAMIGHAkEm68kYFVnN1c2N
+OjSJpIDdFWGVYJHyMDI5WgQyhm4hAioXJ0T22Zab8Wmq+hBYRJNcHoaV894blfqR
+V3ZJgam8EQJCAcnPpJQ0IqoT1pAQkaL3+Ka8ZaaCd6/8RnoDtGvWljisuyH65SRu
+kmYv87bZe1KqOZDoaDBdfVsoxcGbik19lBPV
+-----END CERTIFICATE-----'
+
+ key '-----BEGIN EC PARAMETERS-----
+MIIBwgIBATBNBgcqhkjOPQEBAkIB////////////////////////////////////
+//////////////////////////////////////////////////8wgZ4EQgH/////
+////////////////////////////////////////////////////////////////
+/////////////////ARBUZU+uWGOHJofkpohoLaFQO6i2nJbmbMV87i0iZGO8Qnh
+Vhk5Uex+k3sWUsC9O7G/BzVz34g9LDTx70Uf1GtQPwADFQDQnogAKRy4U5bMZxc5
+MoSqoNpkugSBhQQAxoWOBrcEBOnNnj7LZiOVtEKcZIE5BT+1Ifgor2BrTT26oUte
+d+/nWSj+HcEnov+o3jNIs8GFakKb+X5+McLlvWYBGDkpaniaO8AEXIpftCx9G9mY
+9URJV5tEaBevvRcnPmYsl+5ymV70JkDFULkBP60HYTU8cIaicsJAiL6Udp/RZlAC
+QgH///////////////////////////////////////////pRhoeDvy+Wa3/MAUj3
+CaXQO7XJuImcR667b7cekThkCQIBAQ==
+-----END EC PARAMETERS-----
+-----BEGIN EC PRIVATE KEY-----
+MIICnQIBAQRCAZZRG4FJO+OK29ygycrNzjxQDB+dp+QPo1Pk6RAl5PcraohyhFnI
+MGUL4ba1efZUxCbAWxjVRSi7QEUNYCCdUPAtoIIBxjCCAcICAQEwTQYHKoZIzj0B
+AQJCAf//////////////////////////////////////////////////////////
+////////////////////////////MIGeBEIB////////////////////////////
+//////////////////////////////////////////////////////////wEQVGV
+PrlhjhyaH5KaIaC2hUDuotpyW5mzFfO4tImRjvEJ4VYZOVHsfpN7FlLAvTuxvwc1
+c9+IPSw08e9FH9RrUD8AAxUA0J6IACkcuFOWzGcXOTKEqqDaZLoEgYUEAMaFjga3
+BATpzZ4+y2YjlbRCnGSBOQU/tSH4KK9ga009uqFLXnfv51ko/h3BJ6L/qN4zSLPB
+hWpCm/l+fjHC5b1mARg5KWp4mjvABFyKX7QsfRvZmPVESVebRGgXr70XJz5mLJfu
+cple9CZAxVC5AT+tB2E1PHCGonLCQIi+lHaf0WZQAkIB////////////////////
+///////////////////////6UYaHg78vlmt/zAFI9wml0Du1ybiJnEeuu2+3HpE4
+ZAkCAQGhgYkDgYYABAFUb/hz+GCXfq2geP54Yvimq/uXsz5kpAni/PRj0TrEjH6C
+99VpnnySuZsEA/neKTCd5BsV1y/Dcv8AI8SJv+paOQGiDKSC7Lc7Ae0otg2cNYix
++Iu6YSMSWFwExlVeJeFvm3F/XW5cBafmfpCF7Llgo8w2MsuoOpobX158IsJ3bUDR
+Nw==
+-----END EC PRIVATE KEY-----'
+ end
+
+ trait :ecdsa do
+ certificate '-----BEGIN CERTIFICATE-----
+MIIB8zCCAVUCCQCGKuPQ6SBxUTAKBggqhkjOPQQDAjA+MQswCQYDVQQGEwJVUzEL
+MAkGA1UECAwCVVMxCzAJBgNVBAcMAlVTMRUwEwYDVQQDDAxzaHVzaGxpbi5kZXYw
+HhcNMTkwOTAyMDkyMDUxWhcNMjEwOTAxMDkyMDUxWjA+MQswCQYDVQQGEwJVUzEL
+MAkGA1UECAwCVVMxCzAJBgNVBAcMAlVTMRUwEwYDVQQDDAxzaHVzaGxpbi5kZXYw
+gZswEAYHKoZIzj0CAQYFK4EEACMDgYYABAH9Jd7ZWnTasgltZRbIMreihycOh/G4
+TXpkp8tTtEsuD+sh8au3Jywsi89RSZ6vgVoCY7//DQ2vamYnyBZqbL+cTQBsQ7wD
+UEaSyP0R3P4b6Ox347pYzXwSdSOra9Cm4TMQe+prVMesxulqIm7G7CTI+9J8LHlJ
+z0wUDQz/o+tUSYwv6zAKBggqhkjOPQQDAgOBiwAwgYcCQUOlTnn2QP/uYSh1dUSl
+R9WYUg5+PQMg7kS+4K/5+5gonWCvaMcP+2P7hltUcvq41l3uMKKCZRU/x60/FMHc
+1ZXdAkIBuVtm9RJXziNOKS4TcpH9os/FuREW8YQlpec58LDZdlivcHnikHZ4LCri
+T7zu3VY6Rq+V/IKpsQwQjmoTJ0IpCM8=
+-----END CERTIFICATE-----'
+
+ key '-----BEGIN EC PARAMETERS-----
+BgUrgQQAIw==
+-----END EC PARAMETERS-----
+-----BEGIN EC PRIVATE KEY-----
+MIHbAgEBBEFa72+eREW25IHbke0TiWFdW1R1ad9Nyqaz7CDtv5Kqdgd6Kcl8V2az
+Lr6z1PS+JSERWzRP+fps7kdFRrtqy/ECpKAHBgUrgQQAI6GBiQOBhgAEAf0l3tla
+dNqyCW1lFsgyt6KHJw6H8bhNemSny1O0Sy4P6yHxq7cnLCyLz1FJnq+BWgJjv/8N
+Da9qZifIFmpsv5xNAGxDvANQRpLI/RHc/hvo7HfjuljNfBJ1I6tr0KbhMxB76mtU
+x6zG6WoibsbsJMj70nwseUnPTBQNDP+j61RJjC/r
+-----END EC PRIVATE KEY-----'
+ end
end
end
diff --git a/spec/features/admin/clusters/applications_spec.rb b/spec/features/admin/clusters/applications_spec.rb
new file mode 100644
index 00000000000..8310811b43d
--- /dev/null
+++ b/spec/features/admin/clusters/applications_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative '../../../../spec/features/clusters/installing_applications_shared_examples'
+
+describe 'Instance-level Cluster Applications', :js do
+ include GoogleApi::CloudPlatformHelpers
+
+ let(:user) { create(:admin) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'Installing applications' do
+ include_examples "installing applications on a cluster" do
+ let(:cluster_path) { admin_cluster_path(cluster) }
+ let(:cluster_factory_args) { [:instance] }
+ end
+ end
+end
diff --git a/spec/features/admin/dashboard_spec.rb b/spec/features/admin/dashboard_spec.rb
index e204e0a515d..6cb345c5066 100644
--- a/spec/features/admin/dashboard_spec.rb
+++ b/spec/features/admin/dashboard_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'admin visits dashboard' do
+describe 'admin visits dashboard', :js do
include ProjectForksHelper
before do
diff --git a/spec/features/clusters/installing_applications_shared_examples.rb b/spec/features/clusters/installing_applications_shared_examples.rb
new file mode 100644
index 00000000000..cb8fd8c607c
--- /dev/null
+++ b/spec/features/clusters/installing_applications_shared_examples.rb
@@ -0,0 +1,228 @@
+# frozen_string_literal: true
+
+shared_examples "installing applications on a cluster" do
+ before do
+ visit cluster_path
+ end
+
+ context 'when cluster is being created' do
+ let(:cluster) { create(:cluster, :providing_by_gcp, *cluster_factory_args) }
+
+ it 'user is unable to install applications' do
+ expect(page).not_to have_text('Helm')
+ expect(page).not_to have_text('Install')
+ end
+ end
+
+ context 'when cluster is created' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, *cluster_factory_args) }
+
+ it 'user can install applications' do
+ wait_for_requests
+
+ page.within('.js-cluster-application-row-helm') do
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to be_nil
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install')
+ end
+ end
+
+ context 'when user installs Helm' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+
+ page.within('.js-cluster-application-row-helm') do
+ page.find(:css, '.js-cluster-application-install-button').click
+ end
+
+ wait_for_requests
+ end
+
+ it 'shows the status transition' do
+ page.within('.js-cluster-application-row-helm') do
+ # FE sends request and gets the response, then the buttons is "Installing"
+ expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_helm.make_installing!
+
+ # FE starts polling and update the buttons to "Installing"
+ expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_helm.make_installed!
+
+ expect(page).not_to have_css('button', exact_text: 'Install', visible: :all)
+ expect(page).not_to have_css('button', exact_text: 'Installing', visible: :all)
+ expect(page).to have_css('.js-cluster-application-uninstall-button:not([disabled])', exact_text: 'Uninstall')
+ end
+
+ expect(page).to have_content('Helm Tiller was successfully installed on your Kubernetes cluster')
+ end
+ end
+
+ context 'when user installs Knative' do
+ before do
+ create(:clusters_applications_helm, :installed, cluster: cluster)
+ end
+
+ context 'on an abac cluster' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled, *cluster_factory_args) }
+
+ it 'shows info block and not be installable' do
+ page.within('.js-cluster-application-row-knative') do
+ expect(page).to have_css('.rbac-notice')
+ expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
+ end
+ end
+ end
+
+ context 'on an rbac cluster' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, *cluster_factory_args) }
+
+ it 'does not show callout block and be installable' do
+ page.within('.js-cluster-application-row-knative') do
+ expect(page).not_to have_css('p', text: 'You must have an RBAC-enabled cluster', visible: :all)
+ expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
+ end
+ end
+
+ describe 'when user clicks install button' do
+ def domainname_form_value
+ page.find('.js-knative-domainname').value
+ end
+
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+
+ page.within('.js-cluster-application-row-knative') do
+ expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
+
+ page.find('.js-knative-domainname').set("domain.example.org")
+
+ click_button 'Install'
+
+ wait_for_requests
+
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_knative.make_installing!
+ Clusters::Cluster.last.application_knative.make_installed!
+ Clusters::Cluster.last.application_knative.update_attribute(:external_ip, '127.0.0.1')
+ end
+ end
+
+ it 'shows status transition' do
+ page.within('.js-cluster-application-row-knative') do
+ expect(domainname_form_value).to eq('domain.example.org')
+ expect(page).to have_css('.js-cluster-application-uninstall-button', exact_text: 'Uninstall')
+ end
+
+ expect(page).to have_content('Knative was successfully installed on your Kubernetes cluster')
+ expect(page).to have_css('.js-knative-save-domain-button'), exact_text: 'Save changes'
+ end
+
+ it 'can then update the domain' do
+ page.within('.js-cluster-application-row-knative') do
+ expect(ClusterPatchAppWorker).to receive(:perform_async)
+
+ expect(domainname_form_value).to eq('domain.example.org')
+
+ page.find('.js-knative-domainname').set("new.domain.example.org")
+
+ click_button 'Save changes'
+
+ wait_for_requests
+
+ expect(domainname_form_value).to eq('new.domain.example.org')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when user installs Cert Manager' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+
+ create(:clusters_applications_helm, :installed, cluster: cluster)
+
+ page.within('.js-cluster-application-row-cert_manager') do
+ click_button 'Install'
+ end
+ end
+
+ it 'shows status transition' do
+ def email_form_value
+ page.find('.js-email').value
+ end
+
+ page.within('.js-cluster-application-row-cert_manager') do
+ expect(email_form_value).to eq(cluster.user.email)
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
+
+ page.find('.js-email').set("new_email@example.org")
+ Clusters::Cluster.last.application_cert_manager.make_installing!
+
+ expect(email_form_value).to eq('new_email@example.org')
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_cert_manager.make_installed!
+
+ expect(email_form_value).to eq('new_email@example.org')
+ expect(page).to have_css('.js-cluster-application-uninstall-button', exact_text: 'Uninstall')
+ end
+
+ expect(page).to have_content('Cert-Manager was successfully installed on your Kubernetes cluster')
+ end
+ end
+
+ context 'when user installs Ingress' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+
+ create(:clusters_applications_helm, :installed, cluster: cluster)
+
+ page.within('.js-cluster-application-row-ingress') do
+ expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
+ page.find(:css, '.js-cluster-application-install-button').click
+
+ wait_for_requests
+ end
+ end
+
+ it 'shows the status transition' do
+ page.within('.js-cluster-application-row-ingress') do
+ # FE sends request and gets the response, then the buttons is "Installing"
+ expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_ingress.make_installing!
+
+ # FE starts polling and update the buttons to "Installing"
+ expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
+
+ # The application becomes installed but we keep waiting for external IP address
+ Clusters::Cluster.last.application_ingress.make_installed!
+
+ expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installed')
+ expect(page).to have_selector('.js-no-endpoint-message')
+ expect(page).to have_selector('.js-ingress-ip-loading-icon')
+
+ # We receive the external IP address and display
+ Clusters::Cluster.last.application_ingress.update!(external_ip: '192.168.1.100')
+
+ expect(page).not_to have_css('button', exact_text: 'Install', visible: :all)
+ expect(page).not_to have_css('button', exact_text: 'Installing', visible: :all)
+ expect(page).to have_css('.js-cluster-application-uninstall-button:not([disabled])', exact_text: 'Uninstall')
+ expect(page).not_to have_css('p', text: 'The endpoint is in the process of being assigned', visible: :all)
+ expect(page.find('.js-endpoint').value).to eq('192.168.1.100')
+ end
+
+ expect(page).to have_content('Ingress was successfully installed on your Kubernetes cluster')
+ end
+ end
+ end
+end
diff --git a/spec/features/groups/clusters/applications_spec.rb b/spec/features/groups/clusters/applications_spec.rb
new file mode 100644
index 00000000000..5d48df234eb
--- /dev/null
+++ b/spec/features/groups/clusters/applications_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative '../../../../spec/features/clusters/installing_applications_shared_examples'
+
+describe 'Group-level Cluster Applications', :js do
+ include GoogleApi::CloudPlatformHelpers
+
+ let(:group) { create(:group) }
+ let(:user) { create(:user) }
+
+ before do
+ group.add_maintainer(user)
+ sign_in(user)
+ end
+
+ describe 'Installing applications' do
+ include_examples "installing applications on a cluster" do
+ let(:cluster_path) { group_cluster_path(group, cluster) }
+ let(:cluster_factory_args) { [:group, groups: [group]] }
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 435b3cd2555..7d89b8e97a6 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
describe 'Merge request > User posts notes', :js do
include NoteInteractionHelpers
- let(:project) { create(:project, :repository) }
+ set(:project) { create(:project, :repository) }
+
let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
@@ -33,17 +34,21 @@ describe 'Merge request > User posts notes', :js do
end
describe 'with text' do
+ let(:text) { 'This is awesome' }
+
before do
page.within('.js-main-target-form') do
- fill_in 'note[note]', with: 'This is awesome'
+ fill_in 'note[note]', with: text
end
end
- it 'has enable submit button and preview button' do
+ it 'has enable submit button, preview button and saves content to local storage' do
page.within('.js-main-target-form') do
expect(page).not_to have_css('.js-comment-button[disabled]')
expect(page).to have_css('.js-md-preview-button', visible: true)
end
+
+ expect(page.evaluate_script("localStorage['autosave/Note/MergeRequest/#{merge_request.id}']")).to eq(text)
end
end
end
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index 6262f1ce055..c42eb8560a4 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -64,7 +64,7 @@ describe 'Merge request > User selects branches for new MR', :js do
click_button "Check out branch"
- expect(page).to have_content 'git checkout -b orphaned-branch origin/orphaned-branch'
+ expect(page).to have_content 'git checkout -b "orphaned-branch" "origin/orphaned-branch"'
end
it 'allows filtering multiple dropdowns' do
diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb
index 3d15095e2da..ce971b158a3 100644
--- a/spec/features/projects/clusters/applications_spec.rb
+++ b/spec/features/projects/clusters/applications_spec.rb
@@ -1,8 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
+require_relative '../../../../spec/features/clusters/installing_applications_shared_examples'
-describe 'Clusters Applications', :js do
+describe 'Project-level Cluster Applications', :js do
include GoogleApi::CloudPlatformHelpers
let(:project) { create(:project) }
@@ -14,229 +15,9 @@ describe 'Clusters Applications', :js do
end
describe 'Installing applications' do
- before do
- visit project_cluster_path(project, cluster)
- end
-
- context 'when cluster is being created' do
- let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
-
- it 'user is unable to install applications' do
- expect(page).not_to have_css('.js-cluster-application-row-helm')
- expect(page).not_to have_css('.js-cluster-application-install-button')
- end
- end
-
- context 'when cluster is created' do
- let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
-
- it 'user can install applications' do
- wait_for_requests
-
- page.within('.js-cluster-application-row-helm') do
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to be_nil
- expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install')
- end
- end
-
- context 'when user installs Helm' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
-
- page.within('.js-cluster-application-row-helm') do
- page.find(:css, '.js-cluster-application-install-button').click
- end
-
- wait_for_requests
- end
-
- it 'they see status transition' do
- page.within('.js-cluster-application-row-helm') do
- # FE sends request and gets the response, then the buttons is "Installing"
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_helm.make_installing!
-
- # FE starts polling and update the buttons to "Installing"
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_helm.make_installed!
-
- expect(page).not_to have_css('.js-cluster-application-install-button')
- expect(page).to have_css('.js-cluster-application-uninstall-button:not([disabled])', exact_text: 'Uninstall')
- end
-
- expect(page).to have_content('Helm Tiller was successfully installed on your Kubernetes cluster')
- end
- end
-
- context 'when user installs Knative' do
- before do
- create(:clusters_applications_helm, :installed, cluster: cluster)
- end
-
- context 'on an abac cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled, projects: [project]) }
-
- it 'shows info block and not be installable' do
- page.within('.js-cluster-application-row-knative') do
- expect(page).to have_css('.rbac-notice')
- expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
- end
- end
- end
-
- context 'on an rbac cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
-
- it 'does not show callout block and be installable' do
- page.within('.js-cluster-application-row-knative') do
- expect(page).not_to have_css('.rbac-notice')
- expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
- end
- end
-
- describe 'when user clicks install button' do
- def domainname_form_value
- page.find('.js-knative-domainname').value
- end
-
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
-
- page.within('.js-cluster-application-row-knative') do
- expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
-
- page.find('.js-knative-domainname').set("domain.example.org")
-
- click_button 'Install'
-
- wait_for_requests
-
- expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_knative.make_installing!
- Clusters::Cluster.last.application_knative.make_installed!
- Clusters::Cluster.last.application_knative.update_attribute(:external_ip, '127.0.0.1')
- end
- end
-
- it 'shows status transition' do
- page.within('.js-cluster-application-row-knative') do
- expect(domainname_form_value).to eq('domain.example.org')
- expect(page).to have_css('.js-cluster-application-uninstall-button', exact_text: 'Uninstall')
- end
-
- expect(page).to have_content('Knative was successfully installed on your Kubernetes cluster')
- expect(page).to have_css('.js-knative-save-domain-button'), exact_text: 'Save changes'
- end
-
- it 'can then update the domain' do
- page.within('.js-cluster-application-row-knative') do
- expect(ClusterPatchAppWorker).to receive(:perform_async)
-
- expect(domainname_form_value).to eq('domain.example.org')
-
- page.find('.js-knative-domainname').set("new.domain.example.org")
-
- click_button 'Save changes'
-
- wait_for_requests
-
- expect(domainname_form_value).to eq('new.domain.example.org')
- end
- end
- end
- end
- end
-
- context 'when user installs Cert Manager' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
-
- create(:clusters_applications_helm, :installed, cluster: cluster)
-
- page.within('.js-cluster-application-row-cert_manager') do
- click_button 'Install'
- end
- end
-
- it 'shows status transition' do
- def email_form_value
- page.find('.js-email').value
- end
-
- page.within('.js-cluster-application-row-cert_manager') do
- expect(email_form_value).to eq(cluster.user.email)
- expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
-
- page.find('.js-email').set("new_email@example.org")
- Clusters::Cluster.last.application_cert_manager.make_installing!
-
- expect(email_form_value).to eq('new_email@example.org')
- expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_cert_manager.make_installed!
-
- expect(email_form_value).to eq('new_email@example.org')
- expect(page).to have_css('.js-cluster-application-uninstall-button', exact_text: 'Uninstall')
- end
-
- expect(page).to have_content('Cert-Manager was successfully installed on your Kubernetes cluster')
- end
- end
-
- context 'when user installs Ingress' do
- context 'when user installs application: Ingress' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
- allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
-
- create(:clusters_applications_helm, :installed, cluster: cluster)
-
- page.within('.js-cluster-application-row-ingress') do
- expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
- page.find(:css, '.js-cluster-application-install-button').click
-
- wait_for_requests
- end
- end
-
- it 'they see status transition' do
- page.within('.js-cluster-application-row-ingress') do
- # FE sends request and gets the response, then the buttons is "Installing"
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_ingress.make_installing!
-
- # FE starts polling and update the buttons to "Installing"
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
-
- # The application becomes installed but we keep waiting for external IP address
- Clusters::Cluster.last.application_ingress.make_installed!
-
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installed')
- expect(page).to have_selector('.js-no-endpoint-message')
- expect(page).to have_selector('.js-ingress-ip-loading-icon')
-
- # We receive the external IP address and display
- Clusters::Cluster.last.application_ingress.update!(external_ip: '192.168.1.100')
-
- expect(page).not_to have_css('.js-cluster-application-install-button')
- expect(page).to have_css('.js-cluster-application-uninstall-button:not([disabled])', exact_text: 'Uninstall')
- expect(page).not_to have_selector('.js-no-endpoint-message')
- expect(page.find('.js-endpoint').value).to eq('192.168.1.100')
- end
-
- expect(page).to have_content('Ingress was successfully installed on your Kubernetes cluster')
- end
- end
- end
+ include_examples "installing applications on a cluster" do
+ let(:cluster_path) { project_cluster_path(project, cluster) }
+ let(:cluster_factory_args) { [projects: [project]] }
end
end
end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 820ce48e52c..a11237db508 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -18,6 +18,8 @@ describe 'Gcp Cluster', :js do
let(:project_id) { 'test-project-1234' }
before do
+ stub_feature_flags(create_eks_clusters: false)
+
allow_any_instance_of(Projects::ClustersController)
.to receive(:token_in_session).and_return('token')
allow_any_instance_of(Projects::ClustersController)
@@ -147,6 +149,7 @@ describe 'Gcp Cluster', :js do
context 'when user has not signed with Google' do
before do
+ stub_feature_flags(create_eks_clusters: false)
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index ce382c19fc1..d1cd19dff2d 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -51,6 +51,7 @@ describe 'Clusters', :js do
context 'when user has not signed in Google' do
before do
+ stub_feature_flags(create_eks_clusters: false)
visit project_clusters_path(project)
click_link 'Add Kubernetes cluster'
@@ -62,4 +63,27 @@ describe 'Clusters', :js do
expect(page).to have_link('Google account')
end
end
+
+ context 'when create_eks_clusters feature flag is enabled' do
+ before do
+ stub_feature_flags(create_eks_clusters: true)
+ end
+
+ context 'when user access create cluster page' do
+ before do
+ visit project_clusters_path(project)
+
+ click_link 'Add Kubernetes cluster'
+ click_link 'Create new Cluster on GKE'
+ end
+
+ it 'user sees a link to create a GKE cluster' do
+ expect(page).to have_link('Google GKE')
+ end
+
+ it 'user sees a link to create an EKS cluster' do
+ expect(page).to have_link('Amazon EKS')
+ end
+ end
+ end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 879ff01f294..ef8749be0be 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -42,6 +42,24 @@ describe IssuesFinder do
end
end
+ context 'filtering by projects' do
+ context 'when projects are passed in a list of ids' do
+ let(:params) { { projects: [project1.id] } }
+
+ it 'returns the issue belonging to the projects' do
+ expect(issues).to contain_exactly(issue1)
+ end
+ end
+
+ context 'when projects are passed in a subquery' do
+ let(:params) { { projects: Project.id_in(project1.id) } }
+
+ it 'returns the issue belonging to the projects' do
+ expect(issues).to contain_exactly(issue1)
+ end
+ end
+ end
+
context 'filtering by group_id' do
let(:params) { { group_id: group.id } }
@@ -49,6 +67,30 @@ describe IssuesFinder do
it 'returns all group issues' do
expect(issues).to contain_exactly(issue1)
end
+
+ context 'when projects outside the group are passed' do
+ let(:params) { { group_id: group.id, projects: [project2.id] } }
+
+ it 'returns no issues' do
+ expect(issues).to be_empty
+ end
+ end
+
+ context 'when projects of the group are passed' do
+ let(:params) { { group_id: group.id, projects: [project1.id] } }
+
+ it 'returns the issue within the group and projects' do
+ expect(issues).to contain_exactly(issue1)
+ end
+ end
+
+ context 'when projects of the group are passed as a subquery' do
+ let(:params) { { group_id: group.id, projects: Project.id_in(project1.id) } }
+
+ it 'returns the issue within the group and projects' do
+ expect(issues).to contain_exactly(issue1)
+ end
+ end
end
context 'when include_subgroup param is true' do
@@ -59,6 +101,14 @@ describe IssuesFinder do
it 'returns all group and subgroup issues' do
expect(issues).to contain_exactly(issue1, issue4)
end
+
+ context 'when mixed projects are passed' do
+ let(:params) { { group_id: group.id, projects: [project2.id, project3.id] } }
+
+ it 'returns the issue within the group and projects' do
+ expect(issues).to contain_exactly(issue4)
+ end
+ end
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 78224f0b9da..6c0bbeff4f4 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -13,7 +13,7 @@ describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request1, merge_request4, merge_request5)
end
- it 'filters by project' do
+ it 'filters by project_id' do
params = { project_id: project1.id, scope: 'authored', state: 'opened' }
merge_requests = described_class.new(user, params).execute
@@ -21,6 +21,14 @@ describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request1)
end
+ it 'filters by projects' do
+ params = { projects: [project2.id, project3.id] }
+
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request3, merge_request4)
+ end
+
it 'filters by commit sha' do
merge_requests = described_class.new(
user,
@@ -49,6 +57,16 @@ describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request5)
end
+
+ it 'filters by group projects including subgroups' do
+ # project3 is not in the group, so it should not return merge_request4
+ projects = [project3.id, project4.id]
+ params = { group_id: group.id, include_subgroups: true, projects: projects }
+
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request5)
+ end
end
it 'filters by non_archived' do
diff --git a/spec/fixtures/api/schemas/entities/merge_request_noteable.json b/spec/fixtures/api/schemas/entities/merge_request_noteable.json
index 88b0fecc24c..d37f5b864d7 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_noteable.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_noteable.json
@@ -1,6 +1,10 @@
{
"type": "object",
"properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "title": { "type": "string" },
+ "description": { "type": "string" },
"merge_params": { "type": ["object", "null"] },
"state": { "type": "string" },
"source_branch": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/statistics.json b/spec/fixtures/api/schemas/statistics.json
new file mode 100644
index 00000000000..ef2f39aad9d
--- /dev/null
+++ b/spec/fixtures/api/schemas/statistics.json
@@ -0,0 +1,29 @@
+{
+ "type": "object",
+ "required" : [
+ "forks",
+ "issues",
+ "merge_requests",
+ "notes",
+ "snippets",
+ "ssh_keys",
+ "milestones",
+ "users",
+ "projects",
+ "groups",
+ "active_users"
+ ],
+ "properties" : {
+ "forks": { "type": "string" },
+ "issues'": { "type": "string" },
+ "merge_requests'": { "type": "string" },
+ "notes'": { "type": "string" },
+ "snippets'": { "type": "string" },
+ "ssh_keys'": { "type": "string" },
+ "milestones'": { "type": "string" },
+ "users'": { "type": "string" },
+ "projects'": { "type": "string" },
+ "groups'": { "type": "string" },
+ "active_users'": { "type": "string" }
+ }
+}
diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js
new file mode 100644
index 00000000000..25b1d432e2d
--- /dev/null
+++ b/spec/frontend/admin/statistics_panel/components/app_spec.js
@@ -0,0 +1,73 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import StatisticsPanelApp from '~/admin/statistics_panel/components/app.vue';
+import statisticsLabels from '~/admin/statistics_panel/constants';
+import createStore from '~/admin/statistics_panel/store';
+import { GlLoadingIcon } from '@gitlab/ui';
+import mockStatistics from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Admin statistics app', () => {
+ let wrapper;
+ let store;
+ let axiosMock;
+
+ const createComponent = () => {
+ wrapper = shallowMount(StatisticsPanelApp, {
+ localVue,
+ store,
+ sync: false,
+ });
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ axiosMock.onGet(/api\/(.*)\/application\/statistics/).reply(200);
+ store = createStore();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findStats = idx => wrapper.findAll('.js-stats').at(idx);
+
+ describe('template', () => {
+ describe('when app is loading', () => {
+ it('renders a loading indicator', () => {
+ store.dispatch('requestStatistics');
+ createComponent();
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('when app has finished loading', () => {
+ const statistics = convertObjectPropsToCamelCase(mockStatistics, { deep: true });
+
+ it.each`
+ statistic | count | index
+ ${'forks'} | ${12} | ${0}
+ ${'issues'} | ${180} | ${1}
+ ${'mergeRequests'} | ${31} | ${2}
+ ${'notes'} | ${986} | ${3}
+ ${'snippets'} | ${50} | ${4}
+ ${'sshKeys'} | ${10} | ${5}
+ ${'milestones'} | ${40} | ${6}
+ ${'activeUsers'} | ${50} | ${7}
+ `('renders the count for the $statistic statistic', ({ statistic, count, index }) => {
+ const label = statisticsLabels[statistic];
+ store.dispatch('receiveStatisticsSuccess', statistics);
+ createComponent();
+
+ expect(findStats(index).text()).toContain(label);
+ expect(findStats(index).text()).toContain(count);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/statistics_panel/mock_data.js b/spec/frontend/admin/statistics_panel/mock_data.js
new file mode 100644
index 00000000000..6d861059dfd
--- /dev/null
+++ b/spec/frontend/admin/statistics_panel/mock_data.js
@@ -0,0 +1,15 @@
+const mockStatistics = {
+ forks: 12,
+ issues: 180,
+ merge_requests: 31,
+ notes: 986,
+ snippets: 50,
+ ssh_keys: 10,
+ milestones: 40,
+ users: 50,
+ projects: 29,
+ groups: 9,
+ active_users: 50,
+};
+
+export default mockStatistics;
diff --git a/spec/frontend/admin/statistics_panel/store/actions_spec.js b/spec/frontend/admin/statistics_panel/store/actions_spec.js
new file mode 100644
index 00000000000..9b18b1aebda
--- /dev/null
+++ b/spec/frontend/admin/statistics_panel/store/actions_spec.js
@@ -0,0 +1,115 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import * as actions from '~/admin/statistics_panel/store/actions';
+import * as types from '~/admin/statistics_panel/store/mutation_types';
+import getInitialState from '~/admin/statistics_panel/store/state';
+import mockStatistics from '../mock_data';
+
+describe('Admin statistics panel actions', () => {
+ let mock;
+ let state;
+
+ beforeEach(() => {
+ state = getInitialState();
+ mock = new MockAdapter(axios);
+ });
+
+ describe('fetchStatistics', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(200, mockStatistics);
+ });
+
+ it('dispatches success with received data', done =>
+ testAction(
+ actions.fetchStatistics,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestStatistics' },
+ {
+ type: 'receiveStatisticsSuccess',
+ payload: expect.objectContaining(
+ convertObjectPropsToCamelCase(mockStatistics, { deep: true }),
+ ),
+ },
+ ],
+ done,
+ ));
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(/api\/(.*)\/application\/statistics/).replyOnce(500);
+ });
+
+ it('dispatches error', done =>
+ testAction(
+ actions.fetchStatistics,
+ null,
+ state,
+ [],
+ [
+ {
+ type: 'requestStatistics',
+ },
+ {
+ type: 'receiveStatisticsError',
+ payload: new Error('Request failed with status code 500'),
+ },
+ ],
+ done,
+ ));
+ });
+ });
+
+ describe('requestStatistics', () => {
+ it('should commit the request mutation', done =>
+ testAction(
+ actions.requestStatistics,
+ null,
+ state,
+ [{ type: types.REQUEST_STATISTICS }],
+ [],
+ done,
+ ));
+ });
+
+ describe('receiveStatisticsSuccess', () => {
+ it('should commit received data', done =>
+ testAction(
+ actions.receiveStatisticsSuccess,
+ mockStatistics,
+ state,
+ [
+ {
+ type: types.RECEIVE_STATISTICS_SUCCESS,
+ payload: mockStatistics,
+ },
+ ],
+ [],
+ done,
+ ));
+ });
+
+ describe('receiveStatisticsError', () => {
+ it('should commit error', done => {
+ testAction(
+ actions.receiveStatisticsError,
+ 500,
+ state,
+ [
+ {
+ type: types.RECEIVE_STATISTICS_ERROR,
+ payload: 500,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/admin/statistics_panel/store/getters_spec.js b/spec/frontend/admin/statistics_panel/store/getters_spec.js
new file mode 100644
index 00000000000..152d82531ed
--- /dev/null
+++ b/spec/frontend/admin/statistics_panel/store/getters_spec.js
@@ -0,0 +1,48 @@
+import createState from '~/admin/statistics_panel/store/state';
+import * as getters from '~/admin/statistics_panel/store/getters';
+
+describe('Admin statistics panel getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState();
+ });
+
+ describe('getStatistics', () => {
+ describe('when statistics data exists', () => {
+ it('returns an array of statistics objects with key, label and value', () => {
+ state.statistics = { forks: 10, issues: 20 };
+
+ const statisticsLabels = {
+ forks: 'Forks',
+ issues: 'Issues',
+ };
+
+ const statisticsData = [
+ { key: 'forks', label: 'Forks', value: 10 },
+ { key: 'issues', label: 'Issues', value: 20 },
+ ];
+
+ expect(getters.getStatistics(state)(statisticsLabels)).toEqual(statisticsData);
+ });
+ });
+
+ describe('when no statistics data exists', () => {
+ it('returns an array of statistics objects with key and label, with value set to null', () => {
+ state.statistics = null;
+
+ const statisticsLabels = {
+ forks: 'Forks',
+ issues: 'Issues',
+ };
+
+ const statisticsData = [
+ { key: 'forks', label: 'Forks', value: null },
+ { key: 'issues', label: 'Issues', value: null },
+ ];
+
+ expect(getters.getStatistics(state)(statisticsLabels)).toEqual(statisticsData);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/statistics_panel/store/mutations_spec.js b/spec/frontend/admin/statistics_panel/store/mutations_spec.js
new file mode 100644
index 00000000000..179f38d2bc5
--- /dev/null
+++ b/spec/frontend/admin/statistics_panel/store/mutations_spec.js
@@ -0,0 +1,41 @@
+import mutations from '~/admin/statistics_panel/store/mutations';
+import * as types from '~/admin/statistics_panel/store/mutation_types';
+import getInitialState from '~/admin/statistics_panel/store/state';
+import mockStatistics from '../mock_data';
+
+describe('Admin statistics panel mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = getInitialState();
+ });
+
+ describe(`${types.REQUEST_STATISTICS}`, () => {
+ it('sets isLoading to true', () => {
+ mutations[types.REQUEST_STATISTICS](state);
+
+ expect(state.isLoading).toBe(true);
+ });
+ });
+
+ describe(`${types.RECEIVE_STATISTICS_SUCCESS}`, () => {
+ it('updates the store with the statistics', () => {
+ mutations[types.RECEIVE_STATISTICS_SUCCESS](state, mockStatistics);
+
+ expect(state.isLoading).toBe(false);
+ expect(state.error).toBe(null);
+ expect(state.statistics).toEqual(mockStatistics);
+ });
+ });
+
+ describe(`${types.RECEIVE_STATISTICS_ERROR}`, () => {
+ it('sets error and clears data', () => {
+ const error = 500;
+ mutations[types.RECEIVE_STATISTICS_ERROR](state, error);
+
+ expect(state.isLoading).toBe(false);
+ expect(state.error).toBe(error);
+ expect(state.statistics).toEqual(null);
+ });
+ });
+});
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 1d8984cea0a..fbcab078993 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -89,7 +89,7 @@ describe('Applications', () => {
});
it('renders a row for Knative', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-knative')).toBeNull();
+ expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBeNull();
});
});
@@ -126,7 +126,7 @@ describe('Applications', () => {
});
it('renders a row for Knative', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-knative')).toBeNull();
+ expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBeNull();
});
});
diff --git a/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js b/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
new file mode 100644
index 00000000000..f076c45e56c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
@@ -0,0 +1,115 @@
+import Vuex from 'vuex';
+import GlToggleVuex from '~/vue_shared/components/gl_toggle_vuex.vue';
+import { GlToggle } from '@gitlab/ui';
+import { mount, createLocalVue } from '@vue/test-utils';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('GlToggleVuex component', () => {
+ let wrapper;
+ let store;
+
+ const findButton = () => wrapper.find('button');
+
+ const createWrapper = (props = {}) => {
+ wrapper = mount(GlToggleVuex, {
+ localVue,
+ store,
+ propsData: {
+ stateProperty: 'toggleState',
+ ...props,
+ },
+ sync: false,
+ });
+ };
+
+ beforeEach(() => {
+ store = new Vuex.Store({
+ state: {
+ toggleState: false,
+ },
+ actions: {
+ setToggleState: ({ commit }, { key, value }) => commit('setToggleState', { key, value }),
+ },
+ mutations: {
+ setToggleState: (state, { key, value }) => {
+ state[key] = value;
+ },
+ },
+ });
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders gl-toggle', () => {
+ expect(wrapper.find(GlToggle).exists()).toBe(true);
+ });
+
+ it('properly computes default value for setAction', () => {
+ expect(wrapper.props('setAction')).toBe('setToggleState');
+ });
+
+ describe('without a store module', () => {
+ it('calls action with new value when value changes', () => {
+ jest.spyOn(store, 'dispatch');
+
+ findButton().trigger('click');
+ expect(store.dispatch).toHaveBeenCalledWith('setToggleState', {
+ key: 'toggleState',
+ value: true,
+ });
+ });
+
+ it('updates store property when value changes', () => {
+ findButton().trigger('click');
+ expect(store.state.toggleState).toBe(true);
+ });
+ });
+
+ describe('with a store module', () => {
+ beforeEach(() => {
+ store = new Vuex.Store({
+ modules: {
+ someModule: {
+ namespaced: true,
+ state: {
+ toggleState: false,
+ },
+ actions: {
+ setToggleState: ({ commit }, { key, value }) =>
+ commit('setToggleState', { key, value }),
+ },
+ mutations: {
+ setToggleState: (state, { key, value }) => {
+ state[key] = value;
+ },
+ },
+ },
+ },
+ });
+
+ createWrapper({
+ storeModule: 'someModule',
+ });
+ });
+
+ it('calls action with new value when value changes', () => {
+ jest.spyOn(store, 'dispatch');
+
+ findButton().trigger('click');
+ expect(store.dispatch).toHaveBeenCalledWith('someModule/setToggleState', {
+ key: 'toggleState',
+ value: true,
+ });
+ });
+
+ it('updates store property when value changes', () => {
+ findButton().trigger('click');
+ expect(store.state.someModule.toggleState).toBe(true);
+ });
+ });
+});
diff --git a/spec/javascripts/boards/board_new_issue_spec.js b/spec/javascripts/boards/board_new_issue_spec.js
index 721d0b8172d..76675a78db2 100644
--- a/spec/javascripts/boards/board_new_issue_spec.js
+++ b/spec/javascripts/boards/board_new_issue_spec.js
@@ -171,6 +171,32 @@ describe('Issue boards new issue form', () => {
.then(done)
.catch(done.fail);
});
+
+ it('sets detail weight after submit', done => {
+ boardsStore.weightFeatureAvailable = true;
+ vm.title = 'submit issue';
+
+ Vue.nextTick()
+ .then(submitIssue)
+ .then(() => {
+ expect(boardsStore.detail.list.weight).toBe(list.weight);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not set detail weight after submit', done => {
+ boardsStore.weightFeatureAvailable = false;
+ vm.title = 'submit issue';
+
+ Vue.nextTick()
+ .then(submitIssue)
+ .then(() => {
+ expect(boardsStore.detail.list.weight).toBe(list.weight);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
});
describe('submit error', () => {
diff --git a/spec/javascripts/boards/mock_data.js b/spec/javascripts/boards/mock_data.js
index ea22ae5c4e7..50ad1442873 100644
--- a/spec/javascripts/boards/mock_data.js
+++ b/spec/javascripts/boards/mock_data.js
@@ -12,6 +12,7 @@ export const listObj = {
position: 0,
title: 'Test',
list_type: 'label',
+ weight: 3,
label: {
id: 5000,
title: 'Testing',
@@ -26,6 +27,7 @@ export const listObjDuplicate = {
position: 1,
title: 'Test',
list_type: 'label',
+ weight: 3,
label: {
id: listObj.label.id,
title: 'Testing',
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
index 43c5d3ec980..8fc1e0a4e88 100644
--- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -84,6 +84,20 @@ describe Gitlab::Ci::Build::Policy::Refs do
.not_to be_satisfied_by(pipeline)
end
end
+
+ context 'when source is external_pull_request_event' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :external_pull_request_event) }
+
+ it 'is satisfied with only: external_pull_requests' do
+ expect(described_class.new(%w[external_pull_requests]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied with only: external_pull_request_events' do
+ expect(described_class.new(%w[external_pull_request_events]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
end
context 'when matching a ref by a regular expression' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index bf9ff922c05..ba4f841cf43 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -128,4 +128,38 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
expect(pipeline.target_sha).to eq(merge_request.target_branch_sha)
end
end
+
+ context 'when pipeline is running for an external pull request' do
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ source: :external_pull_request_event,
+ origin_ref: 'feature',
+ checkout_sha: project.commit.id,
+ after_sha: nil,
+ before_sha: nil,
+ source_sha: external_pull_request.source_sha,
+ target_sha: external_pull_request.target_sha,
+ trigger_request: nil,
+ schedule: nil,
+ external_pull_request: external_pull_request,
+ project: project,
+ current_user: user)
+ end
+
+ let(:external_pull_request) { build(:external_pull_request, project: project) }
+
+ before do
+ step.perform!
+ end
+
+ it 'correctly indicates that this is an external pull request pipeline' do
+ expect(pipeline).to be_external_pull_request_event
+ expect(pipeline.external_pull_request).to eq(external_pull_request)
+ end
+
+ it 'correctly sets source sha and target sha on the pipeline' do
+ expect(pipeline.source_sha).to eq(external_pull_request.source_sha)
+ expect(pipeline.target_sha).to eq(external_pull_request.target_sha)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 89431b80be3..023d7530b4b 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -46,7 +46,7 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
context 'is matched' do
let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null', when: 'delayed', start_in: '3 hours' }] } }
- it { is_expected.to include(when: 'delayed', start_in: '3 hours') }
+ it { is_expected.to include(when: 'delayed', options: { start_in: '3 hours' }) }
end
context 'is not matched' do
@@ -541,7 +541,7 @@ describe Gitlab::Ci::Pipeline::Seed::Build do
it { is_expected.to be_included }
it 'correctly populates when:' do
- expect(seed_build.attributes).to include(when: 'delayed', start_in: '1 day')
+ expect(seed_build.attributes).to include(when: 'delayed', options: { start_in: '1 day' })
end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index cf496b79a62..9d9a9ecda33 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -16,7 +16,10 @@ module Gitlab
let(:config) do
YAML.dump(
before_script: ['pwd'],
- rspec: { script: 'rspec' }
+ rspec: {
+ script: 'rspec',
+ interruptible: true
+ }
)
end
@@ -29,6 +32,7 @@ module Gitlab
before_script: ["pwd"],
script: ["rspec"]
},
+ interruptible: true,
allow_failure: false,
when: "on_success",
yaml_variables: []
@@ -36,6 +40,36 @@ module Gitlab
end
end
+ context 'with job rules' do
+ let(:config) do
+ YAML.dump(
+ rspec: {
+ script: 'rspec',
+ rules: [
+ { if: '$CI_COMMIT_REF_NAME == "master"' },
+ { changes: %w[README.md] }
+ ]
+ }
+ )
+ end
+
+ it 'returns valid build attributes' do
+ expect(subject).to eq({
+ stage: 'test',
+ stage_idx: 1,
+ name: 'rspec',
+ options: { script: ['rspec'] },
+ rules: [
+ { if: '$CI_COMMIT_REF_NAME == "master"' },
+ { changes: %w[README.md] }
+ ],
+ allow_failure: false,
+ when: 'on_success',
+ yaml_variables: []
+ })
+ end
+ end
+
describe 'coverage entry' do
describe 'code coverage regexp' do
let(:config) do
@@ -1252,7 +1286,7 @@ module Gitlab
end
end
- describe 'rules' do
+ context 'with when/rules conflict' do
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
let(:config) do
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 710564b7540..1b4d366ce7b 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -11,16 +11,62 @@ describe Gitlab::Danger::Helper do
class FakeDanger
include Gitlab::Danger::Helper
- attr_reader :git
+ attr_reader :git, :gitlab
- def initialize(git:)
+ def initialize(git:, gitlab:)
@git = git
+ @gitlab = gitlab
end
end
let(:fake_git) { double('fake-git') }
- subject(:helper) { FakeDanger.new(git: fake_git) }
+ let(:mr_author) { nil }
+ let(:fake_gitlab) { double('fake-gitlab', mr_author: mr_author) }
+
+ subject(:helper) { FakeDanger.new(git: fake_git, gitlab: fake_gitlab) }
+
+ describe '#gitlab_helper' do
+ context 'when gitlab helper is not available' do
+ let(:fake_gitlab) { nil }
+
+ it 'returns nil' do
+ expect(helper.gitlab_helper).to be_nil
+ end
+ end
+
+ context 'when gitlab helper is available' do
+ it 'returns the gitlab helper' do
+ expect(helper.gitlab_helper).to eq(fake_gitlab)
+ end
+ end
+ end
+
+ describe '#release_automation?' do
+ context 'when gitlab helper is not available' do
+ it 'returns false' do
+ expect(helper.release_automation?).to be_falsey
+ end
+ end
+
+ context 'when gitlab helper is available' do
+ context "but the MR author isn't the RELEASE_TOOLS_BOT" do
+ let(:mr_author) { 'johnmarston' }
+
+ it 'returns false' do
+ expect(helper.release_automation?).to be_falsey
+ end
+ end
+
+ context 'and the MR author is the RELEASE_TOOLS_BOT' do
+ let(:mr_author) { described_class::RELEASE_TOOLS_BOT }
+
+ it 'returns true' do
+ expect(helper.release_automation?).to be_truthy
+ end
+ end
+ end
+ end
describe '#all_changed_files' do
subject { helper.all_changed_files }
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 47ba7eff8ed..dafa4243145 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -23,6 +23,7 @@ issues:
- epic_issue
- epic
- designs
+- design_versions
events:
- author
- project
@@ -126,6 +127,8 @@ merge_requests:
- blocks_as_blockee
- blocking_merge_requests
- blocked_merge_requests
+external_pull_requests:
+- project
merge_request_diff:
- merge_request
- merge_request_diff_commits
@@ -155,6 +158,7 @@ ci_pipelines:
- pipeline_schedule
- merge_requests_as_head_pipeline
- merge_request
+- external_pull_request
- deployments
- environments
- chat_data
@@ -402,6 +406,7 @@ project:
- merge_trains
- designs
- project_aliases
+- external_pull_requests
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
index fef84c87509..cc8ca1d87e3 100644
--- a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
@@ -12,7 +12,7 @@ describe 'Import/Export attribute configuration' do
let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:relation_names) do
- names = names_from_tree(config_hash['project_tree'])
+ names = names_from_tree(config_hash.dig('tree', 'project'))
# Remove duplicated or add missing models
# - project is not part of the tree, so it has to be added manually.
diff --git a/spec/lib/gitlab/import_export/attributes_finder_spec.rb b/spec/lib/gitlab/import_export/attributes_finder_spec.rb
new file mode 100644
index 00000000000..3cbc1375d6e
--- /dev/null
+++ b/spec/lib/gitlab/import_export/attributes_finder_spec.rb
@@ -0,0 +1,230 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::ImportExport::AttributesFinder do
+ describe '#find_root' do
+ subject { described_class.new(config: config).find_root(model_key) }
+
+ let(:test_config) { 'spec/support/import_export/import_export.yml' }
+ let(:config) { Gitlab::ImportExport::Config.new.to_h }
+ let(:model_key) { :project }
+
+ let(:project_tree_hash) do
+ {
+ except: [:id, :created_at],
+ include: [
+ { issues: { include: [] } },
+ { labels: { include: [] } },
+ { merge_requests: {
+ except: [:iid],
+ include: [
+ { merge_request_diff: {
+ include: [],
+ preload: { source_project: nil }
+ } },
+ { merge_request_test: { include: [] } }
+ ],
+ only: [:id],
+ preload: {
+ merge_request_diff: { source_project: nil },
+ merge_request_test: nil
+ }
+ } },
+ { commit_statuses: {
+ include: [{ commit: { include: [] } }],
+ preload: { commit: nil }
+ } },
+ { project_members: {
+ include: [{ user: { include: [],
+ only: [:email] } }],
+ preload: { user: nil }
+ } }
+ ],
+ preload: {
+ commit_statuses: {
+ commit: nil
+ },
+ issues: nil,
+ labels: nil,
+ merge_requests: {
+ merge_request_diff: { source_project: nil },
+ merge_request_test: nil
+ },
+ project_members: {
+ user: nil
+ }
+ }
+ }
+ end
+
+ before do
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:config_file).and_return(test_config)
+ end
+
+ it 'generates hash from project tree config' do
+ is_expected.to match(project_tree_hash)
+ end
+
+ context 'individual scenarios' do
+ it 'generates the correct hash for a single project relation' do
+ setup_yaml(tree: { project: [:issues] })
+
+ is_expected.to match(
+ include: [{ issues: { include: [] } }],
+ preload: { issues: nil }
+ )
+ end
+
+ it 'generates the correct hash for a single project feature relation' do
+ setup_yaml(tree: { project: [:project_feature] })
+
+ is_expected.to match(
+ include: [{ project_feature: { include: [] } }],
+ preload: { project_feature: nil }
+ )
+ end
+
+ it 'generates the correct hash for multiple project relations' do
+ setup_yaml(tree: { project: [:issues, :snippets] })
+
+ is_expected.to match(
+ include: [{ issues: { include: [] } },
+ { snippets: { include: [] } }],
+ preload: { issues: nil, snippets: nil }
+ )
+ end
+
+ it 'generates the correct hash for a single sub-relation' do
+ setup_yaml(tree: { project: [issues: [:notes]] })
+
+ is_expected.to match(
+ include: [{ issues: { include: [{ notes: { include: [] } }],
+ preload: { notes: nil } } }],
+ preload: { issues: { notes: nil } }
+ )
+ end
+
+ it 'generates the correct hash for multiple sub-relations' do
+ setup_yaml(tree: { project: [merge_requests: [:notes, :merge_request_diff]] })
+
+ is_expected.to match(
+ include: [{ merge_requests:
+ { include: [{ notes: { include: [] } },
+ { merge_request_diff: { include: [] } }],
+ preload: { merge_request_diff: nil, notes: nil } } }],
+ preload: { merge_requests: { merge_request_diff: nil, notes: nil } }
+ )
+ end
+
+ it 'generates the correct hash for a sub-relation with another sub-relation' do
+ setup_yaml(tree: { project: [merge_requests: [notes: [:author]]] })
+
+ is_expected.to match(
+ include: [{ merge_requests: {
+ include: [{ notes: { include: [{ author: { include: [] } }],
+ preload: { author: nil } } }],
+ preload: { notes: { author: nil } }
+ } }],
+ preload: { merge_requests: { notes: { author: nil } } }
+ )
+ end
+
+ it 'generates the correct hash for a relation with included attributes' do
+ setup_yaml(tree: { project: [:issues] },
+ included_attributes: { issues: [:name, :description] })
+
+ is_expected.to match(
+ include: [{ issues: { include: [],
+ only: [:name, :description] } }],
+ preload: { issues: nil }
+ )
+ end
+
+ it 'generates the correct hash for a relation with excluded attributes' do
+ setup_yaml(tree: { project: [:issues] },
+ excluded_attributes: { issues: [:name] })
+
+ is_expected.to match(
+ include: [{ issues: { except: [:name],
+ include: [] } }],
+ preload: { issues: nil }
+ )
+ end
+
+ it 'generates the correct hash for a relation with both excluded and included attributes' do
+ setup_yaml(tree: { project: [:issues] },
+ excluded_attributes: { issues: [:name] },
+ included_attributes: { issues: [:description] })
+
+ is_expected.to match(
+ include: [{ issues: { except: [:name],
+ include: [],
+ only: [:description] } }],
+ preload: { issues: nil }
+ )
+ end
+
+ it 'generates the correct hash for a relation with custom methods' do
+ setup_yaml(tree: { project: [:issues] },
+ methods: { issues: [:name] })
+
+ is_expected.to match(
+ include: [{ issues: { include: [],
+ methods: [:name] } }],
+ preload: { issues: nil }
+ )
+ end
+
+ def setup_yaml(hash)
+ allow(YAML).to receive(:load_file).with(test_config).and_return(hash)
+ end
+ end
+ end
+
+ describe '#find_relations_tree' do
+ subject { described_class.new(config: config).find_relations_tree(model_key) }
+
+ let(:tree) { { project: { issues: {} } } }
+ let(:model_key) { :project }
+
+ context 'when initialized with config including tree' do
+ let(:config) { { tree: tree } }
+
+ context 'when relation is in top-level keys of the tree' do
+ it { is_expected.to eq({ issues: {} }) }
+ end
+
+ context 'when the relation is not in top-level keys' do
+ let(:model_key) { :issues }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when tree is not present in config' do
+ let(:config) { {} }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#find_excluded_keys' do
+ subject { described_class.new(config: config).find_excluded_keys(klass_name) }
+
+ let(:klass_name) { 'project' }
+
+ context 'when initialized with excluded_attributes' do
+ let(:config) { { excluded_attributes: excluded_attributes } }
+ let(:excluded_attributes) { { project: [:name, :path], issues: [:milestone_id] } }
+
+ it { is_expected.to eq(%w[name path]) }
+ end
+
+ context 'when excluded_attributes are not present in config' do
+ let(:config) { {} }
+
+ it { is_expected.to eq([]) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/config_spec.rb b/spec/lib/gitlab/import_export/config_spec.rb
index cf396dba382..f09a29b84db 100644
--- a/spec/lib/gitlab/import_export/config_spec.rb
+++ b/spec/lib/gitlab/import_export/config_spec.rb
@@ -1,163 +1,177 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'rspec-parameterized'
describe Gitlab::ImportExport::Config do
let(:yaml_file) { described_class.new }
describe '#to_h' do
- context 'when using CE' do
- before do
- allow(yaml_file)
- .to receive(:merge?)
- .and_return(false)
+ subject { yaml_file.to_h }
+
+ context 'when using default config' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:ee) do
+ [true, false]
end
- it 'just returns the parsed Hash without the EE section' do
- expected = YAML.load_file(Gitlab::ImportExport.config_file)
- expected.delete('ee')
+ with_them do
+ before do
+ allow(Gitlab).to receive(:ee?) { ee }
+ end
- expect(yaml_file.to_h).to eq(expected)
+ it 'parses default config' do
+ expect { subject }.not_to raise_error
+ expect(subject).to be_a(Hash)
+ expect(subject.keys).to contain_exactly(
+ :tree, :excluded_attributes, :included_attributes, :methods, :preloads)
+ end
end
end
- context 'when using EE' do
- before do
- allow(yaml_file)
- .to receive(:merge?)
- .and_return(true)
- end
+ context 'when using custom config' do
+ let(:config) do
+ <<-EOF.strip_heredoc
+ tree:
+ project:
+ - labels:
+ - :priorities
+ - milestones:
+ - events:
+ - :push_event_payload
- it 'merges the EE project tree into the CE project tree' do
- allow(yaml_file)
- .to receive(:parse_yaml)
- .and_return({
- 'project_tree' => [
- {
- 'issues' => [
- :id,
- :title,
- { 'notes' => [:id, :note, { 'author' => [:name] }] }
- ]
- }
- ],
- 'ee' => {
- 'project_tree' => [
- {
- 'issues' => [
- :description,
- { 'notes' => [:date, { 'author' => [:email] }] }
- ]
- },
- { 'foo' => [{ 'bar' => %i[baz] }] }
- ]
- }
- })
+ included_attributes:
+ user:
+ - :id
- expect(yaml_file.to_h).to eq({
- 'project_tree' => [
- {
- 'issues' => [
- :id,
- :title,
- {
- 'notes' => [
- :id,
- :note,
- { 'author' => [:name, :email] },
- :date
- ]
- },
- :description
- ]
- },
- { 'foo' => [{ 'bar' => %i[baz] }] }
- ]
- })
+ excluded_attributes:
+ project:
+ - :name
+
+ methods:
+ labels:
+ - :type
+ events:
+ - :action
+
+ preloads:
+ statuses:
+ project:
+
+ ee:
+ tree:
+ project:
+ protected_branches:
+ - :unprotect_access_levels
+ included_attributes:
+ user:
+ - :name_ee
+ excluded_attributes:
+ project:
+ - :name_without_ee
+ methods:
+ labels:
+ - :type_ee
+ events_ee:
+ - :action_ee
+ preloads:
+ statuses:
+ bridge_ee:
+ EOF
end
- it 'merges the excluded attributes list' do
- allow(yaml_file)
- .to receive(:parse_yaml)
- .and_return({
- 'project_tree' => [],
- 'excluded_attributes' => {
- 'project' => %i[id title],
- 'notes' => %i[id]
- },
- 'ee' => {
- 'project_tree' => [],
- 'excluded_attributes' => {
- 'project' => %i[date],
- 'foo' => %i[bar baz]
- }
- }
- })
-
- expect(yaml_file.to_h).to eq({
- 'project_tree' => [],
- 'excluded_attributes' => {
- 'project' => %i[id title date],
- 'notes' => %i[id],
- 'foo' => %i[bar baz]
- }
- })
+ let(:config_hash) { YAML.safe_load(config, [Symbol]) }
+
+ before do
+ allow_any_instance_of(described_class).to receive(:parse_yaml) do
+ config_hash.deep_dup
+ end
end
- it 'merges the included attributes list' do
- allow(yaml_file)
- .to receive(:parse_yaml)
- .and_return({
- 'project_tree' => [],
- 'included_attributes' => {
- 'project' => %i[id title],
- 'notes' => %i[id]
- },
- 'ee' => {
- 'project_tree' => [],
- 'included_attributes' => {
- 'project' => %i[date],
- 'foo' => %i[bar baz]
+ context 'when using CE' do
+ before do
+ allow(Gitlab).to receive(:ee?) { false }
+ end
+
+ it 'just returns the normalized Hash' do
+ is_expected.to eq(
+ {
+ tree: {
+ project: {
+ labels: {
+ priorities: {}
+ },
+ milestones: {
+ events: {
+ push_event_payload: {}
+ }
+ }
+ }
+ },
+ included_attributes: {
+ user: [:id]
+ },
+ excluded_attributes: {
+ project: [:name]
+ },
+ methods: {
+ labels: [:type],
+ events: [:action]
+ },
+ preloads: {
+ statuses: {
+ project: nil
+ }
}
}
- })
-
- expect(yaml_file.to_h).to eq({
- 'project_tree' => [],
- 'included_attributes' => {
- 'project' => %i[id title date],
- 'notes' => %i[id],
- 'foo' => %i[bar baz]
- }
- })
+ )
+ end
end
- it 'merges the methods list' do
- allow(yaml_file)
- .to receive(:parse_yaml)
- .and_return({
- 'project_tree' => [],
- 'methods' => {
- 'project' => %i[id title],
- 'notes' => %i[id]
- },
- 'ee' => {
- 'project_tree' => [],
- 'methods' => {
- 'project' => %i[date],
- 'foo' => %i[bar baz]
+ context 'when using EE' do
+ before do
+ allow(Gitlab).to receive(:ee?) { true }
+ end
+
+ it 'just returns the normalized Hash' do
+ is_expected.to eq(
+ {
+ tree: {
+ project: {
+ labels: {
+ priorities: {}
+ },
+ milestones: {
+ events: {
+ push_event_payload: {}
+ }
+ },
+ protected_branches: {
+ unprotect_access_levels: {}
+ }
+ }
+ },
+ included_attributes: {
+ user: [:id, :name_ee]
+ },
+ excluded_attributes: {
+ project: [:name, :name_without_ee]
+ },
+ methods: {
+ labels: [:type, :type_ee],
+ events: [:action],
+ events_ee: [:action_ee]
+ },
+ preloads: {
+ statuses: {
+ project: nil,
+ bridge_ee: nil
+ }
}
}
- })
-
- expect(yaml_file.to_h).to eq({
- 'project_tree' => [],
- 'methods' => {
- 'project' => %i[id title date],
- 'notes' => %i[id],
- 'foo' => %i[bar baz]
- }
- })
+ )
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
new file mode 100644
index 00000000000..d23b27c9d8e
--- /dev/null
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -0,0 +1,272 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::FastHashSerializer do
+ subject { described_class.new(project, tree).execute }
+
+ let!(:project) { setup_project }
+ let(:user) { create(:user) }
+ let(:shared) { project.import_export_shared }
+ let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
+ let(:tree) { reader.project_tree }
+
+ before do
+ project.add_maintainer(user)
+ allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
+ allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
+ end
+
+ it 'saves the correct hash' do
+ is_expected.to include({ 'description' => 'description', 'visibility_level' => 20 })
+ end
+
+ it 'has approvals_before_merge set' do
+ expect(subject['approvals_before_merge']).to eq(1)
+ end
+
+ it 'has milestones' do
+ expect(subject['milestones']).not_to be_empty
+ end
+
+ it 'has merge requests' do
+ expect(subject['merge_requests']).not_to be_empty
+ end
+
+ it 'has merge request\'s milestones' do
+ expect(subject['merge_requests'].first['milestone']).not_to be_empty
+ end
+
+ it 'has merge request\'s source branch SHA' do
+ expect(subject['merge_requests'].first['source_branch_sha']).to eq('ABCD')
+ end
+
+ it 'has merge request\'s target branch SHA' do
+ expect(subject['merge_requests'].first['target_branch_sha']).to eq('DCBA')
+ end
+
+ it 'has events' do
+ expect(subject['merge_requests'].first['milestone']['events']).not_to be_empty
+ end
+
+ it 'has snippets' do
+ expect(subject['snippets']).not_to be_empty
+ end
+
+ it 'has snippet notes' do
+ expect(subject['snippets'].first['notes']).not_to be_empty
+ end
+
+ it 'has releases' do
+ expect(subject['releases']).not_to be_empty
+ end
+
+ it 'has no author on releases' do
+ expect(subject['releases'].first['author']).to be_nil
+ end
+
+ it 'has the author ID on releases' do
+ expect(subject['releases'].first['author_id']).not_to be_nil
+ end
+
+ it 'has issues' do
+ expect(subject['issues']).not_to be_empty
+ end
+
+ it 'has issue comments' do
+ notes = subject['issues'].first['notes']
+
+ expect(notes).not_to be_empty
+ expect(notes.first['type']).to eq('DiscussionNote')
+ end
+
+ it 'has issue assignees' do
+ expect(subject['issues'].first['issue_assignees']).not_to be_empty
+ end
+
+ it 'has author on issue comments' do
+ expect(subject['issues'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has project members' do
+ expect(subject['project_members']).not_to be_empty
+ end
+
+ it 'has merge request diffs' do
+ expect(subject['merge_requests'].first['merge_request_diff']).not_to be_empty
+ end
+
+ it 'has merge request diff files' do
+ expect(subject['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
+ end
+
+ it 'has merge request diff commits' do
+ expect(subject['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
+ end
+
+ it 'has merge request comments' do
+ expect(subject['merge_requests'].first['notes']).not_to be_empty
+ end
+
+ it 'has author on merge request comments' do
+ expect(subject['merge_requests'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has pipeline stages' do
+ expect(subject.dig('ci_pipelines', 0, 'stages')).not_to be_empty
+ end
+
+ it 'has pipeline statuses' do
+ expect(subject.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
+ end
+
+ it 'has pipeline builds' do
+ builds_count = subject
+ .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
+ .count { |hash| hash['type'] == 'Ci::Build' }
+
+ expect(builds_count).to eq(1)
+ end
+
+ it 'has no when YAML attributes but only the DB column' do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:ci_yaml_file)
+ .and_return(File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')))
+
+ expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
+
+ subject
+ end
+
+ it 'has pipeline commits' do
+ expect(subject['ci_pipelines']).not_to be_empty
+ end
+
+ it 'has ci pipeline notes' do
+ expect(subject['ci_pipelines'].first['notes']).not_to be_empty
+ end
+
+ it 'has labels with no associations' do
+ expect(subject['labels']).not_to be_empty
+ end
+
+ it 'has labels associated to records' do
+ expect(subject['issues'].first['label_links'].first['label']).not_to be_empty
+ end
+
+ it 'has project and group labels' do
+ label_types = subject['issues'].first['label_links'].map { |link| link['label']['type'] }
+
+ expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
+ end
+
+ it 'has priorities associated to labels' do
+ priorities = subject['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
+
+ expect(priorities).not_to be_empty
+ end
+
+ it 'has issue resource label events' do
+ expect(subject['issues'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'has merge request resource label events' do
+ expect(subject['merge_requests'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'saves the correct service type' do
+ expect(subject['services'].first['type']).to eq('CustomIssueTrackerService')
+ end
+
+ it 'saves the properties for a service' do
+ expect(subject['services'].first['properties']).to eq('one' => 'value')
+ end
+
+ it 'has project feature' do
+ project_feature = subject['project_feature']
+ expect(project_feature).not_to be_empty
+ expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
+ expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
+ expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'has custom attributes' do
+ expect(subject['custom_attributes'].count).to eq(2)
+ end
+
+ it 'has badges' do
+ expect(subject['project_badges'].count).to eq(2)
+ end
+
+ it 'does not complain about non UTF-8 characters in MR diff files' do
+ ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
+
+ expect(subject['merge_requests'].first['merge_request_diff']).not_to be_empty
+ end
+
+ context 'project attributes' do
+ it 'does not contain the runners token' do
+ expect(subject).not_to include("runners_token" => 'token')
+ end
+ end
+
+ it 'has a board and a list' do
+ expect(subject['boards'].first['lists']).not_to be_empty
+ end
+
+ def setup_project
+ issue = create(:issue, assignees: [user])
+ snippet = create(:project_snippet)
+ release = create(:release)
+ group = create(:group)
+
+ project = create(:project,
+ :public,
+ :repository,
+ :issues_disabled,
+ :wiki_enabled,
+ :builds_private,
+ description: 'description',
+ issues: [issue],
+ snippets: [snippet],
+ releases: [release],
+ group: group,
+ approvals_before_merge: 1
+ )
+ project_label = create(:label, project: project)
+ group_label = create(:group_label, group: group)
+ create(:label_link, label: project_label, target: issue)
+ create(:label_link, label: group_label, target: issue)
+ create(:label_priority, label: group_label, priority: 1)
+ milestone = create(:milestone, project: project)
+ merge_request = create(:merge_request, source_project: project, milestone: milestone)
+
+ ci_build = create(:ci_build, project: project, when: nil)
+ ci_build.pipeline.update(project: project)
+ create(:commit_status, project: project, pipeline: ci_build.pipeline)
+
+ create(:milestone, project: project)
+ create(:discussion_note, noteable: issue, project: project)
+ create(:note, noteable: merge_request, project: project)
+ create(:note, noteable: snippet, project: project)
+ create(:note_on_commit,
+ author: user,
+ project: project,
+ commit_id: ci_build.pipeline.sha)
+
+ create(:resource_label_event, label: project_label, issue: issue)
+ create(:resource_label_event, label: group_label, merge_request: merge_request)
+
+ create(:event, :created, target: milestone, project: project, author: user)
+ create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
+
+ create(:project_custom_attribute, project: project)
+ create(:project_custom_attribute, project: project)
+
+ create(:project_badge, project: project)
+ create(:project_badge, project: project)
+
+ board = create(:board, project: project, name: 'TestBoard')
+ create(:list, board: board, position: 0, label: project_label)
+
+ project
+ end
+end
diff --git a/spec/lib/gitlab/import_export/model_configuration_spec.rb b/spec/lib/gitlab/import_export/model_configuration_spec.rb
index 5ed9fef1597..3442e22c11f 100644
--- a/spec/lib/gitlab/import_export/model_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/model_configuration_spec.rb
@@ -8,7 +8,7 @@ describe 'Import/Export model configuration' do
let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:model_names) do
- names = names_from_tree(config_hash['project_tree'])
+ names = names_from_tree(config_hash.dig('tree', 'project'))
# Remove duplicated or add missing models
# - project is not part of the tree, so it has to be added manually.
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index 0aef4887c75..87be7857e67 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -512,6 +512,24 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(Milestone.find_by_title('Group-level milestone').iid).to eq(2)
end
end
+
+ context 'with external authorization classification labels' do
+ it 'converts empty external authorization classification labels to nil' do
+ project.create_import_data(data: { override_params: { external_authorization_classification_label: "" } })
+
+ restored_project_json
+
+ expect(project.external_authorization_classification_label).to be_nil
+ end
+
+ it 'preserves valid external authorization classification labels' do
+ project.create_import_data(data: { override_params: { external_authorization_classification_label: "foobar" } })
+
+ restored_project_json
+
+ expect(project.external_authorization_classification_label).to eq("foobar")
+ end
+ end
end
describe '#restored_project' do
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index fefbed93316..ff46e062a5d 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -23,12 +23,65 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(project_tree_saver.save).to be true
end
+ context ':export_fast_serialize feature flag checks' do
+ before do
+ expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
+ expect(reader).to receive(:project_tree).and_return(project_tree)
+ end
+
+ let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
+ let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
+ let(:project_tree) do
+ {
+ include: [{ issues: { include: [] } }],
+ preload: { issues: nil }
+ }
+ end
+
+ context 'when :export_fast_serialize feature is enabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(Gitlab::ImportExport::FastHashSerializer)
+ .to receive(:new)
+ .with(project, project_tree)
+ .and_return(serializer)
+
+ expect(serializer).to receive(:execute)
+
+ project_tree_saver.save
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(project).to receive(:as_json).with(project_tree)
+
+ project_tree_saver.save
+ end
+ end
+ end
+
+ # It is mostly duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ # except:
+ # context 'with description override' do
+ # context 'group members' do
+ # ^ These are specific to the ProjectTreeSaver
context 'JSON' do
let(:saved_project_json) do
project_tree_saver.save
project_json(project_tree_saver.full_path)
end
+ # It is not duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
context 'with description override' do
let(:params) { { description: 'Foo Bar' } }
let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
diff --git a/spec/lib/gitlab/import_export/reader_spec.rb b/spec/lib/gitlab/import_export/reader_spec.rb
index f93ff074770..87f665bd995 100644
--- a/spec/lib/gitlab/import_export/reader_spec.rb
+++ b/spec/lib/gitlab/import_export/reader_spec.rb
@@ -2,96 +2,45 @@ require 'spec_helper'
describe Gitlab::ImportExport::Reader do
let(:shared) { Gitlab::ImportExport::Shared.new(nil) }
- let(:test_config) { 'spec/support/import_export/import_export.yml' }
- let(:project_tree_hash) do
- {
- except: [:id, :created_at],
- include: [:issues, :labels,
- { merge_requests: {
- only: [:id],
- except: [:iid],
- include: [:merge_request_diff, :merge_request_test]
- } },
- { commit_statuses: { include: :commit } },
- { project_members: { include: { user: { only: [:email] } } } }]
- }
- end
-
- before do
- allow_any_instance_of(Gitlab::ImportExport).to receive(:config_file).and_return(test_config)
- end
-
- it 'generates hash from project tree config' do
- expect(described_class.new(shared: shared).project_tree).to match(project_tree_hash)
- end
-
- context 'individual scenarios' do
- it 'generates the correct hash for a single project relation' do
- setup_yaml(project_tree: [:issues])
-
- expect(described_class.new(shared: shared).project_tree).to match(include: [:issues])
- end
-
- it 'generates the correct hash for a single project feature relation' do
- setup_yaml(project_tree: [:project_feature])
- expect(described_class.new(shared: shared).project_tree).to match(include: [:project_feature])
- end
+ describe '#project_tree' do
+ subject { described_class.new(shared: shared).project_tree }
- it 'generates the correct hash for a multiple project relation' do
- setup_yaml(project_tree: [:issues, :snippets])
+ it 'delegates to AttributesFinder#find_root' do
+ expect_any_instance_of(Gitlab::ImportExport::AttributesFinder)
+ .to receive(:find_root)
+ .with(:project)
- expect(described_class.new(shared: shared).project_tree).to match(include: [:issues, :snippets])
+ subject
end
- it 'generates the correct hash for a single sub-relation' do
- setup_yaml(project_tree: [issues: [:notes]])
+ context 'when an exception is raised' do
+ before do
+ expect_any_instance_of(Gitlab::ImportExport::AttributesFinder)
+ .to receive(:find_root)
+ .with(:project)
+ .and_raise(StandardError)
+ end
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ issues: { include: :notes } }])
- end
-
- it 'generates the correct hash for a multiple sub-relation' do
- setup_yaml(project_tree: [merge_requests: [:notes, :merge_request_diff]])
-
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ merge_requests: { include: [:notes, :merge_request_diff] } }])
- end
+ it { is_expected.to be false }
- it 'generates the correct hash for a sub-relation with another sub-relation' do
- setup_yaml(project_tree: [merge_requests: [notes: :author]])
+ it 'logs the error' do
+ expect(shared).to receive(:error).with(instance_of(StandardError))
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ merge_requests: { include: { notes: { include: :author } } } }])
+ subject
+ end
end
+ end
- it 'generates the correct hash for a relation with included attributes' do
- setup_yaml(project_tree: [:issues], included_attributes: { issues: [:name, :description] })
-
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ issues: { only: [:name, :description] } }])
- end
-
- it 'generates the correct hash for a relation with excluded attributes' do
- setup_yaml(project_tree: [:issues], excluded_attributes: { issues: [:name] })
-
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ issues: { except: [:name] } }])
- end
-
- it 'generates the correct hash for a relation with both excluded and included attributes' do
- setup_yaml(project_tree: [:issues], excluded_attributes: { issues: [:name] }, included_attributes: { issues: [:description] })
-
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ issues: { except: [:name], only: [:description] } }])
- end
-
- it 'generates the correct hash for a relation with custom methods' do
- setup_yaml(project_tree: [:issues], methods: { issues: [:name] })
-
- expect(described_class.new(shared: shared).project_tree).to match(include: [{ issues: { methods: [:name] } }])
- end
+ describe '#group_members_tree' do
+ subject { described_class.new(shared: shared).group_members_tree }
- it 'generates the correct hash for group members' do
- expect(described_class.new(shared: shared).group_members_tree).to match({ include: { user: { only: [:email] } } })
- end
+ it 'delegates to AttributesFinder#find_root' do
+ expect_any_instance_of(Gitlab::ImportExport::AttributesFinder)
+ .to receive(:find_root)
+ .with(:group_members)
- def setup_yaml(hash)
- allow(YAML).to receive(:load_file).with(test_config).and_return(hash)
+ subject
end
end
end
diff --git a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
index 15748407f0c..17bb5bcc155 100644
--- a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
@@ -12,7 +12,7 @@ describe Gitlab::ImportExport::RelationRenameService do
let(:user) { create(:admin) }
let(:group) { create(:group, :nested) }
- let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, group: group, name: 'project', path: 'project') }
let(:shared) { project.import_export_shared }
before do
@@ -24,7 +24,6 @@ describe Gitlab::ImportExport::RelationRenameService do
let(:import_path) { 'spec/lib/gitlab/import_export' }
let(:file_content) { IO.read("#{import_path}/project.json") }
let!(:json_file) { ActiveSupport::JSON.decode(file_content) }
- let(:tree_hash) { project_tree_restorer.instance_variable_get(:@tree_hash) }
before do
allow(shared).to receive(:export_path).and_return(import_path)
@@ -92,21 +91,25 @@ describe Gitlab::ImportExport::RelationRenameService do
end
context 'when exporting' do
- let(:project_tree_saver) { Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: user, shared: shared) }
- let(:project_tree) { project_tree_saver.send(:project_json) }
+ let(:export_content_path) { project_tree_saver.full_path }
+ let(:export_content_hash) { ActiveSupport::JSON.decode(File.read(export_content_path)) }
+ let(:injected_hash) { renames.values.product([{}]).to_h }
- it 'adds old relationships to the exported file' do
- project_tree.merge!(renames.values.map { |new_name| [new_name, []] }.to_h)
+ let(:project_tree_saver) do
+ Gitlab::ImportExport::ProjectTreeSaver.new(
+ project: project, current_user: user, shared: shared)
+ end
- allow(project_tree_saver).to receive(:save) do |arg|
- project_tree_saver.send(:project_json_tree)
+ it 'adds old relationships to the exported file' do
+ # we inject relations with new names that should be rewritten
+ expect(project_tree_saver).to receive(:serialize_project_tree).and_wrap_original do |method, *args|
+ method.call(*args).merge(injected_hash)
end
- result = project_tree_saver.save
-
- saved_data = ActiveSupport::JSON.decode(result)
+ expect(project_tree_saver.save).to eq(true)
- expect(saved_data.keys).to include(*(renames.keys + renames.values))
+ expect(export_content_hash.keys).to include(*renames.keys)
+ expect(export_content_hash.keys).to include(*renames.values)
end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index d34c6d2421b..e9750d23c53 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -270,6 +270,7 @@ Ci::Pipeline:
- protected
- iid
- merge_request_id
+- external_pull_request_id
Ci::Stage:
- id
- name
@@ -715,3 +716,16 @@ List:
- updated_at
- milestone_id
- user_id
+ExternalPullRequest:
+- id
+- created_at
+- updated_at
+- project_id
+- pull_request_iid
+- status
+- source_branch
+- target_branch
+- source_repository
+- target_repository
+- source_sha
+- target_sha
diff --git a/spec/lib/gitlab/pages_spec.rb b/spec/lib/gitlab/pages_spec.rb
new file mode 100644
index 00000000000..affa2ebab2a
--- /dev/null
+++ b/spec/lib/gitlab/pages_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Pages do
+ let(:pages_shared_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) }
+
+ before do
+ allow(described_class).to receive(:secret).and_return(pages_shared_secret)
+ end
+
+ describe '.verify_api_request' do
+ let(:payload) { { 'iss' => 'gitlab-pages' } }
+
+ it 'returns false if it fails to validate the JWT' do
+ encoded_token = JWT.encode(payload, 'wrongsecret', 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to eq(false)
+ end
+
+ it 'returns the decoded JWT' do
+ encoded_token = JWT.encode(payload, described_class.secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to eq([{ "iss" => "gitlab-pages" }, { "alg" => "HS256" }])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 0dbfcf96124..e0b9581c75c 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -4,6 +4,8 @@
require 'spec_helper'
describe Gitlab::ProjectSearchResults do
+ include SearchHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:query) { 'hello world' }
@@ -31,10 +33,10 @@ describe Gitlab::ProjectSearchResults do
where(:scope, :count_method, :expected) do
'blobs' | :blobs_count | '1234'
- 'notes' | :limited_notes_count | '1000+'
+ 'notes' | :limited_notes_count | max_limited_count
'wiki_blobs' | :wiki_blobs_count | '1234'
'commits' | :commits_count | '1234'
- 'projects' | :limited_projects_count | '1000+'
+ 'projects' | :limited_projects_count | max_limited_count
'unknown' | nil | nil
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 5621c686b8a..26cba53502d 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Gitlab::SearchResults do
include ProjectForksHelper
+ include SearchHelpers
let(:user) { create(:user) }
let!(:project) { create(:project, name: 'foo') }
@@ -35,11 +36,11 @@ describe Gitlab::SearchResults do
using RSpec::Parameterized::TableSyntax
where(:scope, :count_method, :expected) do
- 'projects' | :limited_projects_count | '1000+'
- 'issues' | :limited_issues_count | '1000+'
- 'merge_requests' | :limited_merge_requests_count | '1000+'
- 'milestones' | :limited_milestones_count | '1000+'
- 'users' | :limited_users_count | '1000+'
+ 'projects' | :limited_projects_count | max_limited_count
+ 'issues' | :limited_issues_count | max_limited_count
+ 'merge_requests' | :limited_merge_requests_count | max_limited_count
+ 'milestones' | :limited_milestones_count | max_limited_count
+ 'users' | :limited_users_count | max_limited_count
'unknown' | nil | nil
end
@@ -56,9 +57,9 @@ describe Gitlab::SearchResults do
where(:count, :expected) do
23 | '23'
- 1000 | '1000'
- 1001 | '1000+'
- 1234 | '1000+'
+ 100 | '100'
+ 101 | max_limited_count
+ 1234 | max_limited_count
end
with_them do
diff --git a/spec/lib/gitlab/snippet_search_results_spec.rb b/spec/lib/gitlab/snippet_search_results_spec.rb
index 89d290aaa81..d3353b76c15 100644
--- a/spec/lib/gitlab/snippet_search_results_spec.rb
+++ b/spec/lib/gitlab/snippet_search_results_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe Gitlab::SnippetSearchResults do
+ include SearchHelpers
+
let!(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') }
let(:results) { described_class.new(Snippet.all, 'foo') }
@@ -25,7 +27,7 @@ describe Gitlab::SnippetSearchResults do
where(:scope, :count_method, :expected) do
'snippet_titles' | :snippet_titles_count | '1234'
'snippet_blobs' | :snippet_blobs_count | '1234'
- 'projects' | :limited_projects_count | '1000+'
+ 'projects' | :limited_projects_count | max_limited_count
'unknown' | nil | nil
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 63ca383ac4b..146e479adef 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -20,6 +20,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to belong_to(:auto_canceled_by) }
it { is_expected.to belong_to(:pipeline_schedule) }
it { is_expected.to belong_to(:merge_request) }
+ it { is_expected.to belong_to(:external_pull_request) }
it { is_expected.to have_many(:statuses) }
it { is_expected.to have_many(:trigger_requests) }
@@ -885,6 +886,25 @@ describe Ci::Pipeline, :mailer do
end
end
end
+
+ context 'when source is external pull request' do
+ let(:pipeline) do
+ create(:ci_pipeline, source: :external_pull_request_event, external_pull_request: pull_request)
+ end
+
+ let(:pull_request) { create(:external_pull_request, project: project) }
+
+ it 'exposes external pull request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_EXTERNAL_PULL_REQUEST_IID' => pull_request.pull_request_iid.to_s,
+ 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA' => pull_request.source_sha,
+ 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA' => pull_request.target_sha,
+ 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME' => pull_request.source_branch,
+ 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME' => pull_request.target_branch
+ )
+ end
+ end
end
describe '#protected_ref?' do
diff --git a/spec/models/external_pull_request_spec.rb b/spec/models/external_pull_request_spec.rb
new file mode 100644
index 00000000000..e85d5b2f6c7
--- /dev/null
+++ b/spec/models/external_pull_request_spec.rb
@@ -0,0 +1,220 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ExternalPullRequest do
+ let(:project) { create(:project) }
+ let(:source_branch) { 'the-branch' }
+ let(:status) { :open }
+
+ it { is_expected.to belong_to(:project) }
+
+ shared_examples 'has errors on' do |attribute|
+ it "has errors for #{attribute}" do
+ expect(subject).not_to be_valid
+ expect(subject.errors[attribute]).not_to be_empty
+ end
+ end
+
+ describe 'validations' do
+ context 'when source branch not present' do
+ subject { build(:external_pull_request, source_branch: nil) }
+
+ it_behaves_like 'has errors on', :source_branch
+ end
+
+ context 'when status not present' do
+ subject { build(:external_pull_request, status: nil) }
+
+ it_behaves_like 'has errors on', :status
+ end
+
+ context 'when pull request is from a fork' do
+ subject { build(:external_pull_request, source_repository: 'the-fork', target_repository: 'the-target') }
+
+ it_behaves_like 'has errors on', :base
+ end
+ end
+
+ describe 'create_or_update_from_params' do
+ subject { described_class.create_or_update_from_params(params) }
+
+ context 'when pull request does not exist' do
+ context 'when params are correct' do
+ let(:params) do
+ {
+ project_id: project.id,
+ pull_request_iid: 123,
+ source_branch: 'feature',
+ target_branch: 'master',
+ source_repository: 'the-repository',
+ target_repository: 'the-repository',
+ source_sha: '97de212e80737a608d939f648d959671fb0a0142',
+ target_sha: 'a09386439ca39abe575675ffd4b89ae824fec22f',
+ status: :open
+ }
+ end
+
+ it 'saves the model successfully and returns it' do
+ expect(subject).to be_persisted
+ expect(subject).to be_valid
+ end
+
+ it 'yields the model' do
+ yielded_value = nil
+
+ result = described_class.create_or_update_from_params(params) do |pull_request|
+ yielded_value = pull_request
+ end
+
+ expect(result).to eq(yielded_value)
+ end
+ end
+
+ context 'when params are not correct' do
+ let(:params) do
+ {
+ pull_request_iid: 123,
+ source_branch: 'feature',
+ target_branch: 'master',
+ source_repository: 'the-repository',
+ target_repository: 'the-repository',
+ source_sha: nil,
+ target_sha: nil,
+ status: :open
+ }
+ end
+
+ it 'returns an invalid model' do
+ expect(subject).not_to be_persisted
+ expect(subject).not_to be_valid
+ end
+ end
+ end
+
+ context 'when pull request exists' do
+ let!(:pull_request) do
+ create(:external_pull_request,
+ project: project,
+ source_sha: '97de212e80737a608d939f648d959671fb0a0142')
+ end
+
+ context 'when params are correct' do
+ let(:params) do
+ {
+ pull_request_iid: pull_request.pull_request_iid,
+ source_branch: pull_request.source_branch,
+ target_branch: pull_request.target_branch,
+ source_repository: 'the-repository',
+ target_repository: 'the-repository',
+ source_sha: 'ce84140e8b878ce6e7c4d298c7202ff38170e3ac',
+ target_sha: pull_request.target_sha,
+ status: :open
+ }
+ end
+
+ it 'updates the model successfully and returns it' do
+ expect(subject).to be_valid
+ expect(subject.source_sha).to eq(params[:source_sha])
+ expect(pull_request.reload.source_sha).to eq(params[:source_sha])
+ end
+ end
+
+ context 'when params are not correct' do
+ let(:params) do
+ {
+ pull_request_iid: pull_request.pull_request_iid,
+ source_branch: pull_request.source_branch,
+ target_branch: pull_request.target_branch,
+ source_repository: 'the-repository',
+ target_repository: 'the-repository',
+ source_sha: nil,
+ target_sha: nil,
+ status: :open
+ }
+ end
+
+ it 'returns an invalid model' do
+ expect(subject).not_to be_valid
+ expect(pull_request.reload.source_sha).not_to be_nil
+ expect(pull_request.target_sha).not_to be_nil
+ end
+ end
+ end
+ end
+
+ describe '#open?' do
+ it 'returns true if status is open' do
+ pull_request = create(:external_pull_request, status: :open)
+
+ expect(pull_request).to be_open
+ end
+
+ it 'returns false if status is not open' do
+ pull_request = create(:external_pull_request, status: :closed)
+
+ expect(pull_request).not_to be_open
+ end
+ end
+
+ describe '#closed?' do
+ it 'returns true if status is closed' do
+ pull_request = build(:external_pull_request, status: :closed)
+
+ expect(pull_request).to be_closed
+ end
+
+ it 'returns false if status is not closed' do
+ pull_request = build(:external_pull_request, status: :open)
+
+ expect(pull_request).not_to be_closed
+ end
+ end
+
+ describe '#actual_branch_head?' do
+ let(:project) { create(:project, :repository) }
+ let(:branch) { project.repository.branches.first }
+ let(:source_branch) { branch.name }
+
+ let(:pull_request) do
+ create(:external_pull_request,
+ project: project,
+ source_branch: source_branch,
+ source_sha: source_sha)
+ end
+
+ context 'when source sha matches the head of the branch' do
+ let(:source_sha) { branch.target }
+
+ it 'returns true' do
+ expect(pull_request).to be_actual_branch_head
+ end
+ end
+
+ context 'when source sha does not match the head of the branch' do
+ let(:source_sha) { project.repository.commit('HEAD').sha }
+
+ it 'returns false' do
+ expect(pull_request).not_to be_actual_branch_head
+ end
+ end
+ end
+
+ describe '#from_fork?' do
+ it 'returns true if source_repository differs from target_repository' do
+ pull_request = build(:external_pull_request,
+ source_repository: 'repository-1',
+ target_repository: 'repository-2')
+
+ expect(pull_request).to be_from_fork
+ end
+
+ it 'returns false if source_repository is the same as target_repository' do
+ pull_request = build(:external_pull_request,
+ source_repository: 'repository-1',
+ target_repository: 'repository-1')
+
+ expect(pull_request).not_to be_from_fork
+ end
+ end
+end
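For orientation, a minimal sketch of the predicates the spec above exercises — not the actual model, only a shape consistent with these expectations (status stored as a string, the stored SHA compared against the current branch head through the project repository):

    # Hypothetical sketch of ExternalPullRequest, inferred from the spec above.
    class ExternalPullRequest < ApplicationRecord
      belongs_to :project

      def open?
        status == 'open'
      end

      def closed?
        status == 'closed'
      end

      def from_fork?
        source_repository != target_repository
      end

      # True only while the stored source_sha is still the head of the source branch.
      def actual_branch_head?
        project.commit(source_branch)&.sha == source_sha
      end
    end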
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 519c519fbcf..5168064bb84 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -151,6 +151,24 @@ describe PagesDomain do
end
end
end
+
+ context 'with ecdsa certificate' do
+ it "is valid" do
+ domain = build(:pages_domain, :ecdsa)
+
+ expect(domain).to be_valid
+ end
+
+ context 'when curve is set explicitly by parameters' do
+ it 'adds errors to private key' do
+ domain = build(:pages_domain, :explicit_ecdsa)
+
+ expect(domain).to be_invalid
+
+ expect(domain.errors[:key]).not_to be_empty
+ end
+ end
+ end
end
describe 'validations' do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index bfbcac60fea..e2a684c42ae 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -99,6 +99,7 @@ describe Project do
it { is_expected.to have_many(:project_deploy_tokens) }
it { is_expected.to have_many(:deploy_tokens).through(:project_deploy_tokens) }
it { is_expected.to have_many(:cycle_analytics_stages) }
+ it { is_expected.to have_many(:external_pull_requests) }
it 'has an inverse relationship with merge requests' do
expect(described_class.reflect_on_association(:merge_requests).has_inverse?).to eq(:target_project)
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index b8c323904b8..6722a3c627d 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -32,7 +32,7 @@ describe User do
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys).dependent(:nullify) }
- it { is_expected.to have_many(:events).dependent(:destroy) }
+ it { is_expected.to have_many(:events).dependent(:delete_all) }
it { is_expected.to have_many(:issues).dependent(:destroy) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:merge_requests).dependent(:destroy) }
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
new file mode 100644
index 00000000000..0b3c5be9c45
--- /dev/null
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Internal::Pages do
+ describe "GET /internal/pages" do
+ let(:pages_shared_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) }
+
+ before do
+ allow(Gitlab::Pages).to receive(:secret).and_return(pages_shared_secret)
+ end
+
+ def query_host(host, headers = {})
+ get api("/internal/pages"), headers: headers, params: { host: host }
+ end
+
+ context 'feature flag disabled' do
+ before do
+ stub_feature_flags(pages_internal_api: false)
+ end
+
+ it 'responds with 404 Not Found' do
+ query_host('pages.gitlab.io')
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'feature flag enabled' do
+ context 'not authenticated' do
+ it 'responds with 401 Unauthorized' do
+ query_host('pages.gitlab.io')
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'authenticated' do
+ def query_host(host)
+ jwt_token = JWT.encode({ 'iss' => 'gitlab-pages' }, Gitlab::Pages.secret, 'HS256')
+ headers = { Gitlab::Pages::INTERNAL_API_REQUEST_HEADER => jwt_token }
+
+ super(host, headers)
+ end
+
+ it 'responds with 200 OK' do
+ query_host('pages.gitlab.io')
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 048d04cdefd..d98b9be726a 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -252,5 +252,43 @@ describe API::Settings, 'Settings' do
expect(json_response['asset_proxy_whitelist']).to eq(['example.com', '*.example.com', 'localhost'])
end
end
+
+ context 'domain_blacklist settings' do
+ it 'rejects domain_blacklist_enabled when domain_blacklist is empty' do
+ put api('/application/settings', admin),
+ params: {
+ domain_blacklist_enabled: true,
+ domain_blacklist: []
+ }
+
+ expect(response).to have_gitlab_http_status(400)
+ message = json_response["message"]
+ expect(message["domain_blacklist"]).to eq(["Domain blacklist cannot be empty if Blacklist is enabled."])
+ end
+
+ it 'allows array for domain_blacklist' do
+ put api('/application/settings', admin),
+ params: {
+ domain_blacklist_enabled: true,
+ domain_blacklist: ['domain1.com', 'domain2.com']
+ }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['domain_blacklist_enabled']).to be(true)
+ expect(json_response['domain_blacklist']).to eq(['domain1.com', 'domain2.com'])
+ end
+
+ it 'allows a string for domain_blacklist' do
+ put api('/application/settings', admin),
+ params: {
+ domain_blacklist_enabled: true,
+ domain_blacklist: 'domain3.com, *.domain4.com'
+ }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['domain_blacklist_enabled']).to be(true)
+ expect(json_response['domain_blacklist']).to eq(['domain3.com', '*.domain4.com'])
+ end
+ end
end
end
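The last example implies that a String value is normalized into an array before being stored. A hedged sketch of that normalization (hypothetical helper, not the actual ApplicationSetting code), splitting on commas and whitespace:

    def normalize_domain_blacklist(value)
      return value if value.is_a?(Array)

      value.to_s.split(/[\s,]+/).reject(&:empty?)
    end

    normalize_domain_blacklist('domain3.com, *.domain4.com')
    # => ["domain3.com", "*.domain4.com"]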
diff --git a/spec/requests/api/statistics_spec.rb b/spec/requests/api/statistics_spec.rb
new file mode 100644
index 00000000000..91fc4d4c123
--- /dev/null
+++ b/spec/requests/api/statistics_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Statistics, 'Statistics' do
+ include ProjectForksHelper
+ TABLES_TO_ANALYZE = %w[
+ projects
+ users
+ namespaces
+ issues
+ merge_requests
+ notes
+ snippets
+ fork_networks
+ fork_network_members
+ keys
+ milestones
+ ].freeze
+
+ let(:path) { "/application/statistics" }
+
+ describe "GET /application/statistics" do
+ context 'when no user' do
+ it "returns authentication error" do
+ get api(path, nil)
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context "when not an admin" do
+ let(:user) { create(:user) }
+
+ it "returns forbidden error" do
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'when authenticated as admin' do
+ let(:admin) { create(:admin) }
+
+ it 'matches the response schema' do
+ get api(path, admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('statistics')
+ end
+
+ it 'gives the right statistics' do
+ projects = create_list(:project, 4, namespace: create(:namespace, owner: admin))
+ issues = create_list(:issue, 2, project: projects.first, updated_by: admin)
+
+ create_list(:snippet, 2, :public, author: admin)
+ create_list(:note, 2, author: admin, project: projects.first, noteable: issues.first)
+ create_list(:milestone, 3, project: projects.first)
+ create(:key, user: admin)
+ create(:merge_request, source_project: projects.first)
+ fork_project(projects.first, admin)
+
+ # Make sure the reltuples have been updated
+ # to get a correct count on postgresql
+ TABLES_TO_ANALYZE.each do |table|
+ ActiveRecord::Base.connection.execute("ANALYZE #{table}")
+ end
+
+ get api(path, admin)
+
+ expected_statistics = {
+ issues: 2,
+ merge_requests: 1,
+ notes: 2,
+ snippets: 2,
+ forks: 1,
+ ssh_keys: 1,
+ milestones: 3,
+ users: 1,
+ projects: 5,
+ groups: 1,
+ active_users: 1
+ }
+
+ expected_statistics.each do |entity, count|
+ expect(json_response[entity.to_s]).to eq(count.to_s)
+ end
+ end
+ end
+ end
+end
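The ANALYZE loop above exists because the endpoint presumably reads approximate row counts from PostgreSQL's planner statistics rather than running COUNT(*) on every table, and those statistics only reflect the freshly created records after ANALYZE. A small illustration of that kind of estimate (hypothetical query, not the endpoint's implementation):

    # reltuples is the planner's row-count estimate for a table, refreshed by ANALYZE.
    estimate = ActiveRecord::Base.connection
      .select_value("SELECT reltuples::bigint FROM pg_class WHERE relname = 'projects'")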
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index d1b58aac104..97de26650db 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -11,6 +11,8 @@ require 'spec_helper'
# because they are 3 edge cases of using wiki pages.
describe API::Wikis do
+ include WorkhorseHelpers
+
let(:user) { create(:user) }
let(:group) { create(:group).tap { |g| g.add_owner(user) } }
let(:project_wiki) { create(:project_wiki, project: project, user: user) }
@@ -155,7 +157,7 @@ describe API::Wikis do
it 'pushes attachment to the wiki repository' do
allow(SecureRandom).to receive(:hex).and_return('fixed_hex')
- post(api(url, user), params: payload)
+ workhorse_post_with_file(api(url, user), file_key: :file, params: payload)
expect(response).to have_gitlab_http_status(201)
expect(json_response).to eq result_hash.deep_stringify_keys
@@ -180,6 +182,15 @@ describe API::Wikis do
expect(json_response.size).to eq(1)
expect(json_response['error']).to eq('file is invalid')
end
+
+ it 'is backward compatible with regular multipart uploads' do
+ allow(SecureRandom).to receive(:hex).and_return('fixed_hex')
+
+ post(api(url, user), params: payload)
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response).to eq result_hash.deep_stringify_keys
+ end
end
describe 'GET /projects/:id/wikis' do
diff --git a/spec/requests/projects/uploads_spec.rb b/spec/requests/projects/uploads_spec.rb
new file mode 100644
index 00000000000..aca4644289d
--- /dev/null
+++ b/spec/requests/projects/uploads_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'File uploads' do
+ include WorkhorseHelpers
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { create(:user) }
+
+ describe 'POST /:namespace/:project/create/:branch' do
+ let(:branch) { 'master' }
+ let(:create_url) { project_blob_path(project, branch) }
+ let(:blob_url) { project_blob_path(project, "#{branch}/dk.png") }
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: branch,
+ branch_name: branch,
+ file: fixture_file_upload('spec/fixtures/dk.png'),
+ commit_message: 'Add an image'
+ }
+ end
+
+ before do
+ project.add_maintainer(user)
+
+ login_as(user)
+ end
+
+ it 'redirects to blob' do
+ workhorse_post_with_file(create_url, file_key: :file, params: params)
+
+ expect(response).to redirect_to(blob_url)
+ end
+ end
+end
diff --git a/spec/serializers/merge_request_serializer_spec.rb b/spec/serializers/merge_request_serializer_spec.rb
index d1483c3c41e..cf0b8ea9b40 100644
--- a/spec/serializers/merge_request_serializer_spec.rb
+++ b/spec/serializers/merge_request_serializer_spec.rb
@@ -1,8 +1,9 @@
require 'spec_helper'
describe MergeRequestSerializer do
- let(:user) { create(:user) }
- let(:resource) { create(:merge_request) }
+ set(:user) { create(:user) }
+ set(:resource) { create(:merge_request, description: "Description") }
+
let(:json_entity) do
described_class.new(current_user: user)
.represent(resource, serializer: serializer)
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 6cec93a53fd..fe86982af91 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -23,6 +23,7 @@ describe Ci::CreatePipelineService do
trigger_request: nil,
variables_attributes: nil,
merge_request: nil,
+ external_pull_request: nil,
push_options: nil,
source_sha: nil,
target_sha: nil,
@@ -36,8 +37,11 @@ describe Ci::CreatePipelineService do
source_sha: source_sha,
target_sha: target_sha }
- described_class.new(project, user, params).execute(
- source, save_on_errors: save_on_errors, trigger_request: trigger_request, merge_request: merge_request)
+ described_class.new(project, user, params).execute(source,
+ save_on_errors: save_on_errors,
+ trigger_request: trigger_request,
+ merge_request: merge_request,
+ external_pull_request: external_pull_request)
end
# rubocop:enable Metrics/ParameterLists
@@ -756,33 +760,32 @@ describe Ci::CreatePipelineService do
end
context 'when builds with auto-retries are configured' do
+ let(:pipeline) { execute_service }
+ let(:rspec_job) { pipeline.builds.find_by(name: 'rspec') }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump({
+ rspec: { script: 'rspec', retry: retry_value }
+ }))
+ end
+
context 'as an integer' do
- before do
- config = YAML.dump(rspec: { script: 'rspec', retry: 2 })
- stub_ci_pipeline_yaml_file(config)
- end
+ let(:retry_value) { 2 }
it 'correctly creates builds with auto-retry value configured' do
- pipeline = execute_service
-
expect(pipeline).to be_persisted
- expect(pipeline.builds.find_by(name: 'rspec').retries_max).to eq 2
- expect(pipeline.builds.find_by(name: 'rspec').retry_when).to eq ['always']
+ expect(rspec_job.retries_max).to eq 2
+ expect(rspec_job.retry_when).to eq ['always']
end
end
context 'as hash' do
- before do
- config = YAML.dump(rspec: { script: 'rspec', retry: { max: 2, when: 'runner_system_failure' } })
- stub_ci_pipeline_yaml_file(config)
- end
+ let(:retry_value) { { max: 2, when: 'runner_system_failure' } }
it 'correctly creates builds with auto-retry value configured' do
- pipeline = execute_service
-
expect(pipeline).to be_persisted
- expect(pipeline.builds.find_by(name: 'rspec').retries_max).to eq 2
- expect(pipeline.builds.find_by(name: 'rspec').retry_when).to eq ['runner_system_failure']
+ expect(rspec_job.retries_max).to eq 2
+ expect(rspec_job.retry_when).to eq ['runner_system_failure']
end
end
end
@@ -969,6 +972,152 @@ describe Ci::CreatePipelineService do
end
end
+ describe 'Pipeline for external pull requests' do
+ let(:pipeline) do
+ execute_service(source: source,
+ external_pull_request: pull_request,
+ ref: ref_name,
+ source_sha: source_sha,
+ target_sha: target_sha)
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ let(:ref_name) { 'refs/heads/feature' }
+ let(:source_sha) { project.commit(ref_name).id }
+ let(:target_sha) { nil }
+
+ context 'when source is external pull request' do
+ let(:source) { :external_pull_request_event }
+
+ context 'when config has external_pull_requests keywords' do
+ let(:config) do
+ {
+ build: {
+ stage: 'build',
+ script: 'echo'
+ },
+ test: {
+ stage: 'test',
+ script: 'echo',
+ only: ['external_pull_requests']
+ },
+ pages: {
+ stage: 'deploy',
+ script: 'echo',
+ except: ['external_pull_requests']
+ }
+ }
+ end
+
+ context 'when external pull request is specified' do
+ let(:pull_request) { create(:external_pull_request, project: project, source_branch: 'feature', target_branch: 'master') }
+ let(:ref_name) { pull_request.source_ref }
+
+ it 'creates an external pull request pipeline' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_external_pull_request_event
+ expect(pipeline.external_pull_request).to eq(pull_request)
+ expect(pipeline.source_sha).to eq(source_sha)
+ expect(pipeline.builds.order(:stage_id)
+ .map(&:name))
+ .to eq(%w[build test])
+ end
+
+ context 'when ref is tag' do
+ let(:ref_name) { 'refs/tags/v1.1.0' }
+
+ it 'does not create an external pull request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:tag]).to eq(["is not included in the list"])
+ end
+ end
+
+ context 'when pull request is created from fork' do
+ it 'does not create an external pull request pipeline'
+ end
+
+ context "when there are no matched jobs" do
+ let(:config) do
+ {
+ test: {
+ stage: 'test',
+ script: 'echo',
+ except: ['external_pull_requests']
+ }
+ }
+ end
+
+ it 'does not create an external pull request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
+ end
+ end
+ end
+
+ context 'when external pull request is not specified' do
+ let(:pull_request) { nil }
+
+ it 'does not create an external pull request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:external_pull_request]).to eq(["can't be blank"])
+ end
+ end
+ end
+
+ context "when config does not have external_pull_requests keywords" do
+ let(:config) do
+ {
+ build: {
+ stage: 'build',
+ script: 'echo'
+ },
+ test: {
+ stage: 'test',
+ script: 'echo'
+ },
+ pages: {
+ stage: 'deploy',
+ script: 'echo'
+ }
+ }
+ end
+
+ context 'when external pull request is specified' do
+ let(:pull_request) do
+ create(:external_pull_request,
+ project: project,
+ source_branch: Gitlab::Git.ref_name(ref_name),
+ target_branch: 'master')
+ end
+
+ it 'creates an external pull request pipeline' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_external_pull_request_event
+ expect(pipeline.external_pull_request).to eq(pull_request)
+ expect(pipeline.source_sha).to eq(source_sha)
+ expect(pipeline.builds.order(:stage_id)
+ .map(&:name))
+ .to eq(%w[build test pages])
+ end
+ end
+
+ context 'when external pull request is not specified' do
+ let(:pull_request) { nil }
+
+ it 'does not create an external pull request pipeline' do
+ expect(pipeline).not_to be_persisted
+
+ expect(pipeline.errors[:base])
+ .to eq(['Failed to build the pipeline!'])
+ end
+ end
+ end
+ end
+ end
+
describe 'Pipelines for merge requests' do
let(:pipeline) do
execute_service(source: source,
@@ -1024,7 +1173,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_persisted
expect(pipeline).to be_merge_request_event
expect(pipeline.merge_request).to eq(merge_request)
- expect(pipeline.builds.order(:stage_id).map(&:name)).to eq(%w[test])
+ expect(pipeline.builds.order(:stage_id).pluck(:name)).to eq(%w[test])
end
it 'persists the specified source sha' do
@@ -1289,7 +1438,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_persisted
expect(pipeline).to be_web
expect(pipeline.merge_request).to be_nil
- expect(pipeline.builds.order(:stage_id).map(&:name)).to eq(%w[build pages])
+ expect(pipeline.builds.order(:stage_id).pluck(:name)).to eq(%w[build pages])
end
end
end
@@ -1329,7 +1478,7 @@ describe Ci::CreatePipelineService do
it 'creates a pipeline with build_a and test_a' do
expect(pipeline).to be_persisted
- expect(pipeline.builds.map(&:name)).to contain_exactly("build_a", "test_a")
+ expect(pipeline.builds.pluck(:name)).to contain_exactly("build_a", "test_a")
end
end
@@ -1364,7 +1513,303 @@ describe Ci::CreatePipelineService do
it 'does create a pipeline only with deploy' do
expect(pipeline).to be_persisted
- expect(pipeline.builds.map(&:name)).to contain_exactly("deploy")
+ expect(pipeline.builds.pluck(:name)).to contain_exactly("deploy")
+ end
+ end
+ end
+
+ context 'when rules are used' do
+ let(:ref_name) { 'refs/heads/master' }
+ let(:pipeline) { execute_service }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+ let(:regular_job) { pipeline.builds.find_by(name: 'regular-job') }
+ let(:rules_job) { pipeline.builds.find_by(name: 'rules-job') }
+ let(:delayed_job) { pipeline.builds.find_by(name: 'delayed-job') }
+
+ shared_examples 'rules jobs are excluded' do
+ it 'only persists the job without rules' do
+ expect(pipeline).to be_persisted
+ expect(regular_job).to be_persisted
+ expect(rules_job).to be_nil
+ expect(delayed_job).to be_nil
+ end
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ end
+
+ context 'with simple if: clauses' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ master-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - if: $CI_COMMIT_REF_NAME == "nonexistant-branch"
+ when: never
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ delayed-job:
+ script: "echo See you later, World!"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: delayed
+ start_in: 1 hour
+
+ never-job:
+ script: "echo Goodbye, World!"
+ rules:
+ - if: $CI_COMMIT_REF_NAME
+ when: never
+ EOY
+ end
+
+ context 'with matches' do
+ it 'creates a pipeline with the vanilla, delayed, and manual jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job', 'delayed-job', 'master-job')
+ end
+
+ it 'assigns job:when values to the builds' do
+ expect(pipeline.builds.pluck(:when)).to contain_exactly('on_success', 'delayed', 'manual')
+ end
+
+ it 'assigns start_in for delayed jobs' do
+ expect(delayed_job.options[:start_in]).to eq('1 hour')
+ end
+ end
+
+ context 'with no matches' do
+ let(:ref_name) { 'refs/heads/feature' }
+
+ it_behaves_like 'rules jobs are excluded'
+ end
+ end
+
+ context 'with complex if: clauses' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+ rules:
+ - if: $VAR == 'present' && $OTHER || $CI_COMMIT_REF_NAME
+ when: manual
+ EOY
+ end
+
+ it 'matches the first rule' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job')
+ expect(regular_job.when).to eq('manual')
+ end
+ end
+
+ context 'with changes:' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ rules-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - changes:
+ - README.md
+ when: manual
+ - changes:
+ - app.rb
+ when: on_success
+
+ delayed-job:
+ script: "echo See you later, World!"
+ rules:
+ - changes:
+ - README.md
+ when: delayed
+ start_in: 4 hours
+ EOY
+ end
+
+ context 'and matches' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:modified_paths).and_return(%w[README.md])
+ end
+
+ it 'creates three jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names)
+ .to contain_exactly('regular-job', 'rules-job', 'delayed-job')
+ end
+
+ it 'sets when: for all jobs' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('manual')
+ expect(delayed_job.when).to eq('delayed')
+ expect(delayed_job.options[:start_in]).to eq('4 hours')
+ end
+ end
+
+ context 'and matches the second rule' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:modified_paths).and_return(%w[app.rb])
+ end
+
+ it 'includes both jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job', 'rules-job')
+ end
+
+ it 'sets when: for the created rules job based on the second clause' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('on_success')
+ end
+ end
+
+ context 'and does not match' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:modified_paths).and_return(%w[useless_script.rb])
+ end
+
+ it_behaves_like 'rules jobs are excluded'
+
+ it 'sets when: for the created job' do
+ expect(regular_job.when).to eq('on_success')
+ end
+ end
+ end
+
+ context 'with mixed if: and changes: rules' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ rules-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - changes:
+ - README.md
+ when: manual
+ - if: $CI_COMMIT_REF_NAME == "master"
+ when: on_success
+
+ delayed-job:
+ script: "echo See you later, World!"
+ rules:
+ - changes:
+ - README.md
+ when: delayed
+ start_in: 4 hours
+ - if: $CI_COMMIT_REF_NAME == "master"
+ when: delayed
+ start_in: 1 hour
+ EOY
+ end
+
+ context 'and changes: matches before if' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:modified_paths).and_return(%w[README.md])
+ end
+
+ it 'creates three jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names)
+ .to contain_exactly('regular-job', 'rules-job', 'delayed-job')
+ end
+
+ it 'sets when: for all jobs' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('manual')
+ expect(delayed_job.when).to eq('delayed')
+ expect(delayed_job.options[:start_in]).to eq('4 hours')
+ end
+ end
+
+ context 'and if: matches after changes' do
+ it 'includes all three jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job', 'rules-job', 'delayed-job')
+ end
+
+ it 'sets when: for the created rules job based on the second clause' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('on_success')
+ expect(delayed_job.when).to eq('delayed')
+ expect(delayed_job.options[:start_in]).to eq('1 hour')
+ end
+ end
+
+ context 'and does not match' do
+ let(:ref_name) { 'refs/heads/wip' }
+
+ it_behaves_like 'rules jobs are excluded'
+
+ it 'sets when: for the created job' do
+ expect(regular_job.when).to eq('on_success')
+ end
+ end
+ end
+
+ context 'with mixed if: and changes: clauses' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ rules-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ changes: [README.md]
+ when: on_success
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ changes: [app.rb]
+ when: manual
+ EOY
+ end
+
+ context 'with if matches and changes matches' do
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:modified_paths).and_return(%w[app.rb])
+ end
+
+ it 'persists all jobs' do
+ expect(pipeline).to be_persisted
+ expect(regular_job).to be_persisted
+ expect(rules_job).to be_persisted
+ expect(rules_job.when).to eq('manual')
+ end
+ end
+
+ context 'with if matches and no change matches' do
+ it_behaves_like 'rules jobs are excluded'
+ end
+
+ context 'with change matches and no if matches' do
+ let(:ref_name) { 'refs/heads/feature' }
+
+ before do
+ allow_any_instance_of(Ci::Pipeline)
+ .to receive(:modified_paths).and_return(%w[README.md])
+ end
+
+ it_behaves_like 'rules jobs are excluded'
+ end
+
+ context 'and no matches' do
+ let(:ref_name) { 'refs/heads/feature' }
+
+ it_behaves_like 'rules jobs are excluded'
end
end
end
diff --git a/spec/services/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/external_pull_requests/create_pipeline_service_spec.rb
new file mode 100644
index 00000000000..a4da5b38b97
--- /dev/null
+++ b/spec/services/external_pull_requests/create_pipeline_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ExternalPullRequests::CreatePipelineService do
+ describe '#execute' do
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+ let(:pull_request) { create(:external_pull_request, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ subject { described_class.new(project, user).execute(pull_request) }
+
+ context 'when pull request is open' do
+ before do
+ pull_request.update!(status: :open)
+ end
+
+ context 'when source sha is the head of the source branch' do
+ let(:source_branch) { project.repository.branches.last }
+ let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService) }
+
+ before do
+ pull_request.update!(source_branch: source_branch.name, source_sha: source_branch.target)
+ end
+
+ it 'creates a pipeline for external pull request' do
+ expect(subject).to be_valid
+ expect(subject).to be_persisted
+ expect(subject).to be_external_pull_request_event
+ expect(subject).to eq(project.ci_pipelines.last)
+ expect(subject.external_pull_request).to eq(pull_request)
+ expect(subject.user).to eq(user)
+ expect(subject.status).to eq('pending')
+ expect(subject.ref).to eq(pull_request.source_branch)
+ expect(subject.sha).to eq(pull_request.source_sha)
+ expect(subject.source_sha).to eq(pull_request.source_sha)
+ end
+ end
+
+ context 'when source sha is not the head of the source branch (force push upon rebase)' do
+ let(:source_branch) { project.repository.branches.first }
+ let(:commit) { project.repository.commits(source_branch.name, limit: 2).last }
+
+ before do
+ pull_request.update!(source_branch: source_branch.name, source_sha: commit.sha)
+ end
+
+ it 'does nothing' do
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+
+ expect(subject).to be_nil
+ end
+ end
+ end
+
+ context 'when pull request is not open' do
+ before do
+ pull_request.update!(status: :closed)
+ end
+
+ it 'does nothing' do
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+
+ expect(subject).to be_nil
+ end
+ end
+ end
+end
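A sketch of the service the spec above describes — an assumption consistent with its expectations, not the shipped implementation: only open pull requests whose stored SHA is still the branch head get a pipeline, created through Ci::CreatePipelineService with the external_pull_request_event source.

    module ExternalPullRequests
      class CreatePipelineService < BaseService
        def execute(pull_request)
          # Skip closed pull requests and stale SHAs (e.g. after a force push).
          return unless pull_request.open? && pull_request.actual_branch_head?

          Ci::CreatePipelineService
            .new(project, current_user, ref: pull_request.source_branch)
            .execute(:external_pull_request_event, external_pull_request: pull_request)
        end
      end
    end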
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index d9e607cd251..c3a4f3dbe3f 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -99,6 +99,20 @@ describe Git::BranchPushService, services: true do
expect(pipeline).to be_push
expect(Gitlab::Git::BRANCH_REF_PREFIX + pipeline.ref).to eq(ref)
end
+
+ context 'when pipeline has errors' do
+ before do
+ config = YAML.dump({ test: { script: 'ls', only: ['feature'] } })
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'reports an error' do
+ allow(Sidekiq).to receive(:server?).and_return(true)
+ expect(Sidekiq.logger).to receive(:warn)
+
+ expect { subject }.not_to change { Ci::Pipeline.count }
+ end
+ end
end
describe "Updates merge requests" do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 6ca0a3fa448..b65ee16c189 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1140,6 +1140,19 @@ describe QuickActions::InterpretService do
let(:todo_label) { create(:label, project: project, title: 'To Do') }
let(:inreview_label) { create(:label, project: project, title: 'In Review') }
+ it 'is available when the user is a developer' do
+ expect(service.available_commands(issue)).to include(a_hash_including(name: :copy_metadata))
+ end
+
+ context 'when the user does not have permission' do
+ let(:guest) { create(:user) }
+ let(:service) { described_class.new(project, guest) }
+
+ it 'is not available' do
+ expect(service.available_commands(issue)).not_to include(a_hash_including(name: :copy_metadata))
+ end
+ end
+
it_behaves_like 'empty command' do
let(:content) { '/copy_metadata' }
let(:issuable) { issue }
diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb
index 2cf3f4b83c4..d1d25fbabcd 100644
--- a/spec/support/helpers/search_helpers.rb
+++ b/spec/support/helpers/search_helpers.rb
@@ -19,4 +19,8 @@ module SearchHelpers
click_link scope
end
end
+
+ def max_limited_count
+ Gitlab::SearchResults::COUNT_LIMIT_MESSAGE
+ end
end
diff --git a/spec/support/helpers/workhorse_helpers.rb b/spec/support/helpers/workhorse_helpers.rb
index 4488e5f227e..fdbfe53fa39 100644
--- a/spec/support/helpers/workhorse_helpers.rb
+++ b/spec/support/helpers/workhorse_helpers.rb
@@ -17,7 +17,36 @@ module WorkhorseHelpers
end
def workhorse_internal_api_request_header
- jwt_token = JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256')
{ 'HTTP_' + Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER.upcase.tr('-', '_') => jwt_token }
end
+
+ # workhorse_post_with_file transforms the file at file_key inside params as if it had been disk-accelerated by Workhorse
+ def workhorse_post_with_file(url, file_key:, params:)
+ workhorse_params = params.dup
+ file = workhorse_params.delete(file_key)
+
+ workhorse_params.merge!(workhorse_disk_accelerated_file_params(file_key, file))
+
+ post(url,
+ params: workhorse_params,
+ headers: workhorse_rewritten_fields_header('file' => file.path)
+ )
+ end
+
+ private
+
+ def jwt_token(data = {})
+ JWT.encode({ 'iss' => 'gitlab-workhorse' }.merge(data), Gitlab::Workhorse.secret, 'HS256')
+ end
+
+ def workhorse_rewritten_fields_header(fields)
+ { Gitlab::Middleware::Multipart::RACK_ENV_KEY => jwt_token('rewritten_fields' => fields) }
+ end
+
+ def workhorse_disk_accelerated_file_params(key, file)
+ {
+ "#{key}.name" => file.original_filename,
+ "#{key}.path" => file.path
+ }
+ end
end
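Usage mirrors the wikis and uploads specs above: pass the regular params hash and name the key holding the uploaded file; the helper swaps it for the "<key>.name"/"<key>.path" fields and adds the signed rewritten-fields header that Workhorse would normally attach.

    payload = { file: fixture_file_upload('spec/fixtures/dk.png'), commit_message: 'Add an image' }

    workhorse_post_with_file(api(url, user), file_key: :file, params: payload)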
diff --git a/spec/support/import_export/import_export.yml b/spec/support/import_export/import_export.yml
index 734d6838f4d..116bc8d0b9c 100644
--- a/spec/support/import_export/import_export.yml
+++ b/spec/support/import_export/import_export.yml
@@ -1,15 +1,22 @@
# Class relationships to be included in the project import/export
-project_tree:
- - :issues
- - :labels
- - merge_requests:
- - :merge_request_diff
- - :merge_request_test
- - commit_statuses:
- - :commit
- - project_members:
+tree:
+ project:
+ - :issues
+ - :labels
+ - merge_requests:
+ - :merge_request_diff
+ - :merge_request_test
+ - commit_statuses:
+ - :commit
+ - project_members:
+ - :user
+ group_members:
- :user
+preloads:
+ merge_request_diff:
+ source_project:
+
included_attributes:
merge_requests:
- :id
diff --git a/spec/validators/named_ecdsa_key_validator_spec.rb b/spec/validators/named_ecdsa_key_validator_spec.rb
new file mode 100644
index 00000000000..044c5b84a56
--- /dev/null
+++ b/spec/validators/named_ecdsa_key_validator_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe NamedEcdsaKeyValidator do
+ let(:validator) { described_class.new(attributes: [:key]) }
+ let!(:domain) { build(:pages_domain) }
+
+ subject { validator.validate_each(domain, :key, value) }
+
+ context 'with empty value' do
+ let(:value) { nil }
+
+ it 'does not add any error if value is empty' do
+ subject
+
+ expect(domain.errors).to be_empty
+ end
+ end
+
+ shared_examples 'does not add any error' do
+ it 'does not add any error' do
+ expect(value).to be_present
+
+ subject
+
+ expect(domain.errors).to be_empty
+ end
+ end
+
+ context 'when key is not EC' do
+ let(:value) { attributes_for(:pages_domain)[:key] }
+
+ include_examples 'does not add any error'
+ end
+
+ context 'with ECDSA certificate with named curve' do
+ let(:value) { attributes_for(:pages_domain, :ecdsa)[:key] }
+
+ include_examples 'does not add any error'
+ end
+
+ context 'with ECDSA certificate with explicit curve params' do
+ let(:value) { attributes_for(:pages_domain, :explicit_ecdsa)[:key] }
+
+ it 'adds errors' do
+ expect(value).to be_present
+
+ subject
+
+ expect(domain.errors[:key]).not_to be_empty
+ end
+ end
+end
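A sketch of the distinction the validator appears to draw, assuming it relies on OpenSSL reporting a named curve (curve_name is nil when the EC parameters are spelled out explicitly):

    require 'openssl'

    def named_curve_or_non_ec_key?(pem)
      key = OpenSSL::PKey.read(pem)
      return true unless key.is_a?(OpenSSL::PKey::EC) # only EC keys are checked

      !key.group.curve_name.nil? # nil when the curve is given as explicit parameters
    rescue OpenSSL::PKey::PKeyError
      false
    end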
diff --git a/spec/workers/update_external_pull_requests_worker_spec.rb b/spec/workers/update_external_pull_requests_worker_spec.rb
new file mode 100644
index 00000000000..f3956bb3514
--- /dev/null
+++ b/spec/workers/update_external_pull_requests_worker_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UpdateExternalPullRequestsWorker do
+ describe '#perform' do
+ set(:project) { create(:project, import_source: 'tanuki/repository') }
+ set(:user) { create(:user) }
+ let(:worker) { described_class.new }
+
+ before do
+ create(:external_pull_request,
+ project: project,
+ source_repository: project.import_source,
+ target_repository: project.import_source,
+ source_branch: 'feature-1',
+ target_branch: 'master')
+
+ create(:external_pull_request,
+ project: project,
+ source_repository: project.import_source,
+ target_repository: project.import_source,
+ source_branch: 'feature-1',
+ target_branch: 'develop')
+ end
+
+ subject { worker.perform(project.id, user.id, ref) }
+
+ context 'when ref is a branch' do
+ let(:ref) { 'refs/heads/feature-1' }
+ let(:create_pipeline_service) { instance_double(ExternalPullRequests::CreatePipelineService) }
+
+ it 'runs CreatePipelineService for each pull request matching the source branch and repository' do
+ expect(ExternalPullRequests::CreatePipelineService)
+ .to receive(:new)
+ .and_return(create_pipeline_service)
+ .twice
+ expect(create_pipeline_service).to receive(:execute).twice
+
+ subject
+ end
+ end
+
+ context 'when ref is not a branch' do
+ let(:ref) { 'refs/tags/v1.2.3' }
+
+ it 'does nothing' do
+ expect(ExternalPullRequests::CreatePipelineService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
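Finally, a sketch of the worker this last spec describes — hypothetical, but consistent with its two cases: a branch push runs CreatePipelineService once per stored pull request matching the pushed branch and the project's import_source repository, while tag pushes do nothing.

    class UpdateExternalPullRequestsWorker
      include ApplicationWorker

      def perform(project_id, user_id, ref)
        # Only branch refs are relevant; tags never map to a pull request.
        return unless Gitlab::Git.branch_ref?(ref)

        project = Project.find_by_id(project_id)
        user = User.find_by_id(user_id)
        return unless project && user

        branch = Gitlab::Git.ref_name(ref)

        project.external_pull_requests
          .where(source_branch: branch, source_repository: project.import_source)
          .find_each do |pull_request|
            ExternalPullRequests::CreatePipelineService.new(project, user).execute(pull_request)
          end
      end
    end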