gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/admin/clusters/applications_controller_spec.rb | 149
-rw-r--r--  spec/controllers/admin/clusters_controller_spec.rb | 540
-rw-r--r--  spec/controllers/concerns/enforces_admin_authentication_spec.rb | 40
-rw-r--r--  spec/controllers/groups/clusters_controller_spec.rb | 27
-rw-r--r--  spec/controllers/projects/clusters_controller_spec.rb | 35
-rw-r--r--  spec/controllers/projects/pipeline_schedules_controller_spec.rb | 3
-rw-r--r--  spec/controllers/projects/stages_controller_spec.rb | 72
-rw-r--r--  spec/factories/ci/pipeline_schedule_variables.rb | 1
-rw-r--r--  spec/factories/clusters/clusters.rb | 2
-rw-r--r--  spec/factories/merge_requests.rb | 6
-rw-r--r--  spec/factories/projects.rb | 1
-rw-r--r--  spec/features/dashboard/projects_spec.rb | 14
-rw-r--r--  spec/features/dashboard/user_filters_projects_spec.rb | 221
-rw-r--r--  spec/features/groups/members/leave_group_spec.rb | 26
-rw-r--r--  spec/features/issuables/sorting_list_spec.rb | 28
-rw-r--r--  spec/features/issues/filtered_search/dropdown_hint_spec.rb | 34
-rw-r--r--  spec/features/merge_request/user_sees_merge_widget_spec.rb | 22
-rw-r--r--  spec/features/merge_request/user_suggests_changes_on_diff_spec.rb | 22
-rw-r--r--  spec/features/oauth_login_spec.rb | 2
-rw-r--r--  spec/features/profiles/user_edit_preferences_spec.rb | 32
-rw-r--r--  spec/features/profiles/user_edit_profile_spec.rb | 32
-rw-r--r--  spec/features/projects/jobs_spec.rb | 6
-rw-r--r--  spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb | 9
-rw-r--r--  spec/features/projects/members/member_leaves_project_spec.rb | 13
-rw-r--r--  spec/features/projects/pipelines/pipeline_spec.rb | 22
-rw-r--r--  spec/features/projects/settings/repository_settings_spec.rb | 31
-rw-r--r--  spec/finders/cluster_ancestors_finder_spec.rb | 29
-rw-r--r--  spec/finders/issues_finder_spec.rb | 8
-rw-r--r--  spec/finders/merge_requests_finder_spec.rb | 2
-rw-r--r--  spec/fixtures/api/schemas/pipeline_schedule.json | 2
-rw-r--r--  spec/fixtures/api/schemas/pipeline_schedule_variable.json | 10
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/release.json | 35
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json | 22
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/release/releases_for_guest.json | 4
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/release/tag_release.json | 12
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/releases.json | 4
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/tag.json | 2
-rw-r--r--  spec/frontend/clusters/clusters_bundle_spec.js | 82
-rw-r--r--  spec/frontend/clusters/components/application_row_spec.js | 158
-rw-r--r--  spec/frontend/clusters/components/applications_spec.js | 2
-rw-r--r--  spec/frontend/clusters/components/uninstall_application_button_spec.js | 32
-rw-r--r--  spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js | 56
-rw-r--r--  spec/frontend/clusters/services/application_state_machine_spec.js | 54
-rw-r--r--  spec/frontend/clusters/services/mock_data.js | 7
-rw-r--r--  spec/frontend/clusters/stores/clusters_store_spec.js | 14
-rw-r--r--  spec/frontend/environment.js | 7
-rw-r--r--  spec/frontend/helpers/fixtures.js | 4
-rw-r--r--  spec/frontend/operation_settings/components/external_dashboard_spec.js | 100
-rw-r--r--  spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js | 103
-rw-r--r--  spec/graphql/resolvers/base_resolver_spec.rb | 15
-rw-r--r--  spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb | 8
-rw-r--r--  spec/graphql/resolvers/issues_resolver_spec.rb | 7
-rw-r--r--  spec/graphql/resolvers/project_resolver_spec.rb | 8
-rw-r--r--  spec/graphql/types/base_field_spec.rb | 26
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 4
-rw-r--r--  spec/helpers/storage_helper_spec.rb | 24
-rw-r--r--  spec/initializers/secret_token_spec.rb | 4
-rw-r--r--  spec/javascripts/diffs/components/diff_content_spec.js | 14
-rw-r--r--  spec/javascripts/diffs/mock_data/diff_file.js | 1
-rw-r--r--  spec/javascripts/diffs/store/actions_spec.js | 92
-rw-r--r--  spec/javascripts/diffs/store/mutations_spec.js | 94
-rw-r--r--  spec/javascripts/fixtures/.gitignore | 2
-rw-r--r--  spec/javascripts/ide/stores/actions/file_spec.js | 19
-rw-r--r--  spec/javascripts/monitoring/dashboard_spec.js | 106
-rw-r--r--  spec/javascripts/notes/stores/actions_spec.js | 130
-rw-r--r--  spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js | 77
-rw-r--r--  spec/javascripts/pipelines/graph/action_component_spec.js | 11
-rw-r--r--  spec/javascripts/pipelines/graph/stage_column_component_spec.js | 49
-rw-r--r--  spec/javascripts/pipelines/pipeline_triggerer_spec.js | 54
-rw-r--r--  spec/javascripts/pipelines/pipeline_url_spec.js | 48
-rw-r--r--  spec/javascripts/pipelines/pipelines_table_row_spec.js | 4
-rw-r--r--  spec/javascripts/test_constants.js | 4
-rw-r--r--  spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js | 13
-rw-r--r--  spec/javascripts/vue_shared/components/markdown/suggestion_diff_header_spec.js | 75
-rw-r--r--  spec/lib/api/helpers/related_resources_helpers_spec.rb | 34
-rw-r--r--  spec/lib/banzai/filter/table_of_contents_filter_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/bitbucket_server_import/importer_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/status/build/factory_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/status/stage/factory_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/ci/status/stage/play_manual_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/data_builder/deployment_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb | 110
-rw-r--r--  spec/lib/gitlab/git/object_pool_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/git/repository_cleaner_spec.rb | 71
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/gitaly_client/repository_service_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/github_import/importer/issue_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/graphql/generic_tracing_spec.rb | 67
-rw-r--r--  spec/lib/gitlab/graphql/tracing_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 2
-rw-r--r--  spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/metrics/system_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/namespaced_session_store_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/path_regex_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/session_spec.rb | 27
-rw-r--r--  spec/mailers/notify_spec.rb | 4
-rw-r--r--  spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb | 57
-rw-r--r--  spec/models/ci/build_spec.rb | 8
-rw-r--r--  spec/models/ci/group_variable_spec.rb | 3
-rw-r--r--  spec/models/ci/legacy_stage_spec.rb | 2
-rw-r--r--  spec/models/ci/pipeline_schedule_spec.rb | 9
-rw-r--r--  spec/models/ci/pipeline_schedule_variable_spec.rb | 2
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 34
-rw-r--r--  spec/models/ci/pipeline_variable_spec.rb | 3
-rw-r--r--  spec/models/ci/stage_spec.rb | 2
-rw-r--r--  spec/models/ci/variable_spec.rb | 3
-rw-r--r--  spec/models/clusters/applications/runner_spec.rb | 18
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 27
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 12
-rw-r--r--  spec/models/commit_status_spec.rb | 4
-rw-r--r--  spec/models/concerns/has_ref_spec.rb | 4
-rw-r--r--  spec/models/issue_spec.rb | 23
-rw-r--r--  spec/models/member_spec.rb | 10
-rw-r--r--  spec/models/merge_request_spec.rb | 23
-rw-r--r--  spec/models/namespace_spec.rb | 42
-rw-r--r--  spec/models/note_diff_file_spec.rb | 27
-rw-r--r--  spec/models/project_services/chat_message/deployment_message_spec.rb | 13
-rw-r--r--  spec/models/project_spec.rb | 104
-rw-r--r--  spec/models/project_statistics_spec.rb | 30
-rw-r--r--  spec/models/release_spec.rb | 5
-rw-r--r--  spec/models/remote_mirror_spec.rb | 16
-rw-r--r--  spec/models/repository_spec.rb | 85
-rw-r--r--  spec/models/user_preference_spec.rb | 6
-rw-r--r--  spec/policies/clusters/cluster_policy_spec.rb | 16
-rw-r--r--  spec/policies/clusters/instance_policy_spec.rb | 36
-rw-r--r--  spec/policies/personal_snippet_policy_spec.rb | 31
-rw-r--r--  spec/policies/project_policy_spec.rb | 4
-rw-r--r--  spec/presenters/clusters/cluster_presenter_spec.rb | 12
-rw-r--r--  spec/requests/api/discussions_spec.rb | 4
-rw-r--r--  spec/requests/api/group_variables_spec.rb | 8
-rw-r--r--  spec/requests/api/internal_spec.rb | 4
-rw-r--r--  spec/requests/api/jobs_spec.rb | 4
-rw-r--r--  spec/requests/api/members_spec.rb | 2
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 27
-rw-r--r--  spec/requests/api/pipeline_schedules_spec.rb | 7
-rw-r--r--  spec/requests/api/pipelines_spec.rb | 9
-rw-r--r--  spec/requests/api/project_clusters_spec.rb | 2
-rw-r--r--  spec/requests/api/projects_spec.rb | 3
-rw-r--r--  spec/requests/api/releases_spec.rb | 49
-rw-r--r--  spec/requests/api/tags_spec.rb | 2
-rw-r--r--  spec/requests/api/variables_spec.rb | 8
-rw-r--r--  spec/routing/uploads_routing_spec.rb | 22
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 4
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 2
-rw-r--r--  spec/serializers/stage_entity_spec.rb | 28
-rw-r--r--  spec/serializers/stage_serializer_spec.rb | 31
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 34
-rw-r--r--  spec/services/ci/play_manual_stage_service_spec.rb | 79
-rw-r--r--  spec/services/clusters/applications/create_service_spec.rb | 2
-rw-r--r--  spec/services/clusters/build_service_spec.rb | 8
-rw-r--r--  spec/services/clusters/refresh_service_spec.rb | 6
-rw-r--r--  spec/services/lfs/file_transformer_spec.rb | 19
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/rebase_service_spec.rb | 18
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 22
-rw-r--r--  spec/services/projects/cleanup_service_spec.rb | 89
-rw-r--r--  spec/services/projects/housekeeping_service_spec.rb | 3
-rw-r--r--  spec/services/system_hooks_service_spec.rb | 12
-rw-r--r--  spec/services/users/update_service_spec.rb | 9
-rw-r--r--  spec/spec_helper.rb | 2
-rw-r--r--  spec/support/capybara.rb | 11
-rw-r--r--  spec/support/helpers/features/notes_helpers.rb | 12
-rw-r--r--  spec/support/helpers/filtered_search_helpers.rb | 17
-rw-r--r--  spec/support/helpers/javascript_fixtures_helpers.rb | 2
-rw-r--r--  spec/support/helpers/mobile_helpers.rb | 2
-rw-r--r--  spec/support/helpers/select2_helper.rb | 4
-rw-r--r--  spec/support/shared_context/policies/project_policy_shared_context.rb | 3
-rw-r--r--  spec/support/shared_examples/application_setting_examples.rb | 4
-rw-r--r--  spec/support/shared_examples/ci/stage_shared_examples.rb | 27
-rw-r--r--  spec/support/shared_examples/ci_trace_shared_examples.rb | 39
-rw-r--r--  spec/support/shared_examples/controllers/variables_shared_examples.rb | 12
-rw-r--r--  spec/support/shared_examples/models/ci_variable_shared_examples.rb | 29
-rw-r--r--  spec/support/shared_examples/models/member_shared_examples.rb | 2
-rw-r--r--  spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb | 7
-rw-r--r--  spec/support/shared_examples/requests/api/discussions.rb | 24
-rw-r--r--  spec/uploaders/import_export_uploader_spec.rb | 32
-rw-r--r--  spec/workers/cluster_configure_worker_spec.rb | 10
-rw-r--r--  spec/workers/git_garbage_collect_worker_spec.rb | 13
183 files changed, 4394 insertions(+), 755 deletions(-)
diff --git a/spec/controllers/admin/clusters/applications_controller_spec.rb b/spec/controllers/admin/clusters/applications_controller_spec.rb
new file mode 100644
index 00000000000..76f261e7d3f
--- /dev/null
+++ b/spec/controllers/admin/clusters/applications_controller_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Admin::Clusters::ApplicationsController do
+ include AccessMatchersForController
+
+ def current_application
+ Clusters::Cluster::APPLICATIONS[application]
+ end
+
+ shared_examples 'a secure endpoint' do
+ it { expect { subject }.to be_allowed_for(:admin) }
+ it { expect { subject }.to be_denied_for(:user) }
+ it { expect { subject }.to be_denied_for(:external) }
+
+ context 'when instance clusters are disabled' do
+ before do
+ stub_feature_flags(instance_clusters: false)
+ end
+
+ it 'returns 404' do
+ is_expected.to have_http_status(:not_found)
+ end
+ end
+ end
+
+ let(:cluster) { create(:cluster, :instance, :provided_by_gcp) }
+
+ describe 'POST create' do
+ subject do
+ post :create, params: params
+ end
+
+ let(:application) { 'helm' }
+ let(:params) { { application: application, id: cluster.id } }
+
+ describe 'functionality' do
+ let(:admin) { create(:admin) }
+
+ before do
+ sign_in(admin)
+ end
+
+ it 'schedules an application installation' do
+ expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
+
+ expect { subject }.to change { current_application.count }
+ expect(response).to have_http_status(:no_content)
+ expect(cluster.application_helm).to be_scheduled
+ end
+
+ context 'when cluster does not exist' do
+ before do
+ cluster.destroy!
+ end
+
+ it 'returns 404' do
+ expect { subject }.not_to change { current_application.count }
+ expect(response).to have_http_status(:not_found)
+ end
+ end
+
+ context 'when application is unknown' do
+ let(:application) { 'unkwnown-app' }
+
+ it 'returns 404' do
+ is_expected.to have_http_status(:not_found)
+ end
+ end
+
+ context 'when application is already installing' do
+ before do
+ create(:clusters_applications_helm, :installing, cluster: cluster)
+ end
+
+ it 'returns 400' do
+ is_expected.to have_http_status(:bad_request)
+ end
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+ end
+
+ it_behaves_like 'a secure endpoint'
+ end
+ end
+
+ describe 'PATCH update' do
+ subject do
+ patch :update, params: params
+ end
+
+ let!(:application) { create(:clusters_applications_cert_managers, :installed, cluster: cluster) }
+ let(:application_name) { application.name }
+ let(:params) { { application: application_name, id: cluster.id, email: "new-email@example.com" } }
+
+ describe 'functionality' do
+ let(:admin) { create(:admin) }
+
+ before do
+ sign_in(admin)
+ end
+
+ context "when cluster and app exists" do
+ it "schedules an application update" do
+ expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
+
+ is_expected.to have_http_status(:no_content)
+
+ expect(cluster.application_cert_manager).to be_scheduled
+ end
+ end
+
+ context 'when cluster does not exist' do
+ before do
+ cluster.destroy!
+ end
+
+ it { is_expected.to have_http_status(:not_found) }
+ end
+
+ context 'when application is unknown' do
+ let(:application_name) { 'unkwnown-app' }
+
+ it { is_expected.to have_http_status(:not_found) }
+ end
+
+ context 'when application is already scheduled' do
+ before do
+ application.make_scheduled!
+ end
+
+ it { is_expected.to have_http_status(:bad_request) }
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(ClusterPatchAppWorker).to receive(:perform_async)
+ end
+
+ it_behaves_like 'a secure endpoint'
+ end
+ end
+end
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
new file mode 100644
index 00000000000..7b77cb186a4
--- /dev/null
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -0,0 +1,540 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Admin::ClustersController do
+ include AccessMatchersForController
+ include GoogleApi::CloudPlatformHelpers
+
+ let(:admin) { create(:admin) }
+
+ before do
+ sign_in(admin)
+ end
+
+ describe 'GET #index' do
+ def get_index(params = {})
+ get :index, params: params
+ end
+
+ context 'when feature flag is not enabled' do
+ before do
+ stub_feature_flags(instance_clusters: false)
+ end
+
+ it 'responds with not found' do
+ get_index
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(instance_clusters: true)
+ end
+
+ describe 'functionality' do
+ context 'when instance has one or more clusters' do
+ let!(:enabled_cluster) do
+ create(:cluster, :provided_by_gcp, :instance)
+ end
+
+ let!(:disabled_cluster) do
+ create(:cluster, :disabled, :provided_by_gcp, :production_environment, :instance)
+ end
+
+ it 'lists available clusters' do
+ get_index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ expect(assigns(:clusters)).to match_array([enabled_cluster, disabled_cluster])
+ end
+
+ context 'when page is specified' do
+ let(:last_page) { Clusters::Cluster.instance_type.page.total_pages }
+
+ before do
+ allow(Clusters::Cluster).to receive(:paginates_per).and_return(1)
+ create_list(:cluster, 2, :provided_by_gcp, :production_environment, :instance)
+ end
+
+ it 'redirects to the page' do
+ get_index(page: last_page)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:clusters).current_page).to eq(last_page)
+ end
+ end
+ end
+
+ context 'when instance does not have a cluster' do
+ it 'returns an empty state page' do
+ get_index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index, partial: :empty_state)
+ expect(assigns(:clusters)).to eq([])
+ end
+ end
+ end
+ end
+
+ describe 'security' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+
+ it { expect { get_index }.to be_allowed_for(:admin) }
+ it { expect { get_index }.to be_denied_for(:user) }
+ it { expect { get_index }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'GET #new' do
+ def get_new
+ get :new
+ end
+
+ describe 'functionality for new cluster' do
+ context 'when omniauth has been configured' do
+ let(:key) { 'secret-key' }
+ let(:session_key_for_redirect_uri) do
+ GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(key)
+ end
+
+ before do
+ allow(SecureRandom).to receive(:hex).and_return(key)
+ end
+
+ it 'has authorize_url' do
+ get_new
+
+ expect(assigns(:authorize_url)).to include(key)
+ expect(session[session_key_for_redirect_uri]).to eq(new_admin_cluster_path)
+ end
+ end
+
+ context 'when omniauth has not been configured' do
+ before do
+ stub_omniauth_setting(providers: [])
+ end
+
+ it 'does not have authorize_url' do
+ get_new
+
+ expect(assigns(:authorize_url)).to be_nil
+ end
+ end
+
+ context 'when access token is valid' do
+ before do
+ stub_google_api_validate_token
+ end
+
+ it 'has new object' do
+ get_new
+
+ expect(assigns(:gcp_cluster)).to be_an_instance_of(Clusters::ClusterPresenter)
+ end
+ end
+
+ context 'when access token is expired' do
+ before do
+ stub_google_api_expired_token
+ end
+
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+
+ context 'when access token is not stored in session' do
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+ end
+
+ describe 'functionality for existing cluster' do
+ it 'has new object' do
+ get_new
+
+ expect(assigns(:user_cluster)).to be_an_instance_of(Clusters::ClusterPresenter)
+ end
+ end
+
+ describe 'security' do
+ it { expect { get_new }.to be_allowed_for(:admin) }
+ it { expect { get_new }.to be_denied_for(:user) }
+ it { expect { get_new }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST #create_gcp' do
+ let(:legacy_abac_param) { 'true' }
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ provider_gcp_attributes: {
+ gcp_project_id: 'gcp-project-12345',
+ legacy_abac: legacy_abac_param
+ }
+ }
+ }
+ end
+
+ def post_create_gcp
+ post :create_gcp, params: params
+ end
+
+ describe 'functionality' do
+ context 'when access token is valid' do
+ before do
+ stub_google_api_validate_token
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { post_create_gcp }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Gcp.count }
+
+ cluster = Clusters::Cluster.instance_type.first
+
+ expect(response).to redirect_to(admin_cluster_path(cluster))
+ expect(cluster).to be_gcp
+ expect(cluster).to be_kubernetes
+ expect(cluster.provider_gcp).to be_legacy_abac
+ end
+
+ context 'when legacy_abac param is false' do
+ let(:legacy_abac_param) { 'false' }
+
+ it 'creates a new cluster with legacy_abac_disabled' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { post_create_gcp }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Providers::Gcp.count }
+ expect(Clusters::Cluster.instance_type.first.provider_gcp).not_to be_legacy_abac
+ end
+ end
+ end
+
+ context 'when access token is expired' do
+ before do
+ stub_google_api_expired_token
+ end
+
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+
+ context 'when access token is not stored in session' do
+ it { expect(@valid_gcp_token).to be_falsey }
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow_any_instance_of(described_class)
+ .to receive(:token_in_session).and_return('token')
+ allow_any_instance_of(described_class)
+ .to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create) do
+ OpenStruct.new(
+ self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123',
+ status: 'RUNNING'
+ )
+ end
+
+ allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
+ end
+
+ it { expect { post_create_gcp }.to be_allowed_for(:admin) }
+ it { expect { post_create_gcp }.to be_denied_for(:user) }
+ it { expect { post_create_gcp }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST #create_user' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test'
+ }
+ }
+ }
+ end
+
+ def post_create_user
+ post :create_user, params: params
+ end
+
+ describe 'functionality' do
+ context 'when creating a cluster' do
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+
+ expect { post_create_user }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Platforms::Kubernetes.count }
+
+ cluster = Clusters::Cluster.instance_type.first
+
+ expect(response).to redirect_to(admin_cluster_path(cluster))
+ expect(cluster).to be_user
+ expect(cluster).to be_kubernetes
+ end
+ end
+
+ context 'when creating an RBAC-enabled cluster' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test',
+ authorization_type: 'rbac'
+ }
+ }
+ }
+ end
+
+ it 'creates a new cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+
+ expect { post_create_user }.to change { Clusters::Cluster.count }
+ .and change { Clusters::Platforms::Kubernetes.count }
+
+ cluster = Clusters::Cluster.instance_type.first
+
+ expect(response).to redirect_to(admin_cluster_path(cluster))
+ expect(cluster).to be_user
+ expect(cluster).to be_kubernetes
+ expect(cluster).to be_platform_kubernetes_rbac
+ end
+ end
+ end
+
+ describe 'security' do
+ it { expect { post_create_user }.to be_allowed_for(:admin) }
+ it { expect { post_create_user }.to be_denied_for(:user) }
+ it { expect { post_create_user }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'GET #cluster_status' do
+ let(:cluster) { create(:cluster, :providing_by_gcp, :instance) }
+
+ def get_cluster_status
+ get :cluster_status,
+ params: {
+ id: cluster
+ },
+ format: :json
+ end
+
+ describe 'functionality' do
+ it 'responds with matching schema' do
+ get_cluster_status
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('cluster_status')
+ end
+
+ it 'invokes schedule_status_update on each application' do
+ expect_any_instance_of(Clusters::Applications::Ingress).to receive(:schedule_status_update)
+
+ get_cluster_status
+ end
+ end
+
+ describe 'security' do
+ it { expect { get_cluster_status }.to be_allowed_for(:admin) }
+ it { expect { get_cluster_status }.to be_denied_for(:user) }
+ it { expect { get_cluster_status }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'GET #show' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+
+ def get_show
+ get :show,
+ params: {
+ id: cluster
+ }
+ end
+
+ describe 'functionality' do
+ it 'responds successfully' do
+ get_show
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:cluster)).to eq(cluster)
+ end
+ end
+
+ describe 'security' do
+ it { expect { get_show }.to be_allowed_for(:admin) }
+ it { expect { get_show }.to be_denied_for(:user) }
+ it { expect { get_show }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'PUT #update' do
+ def put_update(format: :html)
+ put :update, params: params.merge(
+ id: cluster,
+ format: format
+ )
+ end
+
+ let(:cluster) { create(:cluster, :provided_by_user, :instance) }
+ let(:domain) { 'test-domain.com' }
+
+ let(:params) do
+ {
+ cluster: {
+ enabled: false,
+ name: 'my-new-cluster-name',
+ base_domain: domain
+ }
+ }
+ end
+
+ it 'updates and redirects back to show page' do
+ put_update
+
+ cluster.reload
+ expect(response).to redirect_to(admin_cluster_path(cluster))
+ expect(flash[:notice]).to eq('Kubernetes cluster was successfully updated.')
+ expect(cluster.enabled).to be_falsey
+ expect(cluster.name).to eq('my-new-cluster-name')
+ expect(cluster.domain).to eq('test-domain.com')
+ end
+
+ context 'when domain is invalid' do
+ let(:domain) { 'http://not-a-valid-domain' }
+
+ it 'does not update cluster attributes' do
+ put_update
+
+ cluster.reload
+ expect(response).to render_template(:show)
+ expect(cluster.name).not_to eq('my-new-cluster-name')
+ expect(cluster.domain).not_to eq('test-domain.com')
+ end
+ end
+
+ context 'when format is json' do
+ context 'when changing parameters' do
+ context 'when valid parameters are used' do
+ let(:params) do
+ {
+ cluster: {
+ enabled: false,
+ name: 'my-new-cluster-name',
+ domain: domain
+ }
+ }
+ end
+
+ it 'updates and redirects back to show page' do
+ put_update(format: :json)
+
+ cluster.reload
+ expect(response).to have_http_status(:no_content)
+ expect(cluster.enabled).to be_falsey
+ expect(cluster.name).to eq('my-new-cluster-name')
+ end
+ end
+
+ context 'when invalid parameters are used' do
+ let(:params) do
+ {
+ cluster: {
+ enabled: false,
+ name: ''
+ }
+ }
+ end
+
+ it 'rejects changes' do
+ put_update(format: :json)
+
+ expect(response).to have_http_status(:bad_request)
+ end
+ end
+ end
+ end
+
+ describe 'security' do
+ set(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+
+ it { expect { put_update }.to be_allowed_for(:admin) }
+ it { expect { put_update }.to be_denied_for(:user) }
+ it { expect { put_update }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'DELETE #destroy' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, :instance) }
+
+ def delete_destroy
+ delete :destroy,
+ params: {
+ id: cluster
+ }
+ end
+
+ describe 'functionality' do
+ context 'when cluster is provided by GCP' do
+ context 'when cluster is created' do
+ it 'destroys and redirects back to clusters list' do
+ expect { delete_destroy }
+ .to change { Clusters::Cluster.count }.by(-1)
+ .and change { Clusters::Platforms::Kubernetes.count }.by(-1)
+ .and change { Clusters::Providers::Gcp.count }.by(-1)
+
+ expect(response).to redirect_to(admin_clusters_path)
+ expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
+ end
+ end
+
+ context 'when cluster is being created' do
+ let!(:cluster) { create(:cluster, :providing_by_gcp, :production_environment, :instance) }
+
+ it 'destroys and redirects back to clusters list' do
+ expect { delete_destroy }
+ .to change { Clusters::Cluster.count }.by(-1)
+ .and change { Clusters::Providers::Gcp.count }.by(-1)
+
+ expect(response).to redirect_to(admin_clusters_path)
+ expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
+ end
+ end
+ end
+
+ context 'when cluster is provided by user' do
+ let!(:cluster) { create(:cluster, :provided_by_user, :production_environment, :instance) }
+
+ it 'destroys and redirects back to clusters list' do
+ expect { delete_destroy }
+ .to change { Clusters::Cluster.count }.by(-1)
+ .and change { Clusters::Platforms::Kubernetes.count }.by(-1)
+ .and change { Clusters::Providers::Gcp.count }.by(0)
+
+ expect(response).to redirect_to(admin_clusters_path)
+ expect(flash[:notice]).to eq('Kubernetes cluster integration was successfully removed.')
+ end
+ end
+ end
+
+ describe 'security' do
+ set(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, :instance) }
+
+ it { expect { delete_destroy }.to be_allowed_for(:admin) }
+ it { expect { delete_destroy }.to be_denied_for(:user) }
+ it { expect { delete_destroy }.to be_denied_for(:external) }
+ end
+ end
+end
diff --git a/spec/controllers/concerns/enforces_admin_authentication_spec.rb b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
new file mode 100644
index 00000000000..e6a6702fdea
--- /dev/null
+++ b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe EnforcesAdminAuthentication do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ controller(ApplicationController) do
+ # `described_class` is not available in this context
+ include EnforcesAdminAuthentication # rubocop:disable RSpec/DescribedClass
+
+ def index
+ head :ok
+ end
+ end
+
+ describe 'authenticate_admin!' do
+ context 'as an admin' do
+ let(:user) { create(:admin) }
+
+ it 'renders ok' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
+ context 'as a user' do
+ it 'renders a 404' do
+ get :index
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index e5180ec5c5c..7349cb7094c 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -189,6 +189,7 @@ describe Groups::ClustersController do
{
cluster: {
name: 'new-cluster',
+ managed: '1',
provider_gcp_attributes: {
gcp_project_id: 'gcp-project-12345',
legacy_abac: legacy_abac_param
@@ -218,6 +219,7 @@ describe Groups::ClustersController do
expect(cluster).to be_gcp
expect(cluster).to be_kubernetes
expect(cluster.provider_gcp).to be_legacy_abac
+ expect(cluster).to be_managed
end
context 'when legacy_abac param is false' do
@@ -278,6 +280,7 @@ describe Groups::ClustersController do
{
cluster: {
name: 'new-cluster',
+ managed: '1',
platform_kubernetes_attributes: {
api_url: 'http://my-url',
token: 'test'
@@ -303,6 +306,7 @@ describe Groups::ClustersController do
expect(response).to redirect_to(group_cluster_path(group, cluster))
expect(cluster).to be_user
expect(cluster).to be_kubernetes
+ expect(cluster).to be_managed
end
end
@@ -334,6 +338,29 @@ describe Groups::ClustersController do
expect(cluster).to be_platform_kubernetes_rbac
end
end
+
+ context 'when creating a user-managed cluster' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ managed: '0',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test',
+ authorization_type: 'rbac'
+ }
+ }
+ }
+ end
+
+ it 'creates a new user-managed cluster' do
+ go
+
+ cluster = group.clusters.first
+ expect(cluster.managed?).to be_falsy
+ end
+ end
end
describe 'security' do
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index d94c18ddc02..8d37bd82d21 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -165,6 +165,7 @@ describe Projects::ClustersController do
{
cluster: {
name: 'new-cluster',
+ managed: '1',
provider_gcp_attributes: {
gcp_project_id: 'gcp-project-12345',
legacy_abac: legacy_abac_param
@@ -191,6 +192,7 @@ describe Projects::ClustersController do
expect(project.clusters.first).to be_gcp
expect(project.clusters.first).to be_kubernetes
expect(project.clusters.first.provider_gcp).to be_legacy_abac
+ expect(project.clusters.first.managed?).to be_truthy
end
context 'when legacy_abac param is false' do
@@ -251,6 +253,7 @@ describe Projects::ClustersController do
{
cluster: {
name: 'new-cluster',
+ managed: '1',
platform_kubernetes_attributes: {
api_url: 'http://my-url',
token: 'test',
@@ -302,9 +305,35 @@ describe Projects::ClustersController do
expect(response).to redirect_to(project_cluster_path(project, project.clusters.first))
- expect(project.clusters.first).to be_user
- expect(project.clusters.first).to be_kubernetes
- expect(project.clusters.first).to be_platform_kubernetes_rbac
+ cluster = project.clusters.first
+
+ expect(cluster).to be_user
+ expect(cluster).to be_kubernetes
+ expect(cluster).to be_platform_kubernetes_rbac
+ end
+ end
+
+ context 'when creating a user-managed cluster' do
+ let(:params) do
+ {
+ cluster: {
+ name: 'new-cluster',
+ managed: '0',
+ platform_kubernetes_attributes: {
+ api_url: 'http://my-url',
+ token: 'test',
+ namespace: 'aaa',
+ authorization_type: 'rbac'
+ }
+ }
+ }
+ end
+
+ it 'creates a new user-managed cluster' do
+ go
+ cluster = project.clusters.first
+
+ expect(cluster.managed?).to be_falsy
end
end
end
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index eb8983a7633..850ef9c92fb 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -91,7 +91,7 @@ describe Projects::PipelineSchedulesController do
context 'when variables_attributes has one variable' do
let(:schedule) do
basic_param.merge({
- variables_attributes: [{ key: 'AAA', secret_value: 'AAA123' }]
+ variables_attributes: [{ key: 'AAA', secret_value: 'AAA123', variable_type: 'file' }]
})
end
@@ -105,6 +105,7 @@ describe Projects::PipelineSchedulesController do
Ci::PipelineScheduleVariable.last.tap do |v|
expect(v.key).to eq("AAA")
expect(v.value).to eq("AAA123")
+ expect(v.variable_type).to eq("file")
end
end
end
diff --git a/spec/controllers/projects/stages_controller_spec.rb b/spec/controllers/projects/stages_controller_spec.rb
new file mode 100644
index 00000000000..a91e3523fd7
--- /dev/null
+++ b/spec/controllers/projects/stages_controller_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::StagesController do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'POST #play_manual.json' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:stage_name) { 'test' }
+
+ before do
+ create_manual_build(pipeline, 'test', 'rspec 1/2')
+ create_manual_build(pipeline, 'test', 'rspec 2/2')
+
+ pipeline.reload
+ end
+
+ context 'when user does not have access' do
+ it 'returns not authorized' do
+ play_manual_stage!
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when user has access' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when the stage does not exist' do
+ let(:stage_name) { 'deploy' }
+
+ it 'fails to play all manual builds' do
+ play_manual_stage!
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the stage exists' do
+ it 'starts all manual jobs' do
+ expect(pipeline.builds.manual.count).to eq(2)
+
+ play_manual_stage!
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(pipeline.builds.manual.count).to eq(0)
+ end
+ end
+ end
+
+ def play_manual_stage!
+ post :play_manual, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: pipeline.id,
+ stage_name: stage_name
+ }, format: :json
+ end
+
+ def create_manual_build(pipeline, stage, name)
+ create(:ci_build, :manual, pipeline: pipeline, stage: stage, name: name)
+ end
+ end
+end
diff --git a/spec/factories/ci/pipeline_schedule_variables.rb b/spec/factories/ci/pipeline_schedule_variables.rb
index 8d29118e310..c85b97fbfc7 100644
--- a/spec/factories/ci/pipeline_schedule_variables.rb
+++ b/spec/factories/ci/pipeline_schedule_variables.rb
@@ -2,6 +2,7 @@ FactoryBot.define do
factory :ci_pipeline_schedule_variable, class: Ci::PipelineScheduleVariable do
sequence(:key) { |n| "VARIABLE_#{n}" }
value 'VARIABLE_VALUE'
+ variable_type 'env_var'
pipeline_schedule factory: :ci_pipeline_schedule
end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index 97405ec7c58..6eb0194b710 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -65,7 +65,7 @@ FactoryBot.define do
domain 'example.com'
end
- trait :user_managed do
+ trait :not_managed do
managed false
end
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index abf0e6bccb7..e8df5094b83 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -119,7 +119,7 @@ FactoryBot.define do
trait :with_legacy_detached_merge_request_pipeline do
after(:create) do |merge_request|
- merge_request.merge_request_pipelines << create(:ci_pipeline,
+ merge_request.pipelines_for_merge_request << create(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request,
project: merge_request.source_project,
@@ -130,7 +130,7 @@ FactoryBot.define do
trait :with_detached_merge_request_pipeline do
after(:create) do |merge_request|
- merge_request.merge_request_pipelines << create(:ci_pipeline,
+ merge_request.pipelines_for_merge_request << create(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request,
project: merge_request.source_project,
@@ -147,7 +147,7 @@ FactoryBot.define do
end
after(:create) do |merge_request, evaluator|
- merge_request.merge_request_pipelines << create(:ci_pipeline,
+ merge_request.pipelines_for_merge_request << create(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request,
project: merge_request.source_project,
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index ab185ab3972..743ec322885 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -260,6 +260,7 @@ FactoryBot.define do
trait(:merge_requests_enabled) { merge_requests_access_level ProjectFeature::ENABLED }
trait(:merge_requests_disabled) { merge_requests_access_level ProjectFeature::DISABLED }
trait(:merge_requests_private) { merge_requests_access_level ProjectFeature::PRIVATE }
+ trait(:merge_requests_public) { merge_requests_access_level ProjectFeature::PUBLIC }
trait(:repository_enabled) { repository_access_level ProjectFeature::ENABLED }
trait(:repository_disabled) { repository_access_level ProjectFeature::DISABLED }
trait(:repository_private) { repository_access_level ProjectFeature::PRIVATE }
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 9d1c1e3acc7..d1ed64cce7f 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -112,6 +112,14 @@ describe 'Dashboard Projects' do
expect(first('.project-row')).to have_content(project_with_most_stars.title)
end
+
+ it 'shows tabs to filter by all projects or personal' do
+ visit dashboard_projects_path
+ segmented_button = page.find('.filtered-search-nav .button-filter-group')
+
+ expect(segmented_button).to have_content 'All'
+ expect(segmented_button).to have_content 'Personal'
+ end
end
context 'when on Starred projects tab', :js do
@@ -134,6 +142,12 @@ describe 'Dashboard Projects' do
expect(find('.nav-links li:nth-child(1) .badge-pill')).to have_content(1)
expect(find('.nav-links li:nth-child(2) .badge-pill')).to have_content(1)
end
+
+ it 'does not show tabs to filter by all projects or personal' do
+ visit(starred_dashboard_projects_path)
+
+ expect(page).not_to have_selector('.filtered-search-nav')
+ end
end
describe 'with a pipeline', :clean_gitlab_redis_shared_state do
diff --git a/spec/features/dashboard/user_filters_projects_spec.rb b/spec/features/dashboard/user_filters_projects_spec.rb
index cc86114e436..5b17c49db2d 100644
--- a/spec/features/dashboard/user_filters_projects_spec.rb
+++ b/spec/features/dashboard/user_filters_projects_spec.rb
@@ -2,9 +2,9 @@ require 'spec_helper'
describe 'Dashboard > User filters projects' do
let(:user) { create(:user) }
- let(:project) { create(:project, name: 'Victorialand', namespace: user.namespace) }
+ let(:project) { create(:project, name: 'Victorialand', namespace: user.namespace, created_at: 2.seconds.ago, updated_at: 2.seconds.ago) }
let(:user2) { create(:user) }
- let(:project2) { create(:project, name: 'Treasure', namespace: user2.namespace) }
+ let(:project2) { create(:project, name: 'Treasure', namespace: user2.namespace, created_at: 1.second.ago, updated_at: 1.second.ago) }
before do
project.add_maintainer(user)
@@ -14,6 +14,7 @@ describe 'Dashboard > User filters projects' do
describe 'filtering personal projects' do
before do
+ stub_feature_flags(project_list_filter_bar: false)
project2.add_developer(user)
visit dashboard_projects_path
@@ -30,6 +31,7 @@ describe 'Dashboard > User filters projects' do
describe 'filtering starred projects', :js do
before do
+ stub_feature_flags(project_list_filter_bar: false)
user.toggle_star(project)
visit dashboard_projects_path
@@ -42,4 +44,219 @@ describe 'Dashboard > User filters projects' do
expect(page).not_to have_content('You don\'t have starred projects yet')
end
end
+
+ describe 'without search bar', :js do
+ before do
+ stub_feature_flags(project_list_filter_bar: false)
+
+ project2.add_developer(user)
+ visit dashboard_projects_path
+ end
+
+ it 'autocompletes searches upon typing', :js do
+ expect(page).to have_content 'Victorialand'
+ expect(page).to have_content 'Treasure'
+
+ fill_in 'project-filter-form-field', with: 'Lord beerus\n'
+
+ expect(page).not_to have_content 'Victorialand'
+ expect(page).not_to have_content 'Treasure'
+ end
+ end
+
+ describe 'with search bar', :js do
+ before do
+ stub_feature_flags(project_list_filter_bar: true)
+
+ project2.add_developer(user)
+ visit dashboard_projects_path
+ end
+
+ # TODO: move these helpers somewhere more useful
+ def click_sort_direction
+ page.find('.filtered-search-block #filtered-search-sorting-dropdown .reverse-sort-btn').click
+ end
+
+ def select_dropdown_option(selector, label)
+ dropdown = page.find(selector)
+ dropdown.click
+
+ dropdown.find('.dropdown-menu a', text: label, match: :first).click
+ end
+
+ def expect_to_see_projects(sorted_projects)
+ list = page.all('.projects-list .project-name').map(&:text)
+ expect(list).to match(sorted_projects)
+ end
+
+ describe 'Search' do
+ it 'executes when the search button is clicked' do
+ expect(page).to have_content 'Victorialand'
+ expect(page).to have_content 'Treasure'
+
+ fill_in 'project-filter-form-field', with: 'Lord vegeta\n'
+ find('.filtered-search .btn').click
+
+ expect(page).not_to have_content 'Victorialand'
+ expect(page).not_to have_content 'Treasure'
+ end
+
+ it 'executes when Enter is pressed' do
+ expect(page).to have_content 'Victorialand'
+ expect(page).to have_content 'Treasure'
+
+ fill_in 'project-filter-form-field', with: 'Lord frieza\n'
+ find('#project-filter-form-field').native.send_keys :enter
+
+ expect(page).not_to have_content 'Victorialand'
+ expect(page).not_to have_content 'Treasure'
+ end
+ end
+
+ describe 'Filter' do
+ before do
+ private_project = create(:project, :private, name: 'Private project', namespace: user.namespace)
+ internal_project = create(:project, :internal, name: 'Internal project', namespace: user.namespace)
+
+ private_project.add_maintainer(user)
+ internal_project.add_maintainer(user)
+ end
+
+ it 'filters private projects only' do
+ select_dropdown_option '#filtered-search-visibility-dropdown', 'Private'
+
+ expect(current_url).to match(/visibility_level=0/)
+
+ list = page.all('.projects-list .project-name').map(&:text)
+
+ expect(list).to contain_exactly("Private project", "Treasure", "Victorialand")
+ end
+
+ it 'filters internal projects only' do
+ select_dropdown_option '#filtered-search-visibility-dropdown', 'Internal'
+
+ expect(current_url).to match(/visibility_level=10/)
+
+ list = page.all('.projects-list .project-name').map(&:text)
+
+ expect(list).to contain_exactly('Internal project')
+ end
+
+ it 'filters any project' do
+ select_dropdown_option '#filtered-search-visibility-dropdown', 'Any'
+ list = page.all('.projects-list .project-name').map(&:text)
+
+ expect(list).to contain_exactly("Internal project", "Private project", "Treasure", "Victorialand")
+ end
+ end
+
+ describe 'Sorting' do
+ before do
+ [
+ { name: 'Red ribbon army', created_at: 2.days.ago },
+ { name: 'Cell saga', created_at: Time.now },
+ { name: 'Frieza saga', created_at: 10.days.ago }
+ ].each do |item|
+ project = create(:project, name: item[:name], namespace: user.namespace, created_at: item[:created_at])
+ project.add_developer(user)
+ end
+
+ user.toggle_star(project)
+ user.toggle_star(project2)
+ user2.toggle_star(project2)
+ end
+
+ it 'includes sorting direction' do
+ sorting_dropdown = page.find('.filtered-search-block #filtered-search-sorting-dropdown')
+
+ expect(sorting_dropdown).to have_css '.reverse-sort-btn'
+ end
+
+ it 'has all sorting options', :js do
+ sorting_dropdown = page.find('.filtered-search-block #filtered-search-sorting-dropdown')
+ sorting_option_labels = ['Last updated', 'Created date', 'Name', 'Stars']
+
+ sorting_dropdown.click
+
+ sorting_option_labels.each do |label|
+ expect(sorting_dropdown).to have_content(label)
+ end
+ end
+
+ it 'defaults to "Last updated"', :js do
+ page.find('.filtered-search-block #filtered-search-sorting-dropdown').click
+ active_sorting_option = page.first('.filtered-search-block #filtered-search-sorting-dropdown .is-active')
+
+ expect(active_sorting_option).to have_content 'Last updated'
+ end
+
+ context 'Sorting by name' do
+ it 'sorts the project list' do
+ select_dropdown_option '#filtered-search-sorting-dropdown', 'Name'
+
+ desc = ['Victorialand', 'Treasure', 'Red ribbon army', 'Frieza saga', 'Cell saga']
+ asc = ['Cell saga', 'Frieza saga', 'Red ribbon army', 'Treasure', 'Victorialand']
+
+ click_sort_direction
+
+ expect_to_see_projects(desc)
+
+ click_sort_direction
+
+ expect_to_see_projects(asc)
+ end
+ end
+
+ context 'Sorting by Last updated' do
+ it 'sorts the project list' do
+ select_dropdown_option '#filtered-search-sorting-dropdown', 'Last updated'
+
+ desc = ["Frieza saga", "Red ribbon army", "Victorialand", "Treasure", "Cell saga"]
+ asc = ["Cell saga", "Treasure", "Victorialand", "Red ribbon army", "Frieza saga"]
+
+ click_sort_direction
+
+ expect_to_see_projects(desc)
+
+ click_sort_direction
+
+ expect_to_see_projects(asc)
+ end
+ end
+
+ context 'Sorting by Created date' do
+ it 'sorts the project list' do
+ select_dropdown_option '#filtered-search-sorting-dropdown', 'Created date'
+
+ desc = ["Frieza saga", "Red ribbon army", "Victorialand", "Treasure", "Cell saga"]
+ asc = ["Cell saga", "Treasure", "Victorialand", "Red ribbon army", "Frieza saga"]
+
+ click_sort_direction
+
+ expect_to_see_projects(desc)
+
+ click_sort_direction
+
+ expect_to_see_projects(asc)
+ end
+ end
+
+ context 'Sorting by Stars' do
+ it 'sorts the project list' do
+ select_dropdown_option '#filtered-search-sorting-dropdown', 'Stars'
+
+ desc = ["Red ribbon army", "Cell saga", "Frieza saga", "Victorialand", "Treasure"]
+ asc = ["Treasure", "Victorialand", "Red ribbon army", "Cell saga", "Frieza saga"]
+
+ click_sort_direction
+
+ expect_to_see_projects(desc)
+
+ click_sort_direction
+
+ expect_to_see_projects(asc)
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb
index 7a91c64d7db..439803f9255 100644
--- a/spec/features/groups/members/leave_group_spec.rb
+++ b/spec/features/groups/members/leave_group_spec.rb
@@ -21,6 +21,20 @@ describe 'Groups > Members > Leave group' do
expect(group.users).not_to include(user)
end
+ it 'guest leaves the group by url param', :js do
+ group.add_guest(user)
+ group.add_owner(other_user)
+
+ visit group_path(group, leave: 1)
+
+ page.accept_confirm
+
+ expect(find('.flash-notice')).to have_content "You left the \"#{group.full_name}\" group"
+ expect(page).to have_content left_group_message(group)
+ expect(current_path).to eq(dashboard_groups_path)
+ expect(group.users).not_to include(user)
+ end
+
it 'guest leaves the group as last member' do
group.add_guest(user)
@@ -32,7 +46,7 @@ describe 'Groups > Members > Leave group' do
expect(group.users).not_to include(user)
end
- it 'owner leaves the group if they is not the last owner' do
+ it 'owner leaves the group if they are not the last owner' do
group.add_owner(user)
group.add_owner(other_user)
@@ -44,7 +58,7 @@ describe 'Groups > Members > Leave group' do
expect(group.users).not_to include(user)
end
- it 'owner can not leave the group if they is a last owner' do
+ it 'owner can not leave the group if they are the last owner' do
group.add_owner(user)
visit group_path(group)
@@ -56,6 +70,14 @@ describe 'Groups > Members > Leave group' do
expect(find(:css, '.project-members-page li', text: user.name)).not_to have_selector(:css, 'a.btn-remove')
end
+ it 'owner can not leave the group by url param if they are the last owner', :js do
+ group.add_owner(user)
+
+ visit group_path(group, leave: 1)
+
+ expect(find('.flash-alert')).to have_content 'You do not have permission to leave this group'
+ end
+
def left_group_message(group)
"You left the \"#{group.name}\""
end
diff --git a/spec/features/issuables/sorting_list_spec.rb b/spec/features/issuables/sorting_list_spec.rb
index 0601dd47c03..3a46a4e0167 100644
--- a/spec/features/issuables/sorting_list_spec.rb
+++ b/spec/features/issuables/sorting_list_spec.rb
@@ -86,26 +86,26 @@ describe 'Sort Issuable List' do
expect(last_merge_request).to include(first_created_issuable.title)
end
end
+ end
- context 'custom sorting' do
- let(:issuable_type) { :merge_request }
+ context 'custom sorting' do
+ let(:issuable_type) { :merge_request }
- it 'supports sorting in asc and desc order' do
- visit_merge_requests_with_state(project, 'open')
+ it 'supports sorting in asc and desc order' do
+ visit_merge_requests_with_state(project, 'open')
- page.within('.issues-other-filters') do
- click_button('Created date')
- click_link('Last updated')
- end
+ page.within('.issues-other-filters') do
+ click_button('Created date')
+ click_link('Last updated')
+ end
- expect(first_merge_request).to include(last_updated_issuable.title)
- expect(last_merge_request).to include(first_updated_issuable.title)
+ expect(first_merge_request).to include(last_updated_issuable.title)
+ expect(last_merge_request).to include(first_updated_issuable.title)
- find('.issues-other-filters .filter-dropdown-container .qa-reverse-sort').click
+ find('.issues-other-filters .filter-dropdown-container .qa-reverse-sort').click
- expect(first_merge_request).to include(first_updated_issuable.title)
- expect(last_merge_request).to include(last_updated_issuable.title)
- end
+ expect(first_merge_request).to include(first_updated_issuable.title)
+ expect(last_merge_request).to include(last_updated_issuable.title)
end
end
end
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index 096756f19cc..1f4e9e79179 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -80,7 +80,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-author', visible: true)
- expect_tokens([{ name: 'author' }])
+ expect_tokens([{ name: 'Author' }])
expect_filtered_search_input_empty
end
@@ -89,7 +89,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-assignee', visible: true)
- expect_tokens([{ name: 'assignee' }])
+ expect_tokens([{ name: 'Assignee' }])
expect_filtered_search_input_empty
end
@@ -98,7 +98,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-milestone', visible: true)
- expect_tokens([{ name: 'milestone' }])
+ expect_tokens([{ name: 'Milestone' }])
expect_filtered_search_input_empty
end
@@ -107,7 +107,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-label', visible: true)
- expect_tokens([{ name: 'label' }])
+ expect_tokens([{ name: 'Label' }])
expect_filtered_search_input_empty
end
@@ -116,7 +116,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-my-reaction', visible: true)
- expect_tokens([{ name: 'my-reaction' }])
+ expect_tokens([{ name: 'My-reaction' }])
expect_filtered_search_input_empty
end
@@ -125,7 +125,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-confidential', visible: true)
- expect_tokens([{ name: 'confidential' }])
+ expect_tokens([{ name: 'Confidential' }])
expect_filtered_search_input_empty
end
end
@@ -137,7 +137,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-author', visible: true)
- expect_tokens([{ name: 'author' }])
+ expect_tokens([{ name: 'Author' }])
expect_filtered_search_input_empty
end
@@ -147,7 +147,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-assignee', visible: true)
- expect_tokens([{ name: 'assignee' }])
+ expect_tokens([{ name: 'Assignee' }])
expect_filtered_search_input_empty
end
@@ -157,7 +157,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-milestone', visible: true)
- expect_tokens([{ name: 'milestone' }])
+ expect_tokens([{ name: 'Milestone' }])
expect_filtered_search_input_empty
end
@@ -167,7 +167,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-label', visible: true)
- expect_tokens([{ name: 'label' }])
+ expect_tokens([{ name: 'Label' }])
expect_filtered_search_input_empty
end
@@ -177,7 +177,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-my-reaction', visible: true)
- expect_tokens([{ name: 'my-reaction' }])
+ expect_tokens([{ name: 'My-reaction' }])
expect_filtered_search_input_empty
end
end
@@ -189,7 +189,7 @@ describe 'Dropdown hint', :js do
filtered_search.send_keys(:backspace)
click_hint('author')
- expect_tokens([{ name: 'author' }])
+ expect_tokens([{ name: 'Author' }])
expect_filtered_search_input_empty
end
@@ -199,7 +199,7 @@ describe 'Dropdown hint', :js do
filtered_search.send_keys(:backspace)
click_hint('assignee')
- expect_tokens([{ name: 'assignee' }])
+ expect_tokens([{ name: 'Assignee' }])
expect_filtered_search_input_empty
end
@@ -209,7 +209,7 @@ describe 'Dropdown hint', :js do
filtered_search.send_keys(:backspace)
click_hint('milestone')
- expect_tokens([{ name: 'milestone' }])
+ expect_tokens([{ name: 'Milestone' }])
expect_filtered_search_input_empty
end
@@ -219,7 +219,7 @@ describe 'Dropdown hint', :js do
filtered_search.send_keys(:backspace)
click_hint('label')
- expect_tokens([{ name: 'label' }])
+ expect_tokens([{ name: 'Label' }])
expect_filtered_search_input_empty
end
@@ -229,7 +229,7 @@ describe 'Dropdown hint', :js do
filtered_search.send_keys(:backspace)
click_hint('my-reaction')
- expect_tokens([{ name: 'my-reaction' }])
+ expect_tokens([{ name: 'My-reaction' }])
expect_filtered_search_input_empty
end
end
@@ -247,7 +247,7 @@ describe 'Dropdown hint', :js do
expect(page).to have_css(js_dropdown_hint, visible: false)
expect(page).to have_css('#js-dropdown-wip', visible: true)
- expect_tokens([{ name: 'wip' }])
+ expect_tokens([{ name: 'WIP' }])
expect_filtered_search_input_empty
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 40ba676ff92..a32c6bdcf8f 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -670,4 +670,26 @@ describe 'Merge request > User sees merge widget', :js do
end
end
end
+
+ context 'when MR has pipeline but user does not have permission' do
+ let(:sha) { project.commit(merge_request.source_branch).sha }
+ let!(:pipeline) { create(:ci_pipeline_without_jobs, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
+
+ before do
+ project.update(
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC,
+ public_builds: false
+ )
+ merge_request.update!(head_pipeline: pipeline)
+ sign_out(:user)
+
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'renders a CI pipeline error' do
+ within '.ci-widget' do
+ expect(page).to have_content('Could not retrieve the pipeline status.')
+ end
+ end
+ end
end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 1b5dd6945e0..04c7f4b6c76 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -121,7 +121,7 @@ describe 'User comments on a diff', :js do
end
context 'multi-line suggestions' do
- it 'suggestion is presented' do
+ before do
click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
page.within('.js-discussion-note-form') do
@@ -130,7 +130,9 @@ describe 'User comments on a diff', :js do
end
wait_for_requests
+ end
+ it 'suggestion is presented' do
page.within('.diff-discussions') do
expect(page).to have_button('Apply suggestion')
expect(page).to have_content('Suggested change')
@@ -160,22 +162,24 @@ describe 'User comments on a diff', :js do
end
it 'suggestion is appliable' do
- click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
+ page.within('.diff-discussions') do
+ expect(page).not_to have_content('Applied')
- page.within('.js-discussion-note-form') do
- fill_in('note_note', with: "```suggestion:-3+5\n# change to a\n# comment\n# with\n# broken\n# lines\n```")
- click_button('Comment')
- end
+ click_button('Apply suggestion')
+ wait_for_requests
- wait_for_requests
+ expect(page).to have_content('Applied')
+ end
+ end
+ it 'resolves discussion when applied' do
page.within('.diff-discussions') do
- expect(page).not_to have_content('Applied')
+ expect(page).not_to have_content('Unresolve discussion')
click_button('Apply suggestion')
wait_for_requests
- expect(page).to have_content('Applied')
+ expect(page).to have_content('Unresolve discussion')
end
end
end
diff --git a/spec/features/oauth_login_spec.rb b/spec/features/oauth_login_spec.rb
index f4105730402..5ebfc32952d 100644
--- a/spec/features/oauth_login_spec.rb
+++ b/spec/features/oauth_login_spec.rb
@@ -14,7 +14,7 @@ describe 'OAuth Login', :js, :allow_forgery_protection do
end
providers = [:github, :twitter, :bitbucket, :gitlab, :google_oauth2,
- :facebook, :cas3, :auth0, :authentiq]
+ :facebook, :cas3, :auth0, :authentiq, :salesforce]
before(:all) do
# The OmniAuth `full_host` parameter doesn't get set correctly (it gets set to something like `http://localhost`
diff --git a/spec/features/profiles/user_edit_preferences_spec.rb b/spec/features/profiles/user_edit_preferences_spec.rb
new file mode 100644
index 00000000000..2d2da222998
--- /dev/null
+++ b/spec/features/profiles/user_edit_preferences_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'User edit preferences profile' do
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(user_time_settings: true)
+ sign_in(user)
+ visit(profile_preferences_path)
+ end
+
+ it 'allows the user to toggle their time format preference' do
+ field = page.find_field("user[time_format_in_24h]")
+
+ expect(field).not_to be_checked
+
+ field.click
+
+ expect(field).to be_checked
+ end
+
+ it 'allows the user to toggle their time display preference' do
+ field = page.find_field("user[time_display_relative]")
+
+ expect(field).to be_checked
+
+ field.click
+
+ expect(field).not_to be_checked
+ end
+end
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index b43711f6ef6..a53da94ef7d 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -327,5 +327,37 @@ describe 'User edit profile' do
end
end
end
+
+ context 'User time preferences', :js do
+ let(:issue) { create(:issue, project: project)}
+ let(:project) { create(:project) }
+
+ before do
+ stub_feature_flags(user_time_settings: true)
+ end
+
+ it 'shows the user time preferences form' do
+ expect(page).to have_content('Time settings')
+ end
+
+ it 'allows the user to select a time zone from a dropdown list of options' do
+ expect(page.find('.user-time-preferences .dropdown')).not_to have_css('.show')
+
+ page.find('.user-time-preferences .js-timezone-dropdown').click
+
+ expect(page.find('.user-time-preferences .dropdown')).to have_css('.show')
+
+ page.find("a", text: "Nuku'alofa").click
+
+ tz = page.find('.user-time-preferences #user_timezone', visible: false)
+
+ expect(tz.value).to eq('Pacific/Tongatapu')
+ end
+
+ it 'timezone defaults to the server default' do
+ timezone_name = Time.zone.tzinfo.name
+ expect(page.find('.user-time-preferences #user_timezone', visible: false).value).to eq(timezone_name)
+ end
+ end
end
end
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 224375daf71..9cf04fe13b4 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -936,8 +936,8 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
find('.js-cancel-job').click
end
- it 'loads the page and shows all needed controls' do
- expect(page).to have_content 'Retry'
+ it 'loads the page and shows no controls' do
+ expect(page).not_to have_content 'Retry'
end
end
end
@@ -946,7 +946,7 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
context "Job from project", :js do
before do
job.run!
- job.cancel!
+ job.drop!(:script_failure)
visit project_job_path(project, job)
wait_for_requests
diff --git a/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb b/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb
index 0ab29660189..a645b917568 100644
--- a/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb
+++ b/spec/features/projects/members/group_member_cannot_leave_group_project_spec.rb
@@ -8,10 +8,17 @@ describe 'Projects > Members > Group member cannot leave group project' do
before do
group.add_developer(user)
sign_in(user)
- visit project_path(project)
end
it 'user does not see a "Leave project" link' do
+ visit project_path(project)
+
expect(page).not_to have_content 'Leave project'
end
+
+ it 'renders a flash message if attempting to leave by url', :js do
+ visit project_path(project, leave: 1)
+
+ expect(find('.flash-alert')).to have_content 'You do not have permission to leave this project'
+ end
end
diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb
index 94b29de4686..bd2ef9c07c4 100644
--- a/spec/features/projects/members/member_leaves_project_spec.rb
+++ b/spec/features/projects/members/member_leaves_project_spec.rb
@@ -7,13 +7,24 @@ describe 'Projects > Members > Member leaves project' do
before do
project.add_developer(user)
sign_in(user)
- visit project_path(project)
end
it 'user leaves project' do
+ visit project_path(project)
+
click_link 'Leave project'
expect(current_path).to eq(dashboard_projects_path)
expect(project.users.exists?(user.id)).to be_falsey
end
+
+ it 'user leaves project by url param', :js do
+ visit project_path(project, leave: 1)
+
+ page.accept_confirm
+
+ expect(find('.flash-notice')).to have_content "You left the \"#{project.full_name}\" project"
+ expect(current_path).to eq(dashboard_projects_path)
+ expect(project.users.exists?(user.id)).to be_falsey
+ end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 4ec44cb05b3..a1115b514d3 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -115,11 +115,11 @@ describe 'Pipeline', :js do
end
end
- it 'cancels the running build and shows retry button' do
+ it 'cancels the running build and does not show retry button' do
find('#ci-badge-deploy .ci-action-icon-container').click
page.within('#ci-badge-deploy') do
- expect(page).to have_css('.js-icon-retry')
+ expect(page).not_to have_css('.js-icon-retry')
end
end
end
@@ -133,11 +133,11 @@ describe 'Pipeline', :js do
end
end
- it 'cancels the preparing build and shows retry button' do
+ it 'cancels the preparing build and does not show retry button' do
find('#ci-badge-deploy .ci-action-icon-container').click
page.within('#ci-badge-deploy') do
- expect(page).to have_css('.js-icon-retry')
+ expect(page).not_to have_css('.js-icon-retry')
end
end
end
@@ -236,6 +236,20 @@ describe 'Pipeline', :js do
end
end
+ context 'when the pipeline has manual stage' do
+ before do
+ create(:ci_build, :manual, pipeline: pipeline, stage: 'publish', name: 'CentOS')
+ create(:ci_build, :manual, pipeline: pipeline, stage: 'publish', name: 'Debian')
+ create(:ci_build, :manual, pipeline: pipeline, stage: 'publish', name: 'OpenSUSE')
+
+ visit_pipeline
+ end
+
+ it 'displays play all button' do
+ expect(page).to have_selector('.js-stage-action')
+ end
+ end
+
context 'page tabs' do
before do
visit_pipeline
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 1259ad45791..8c7bc192c50 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -217,5 +217,36 @@ describe 'Projects > Settings > Repository settings' do
expect(RepositoryCleanupWorker.jobs.count).to eq(1)
end
end
+
+ context 'with an existing mirror', :js do
+ let(:mirrored_project) { create(:project, :repository, :remote_mirror) }
+
+ before do
+ mirrored_project.add_maintainer(user)
+
+ visit project_settings_repository_path(mirrored_project)
+ end
+
+ it 'deletes remote mirrors' do
+ expect(mirrored_project.remote_mirrors.count).to eq(1)
+
+ find('.js-delete-mirror').click
+ wait_for_requests
+
+ expect(mirrored_project.remote_mirrors.count).to eq(0)
+ end
+ end
+
+ it 'shows a disabled mirror' do
+ create(:remote_mirror, project: project, enabled: false)
+
+ visit project_settings_repository_path(project)
+
+ mirror = find('.qa-mirrored-repository-row')
+
+ expect(mirror).to have_selector('.qa-delete-mirror')
+ expect(mirror).to have_selector('.qa-disabled-mirror-badge')
+ expect(mirror).not_to have_selector('.qa-update-now-button')
+ end
end
end
diff --git a/spec/finders/cluster_ancestors_finder_spec.rb b/spec/finders/cluster_ancestors_finder_spec.rb
index 332086c42e2..750042b6b54 100644
--- a/spec/finders/cluster_ancestors_finder_spec.rb
+++ b/spec/finders/cluster_ancestors_finder_spec.rb
@@ -8,11 +8,15 @@ describe ClusterAncestorsFinder, '#execute' do
let(:user) { create(:user) }
let!(:project_cluster) do
- create(:cluster, :provided_by_user, cluster_type: :project_type, projects: [project])
+ create(:cluster, :provided_by_user, :project, projects: [project])
end
let!(:group_cluster) do
- create(:cluster, :provided_by_user, cluster_type: :group_type, groups: [group])
+ create(:cluster, :provided_by_user, :group, groups: [group])
+ end
+
+ let!(:instance_cluster) do
+ create(:cluster, :provided_by_user, :instance)
end
subject { described_class.new(clusterable, user).execute }
@@ -25,7 +29,7 @@ describe ClusterAncestorsFinder, '#execute' do
end
it 'returns the project clusters followed by group clusters' do
- is_expected.to eq([project_cluster, group_cluster])
+ is_expected.to eq([project_cluster, group_cluster, instance_cluster])
end
context 'nested groups', :nested_groups do
@@ -33,11 +37,11 @@ describe ClusterAncestorsFinder, '#execute' do
let(:parent_group) { create(:group) }
let!(:parent_group_cluster) do
- create(:cluster, :provided_by_user, cluster_type: :group_type, groups: [parent_group])
+ create(:cluster, :provided_by_user, :group, groups: [parent_group])
end
it 'returns the project clusters followed by group clusters ordered ascending the hierarchy' do
- is_expected.to eq([project_cluster, group_cluster, parent_group_cluster])
+ is_expected.to eq([project_cluster, group_cluster, parent_group_cluster, instance_cluster])
end
end
end
@@ -58,7 +62,7 @@ describe ClusterAncestorsFinder, '#execute' do
end
it 'returns the list of group clusters' do
- is_expected.to eq([group_cluster])
+ is_expected.to eq([group_cluster, instance_cluster])
end
context 'nested groups', :nested_groups do
@@ -66,12 +70,21 @@ describe ClusterAncestorsFinder, '#execute' do
let(:parent_group) { create(:group) }
let!(:parent_group_cluster) do
- create(:cluster, :provided_by_user, cluster_type: :group_type, groups: [parent_group])
+ create(:cluster, :provided_by_user, :group, groups: [parent_group])
end
it 'returns the list of group clusters ordered ascending the hierarchy' do
- is_expected.to eq([group_cluster, parent_group_cluster])
+ is_expected.to eq([group_cluster, parent_group_cluster, instance_cluster])
end
end
end
+
+ context 'for an instance' do
+ let(:clusterable) { Clusters::Instance.new }
+ let(:user) { create(:admin) }
+
+ it 'returns the list of instance clusters' do
+ is_expected.to eq([instance_cluster])
+ end
+ end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 6a47cd013f8..89fdaceaa9f 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -641,9 +641,7 @@ describe IssuesFinder do
end
it 'filters by confidentiality' do
- expect(Issue).to receive(:where).with(a_string_matching('confidential'), anything)
-
- subject
+ expect(subject.to_sql).to match("issues.confidential")
end
end
@@ -660,9 +658,7 @@ describe IssuesFinder do
end
it 'filters by confidentiality' do
- expect(Issue).to receive(:where).with(a_string_matching('confidential'), anything)
-
- subject
+ expect(subject.to_sql).to match("issues.confidential")
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 117f4a03735..da5e9dab058 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -31,7 +31,7 @@ describe MergeRequestsFinder do
end
context 'filtering by group' do
- it 'includes all merge requests when user has access exceluding merge requests from projects the user does not have access to' do
+ it 'includes all merge requests when user has access excluding merge requests from projects the user does not have access to' do
private_project = allow_gitaly_n_plus_1 { create(:project, :private, group: group) }
private_project.add_guest(user)
create(:merge_request, :simple, author: user, source_project: private_project, target_project: private_project)
diff --git a/spec/fixtures/api/schemas/pipeline_schedule.json b/spec/fixtures/api/schemas/pipeline_schedule.json
index c76c6945117..690c4a7d4e8 100644
--- a/spec/fixtures/api/schemas/pipeline_schedule.json
+++ b/spec/fixtures/api/schemas/pipeline_schedule.json
@@ -33,7 +33,7 @@
"additionalProperties": false
},
"variables": {
- "type": ["array", "null"],
+ "type": "array",
"items": { "$ref": "pipeline_schedule_variable.json" }
}
},
diff --git a/spec/fixtures/api/schemas/pipeline_schedule_variable.json b/spec/fixtures/api/schemas/pipeline_schedule_variable.json
index f7ccb2d44a0..022d36cb88c 100644
--- a/spec/fixtures/api/schemas/pipeline_schedule_variable.json
+++ b/spec/fixtures/api/schemas/pipeline_schedule_variable.json
@@ -1,8 +1,14 @@
{
- "type": ["object", "null"],
+ "type": "object",
+ "required": [
+ "key",
+ "value",
+ "variable_type"
+ ],
"properties": {
"key": { "type": "string" },
- "value": { "type": "string" }
+ "value": { "type": "string" },
+ "variable_type": { "type": "string" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release.json b/spec/fixtures/api/schemas/public_api/v4/release.json
index 6612c2a9911..6ea0781c1ed 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release.json
@@ -1,12 +1,33 @@
{
"type": "object",
- "required" : [
- "tag_name",
- "description"
- ],
- "properties" : {
- "tag_name": { "type": ["string", "null"] },
- "description": { "type": "string" }
+ "required": ["name", "tag_name", "commit"],
+ "properties": {
+ "name": { "type": "string" },
+ "tag_name": { "type": "string" },
+ "description": { "type": "string" },
+ "description_html": { "type": "string" },
+ "created_at": { "type": "date" },
+ "commit": {
+ "oneOf": [{ "type": "null" }, { "$ref": "commit/basic.json" }]
+ },
+ "author": {
+ "oneOf": [{ "type": "null" }, { "$ref": "user/basic.json" }]
+ },
+ "assets": {
+ "required": ["count", "links", "sources"],
+ "properties": {
+ "count": { "type": "integer" },
+ "links": { "$ref": "../../release/links.json" },
+ "sources": {
+ "type": "array",
+ "items": {
+ "format": "zip",
+ "url": "string"
+ }
+ }
+ },
+ "additionalProperties": false
+ }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
new file mode 100644
index 00000000000..e78398ad1d5
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
@@ -0,0 +1,22 @@
+{
+ "type": "object",
+ "required": ["name"],
+ "properties": {
+ "name": { "type": "string" },
+ "description": { "type": "string" },
+ "description_html": { "type": "string" },
+ "created_at": { "type": "date" },
+ "author": {
+ "oneOf": [{ "type": "null" }, { "$ref": "../user/basic.json" }]
+ },
+ "assets": {
+ "required": ["count", "links"],
+ "properties": {
+ "count": { "type": "integer" },
+ "links": { "$ref": "../../../release/links.json" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/releases_for_guest.json b/spec/fixtures/api/schemas/public_api/v4/release/releases_for_guest.json
new file mode 100644
index 00000000000..c13966b28e9
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/release/releases_for_guest.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "release_for_guest.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/tag_release.json b/spec/fixtures/api/schemas/public_api/v4/release/tag_release.json
new file mode 100644
index 00000000000..6612c2a9911
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/release/tag_release.json
@@ -0,0 +1,12 @@
+{
+ "type": "object",
+ "required" : [
+ "tag_name",
+ "description"
+ ],
+ "properties" : {
+ "tag_name": { "type": ["string", "null"] },
+ "description": { "type": "string" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/releases.json b/spec/fixtures/api/schemas/public_api/v4/releases.json
new file mode 100644
index 00000000000..e26215707fe
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/releases.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "release.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/tag.json b/spec/fixtures/api/schemas/public_api/v4/tag.json
index 10d4edb7ffb..5713ea1f526 100644
--- a/spec/fixtures/api/schemas/public_api/v4/tag.json
+++ b/spec/fixtures/api/schemas/public_api/v4/tag.json
@@ -14,7 +14,7 @@
"release": {
"oneOf": [
{ "type": "null" },
- { "$ref": "release.json" }
+ { "$ref": "release/tag_release.json" }
]
}
},
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index a61103397eb..73897107f67 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -1,12 +1,12 @@
import Clusters from '~/clusters/clusters_bundle';
-import { APPLICATION_STATUS, INGRESS_DOMAIN_SUFFIX } from '~/clusters/constants';
+import { APPLICATION_STATUS, INGRESS_DOMAIN_SUFFIX, APPLICATIONS } from '~/clusters/constants';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { loadHTMLFixture } from 'helpers/fixtures';
import { setTestTimeout } from 'helpers/timeout';
import $ from 'jquery';
-const { INSTALLING, INSTALLABLE, INSTALLED } = APPLICATION_STATUS;
+const { INSTALLING, INSTALLABLE, INSTALLED, UNINSTALLING } = APPLICATION_STATUS;
describe('Clusters', () => {
setTestTimeout(1000);
@@ -212,73 +212,61 @@ describe('Clusters', () => {
});
describe('installApplication', () => {
- it('tries to install helm', () => {
+ it.each(APPLICATIONS)('tries to install %s', applicationId => {
jest.spyOn(cluster.service, 'installApplication').mockResolvedValueOnce();
- cluster.store.state.applications.helm.status = INSTALLABLE;
+ cluster.store.state.applications[applicationId].status = INSTALLABLE;
- cluster.installApplication({ id: 'helm' });
+ cluster.installApplication({ id: applicationId });
- expect(cluster.store.state.applications.helm.status).toEqual(INSTALLING);
- expect(cluster.store.state.applications.helm.requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalledWith('helm', undefined);
+ expect(cluster.store.state.applications[applicationId].status).toEqual(INSTALLING);
+ expect(cluster.store.state.applications[applicationId].requestReason).toEqual(null);
+ expect(cluster.service.installApplication).toHaveBeenCalledWith(applicationId, undefined);
});
- it('tries to install ingress', () => {
- jest.spyOn(cluster.service, 'installApplication').mockResolvedValueOnce();
-
- cluster.store.state.applications.ingress.status = INSTALLABLE;
-
- cluster.installApplication({ id: 'ingress' });
-
- expect(cluster.store.state.applications.ingress.status).toEqual(INSTALLING);
- expect(cluster.store.state.applications.ingress.requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalledWith('ingress', undefined);
- });
+ it('sets error request status when the request fails', () => {
+ jest
+ .spyOn(cluster.service, 'installApplication')
+ .mockRejectedValueOnce(new Error('STUBBED ERROR'));
- it('tries to install runner', () => {
- jest.spyOn(cluster.service, 'installApplication').mockResolvedValueOnce();
+ cluster.store.state.applications.helm.status = INSTALLABLE;
- cluster.store.state.applications.runner.status = INSTALLABLE;
+ const promise = cluster.installApplication({ id: 'helm' });
- cluster.installApplication({ id: 'runner' });
+ return promise.then(() => {
+ expect(cluster.store.state.applications.helm.status).toEqual(INSTALLABLE);
+ expect(cluster.store.state.applications.helm.installFailed).toBe(true);
- expect(cluster.store.state.applications.runner.status).toEqual(INSTALLING);
- expect(cluster.store.state.applications.runner.requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalledWith('runner', undefined);
+ expect(cluster.store.state.applications.helm.requestReason).toBeDefined();
+ });
});
+ });
- it('tries to install jupyter', () => {
- jest.spyOn(cluster.service, 'installApplication').mockResolvedValueOnce();
+ describe('uninstallApplication', () => {
+ it.each(APPLICATIONS)('tries to uninstall %s', applicationId => {
+ jest.spyOn(cluster.service, 'uninstallApplication').mockResolvedValueOnce();
- cluster.installApplication({
- id: 'jupyter',
- params: { hostname: cluster.store.state.applications.jupyter.hostname },
- });
+ cluster.store.state.applications[applicationId].status = INSTALLED;
- cluster.store.state.applications.jupyter.status = INSTALLABLE;
- expect(cluster.store.state.applications.jupyter.requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalledWith('jupyter', {
- hostname: cluster.store.state.applications.jupyter.hostname,
- });
+ cluster.uninstallApplication({ id: applicationId });
+
+ expect(cluster.store.state.applications[applicationId].status).toEqual(UNINSTALLING);
+ expect(cluster.store.state.applications[applicationId].requestReason).toEqual(null);
+ expect(cluster.service.uninstallApplication).toHaveBeenCalledWith(applicationId);
});
- it('sets error request status when the request fails', () => {
+ it('sets error request status when the uninstall request fails', () => {
jest
- .spyOn(cluster.service, 'installApplication')
+ .spyOn(cluster.service, 'uninstallApplication')
.mockRejectedValueOnce(new Error('STUBBED ERROR'));
- cluster.store.state.applications.helm.status = INSTALLABLE;
+ cluster.store.state.applications.helm.status = INSTALLED;
- const promise = cluster.installApplication({ id: 'helm' });
-
- expect(cluster.store.state.applications.helm.status).toEqual(INSTALLING);
- expect(cluster.store.state.applications.helm.requestReason).toEqual(null);
- expect(cluster.service.installApplication).toHaveBeenCalled();
+ const promise = cluster.uninstallApplication({ id: 'helm' });
return promise.then(() => {
- expect(cluster.store.state.applications.helm.status).toEqual(INSTALLABLE);
- expect(cluster.store.state.applications.helm.installFailed).toBe(true);
+ expect(cluster.store.state.applications.helm.status).toEqual(INSTALLED);
+ expect(cluster.store.state.applications.helm.uninstallFailed).toBe(true);
expect(cluster.store.state.applications.helm.requestReason).toBeDefined();
});
diff --git a/spec/frontend/clusters/components/application_row_spec.js b/spec/frontend/clusters/components/application_row_spec.js
index 17273b7d5b1..7c781b72355 100644
--- a/spec/frontend/clusters/components/application_row_spec.js
+++ b/spec/frontend/clusters/components/application_row_spec.js
@@ -1,7 +1,10 @@
import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import eventHub from '~/clusters/event_hub';
import { APPLICATION_STATUS } from '~/clusters/constants';
import applicationRow from '~/clusters/components/application_row.vue';
+import UninstallApplicationConfirmationModal from '~/clusters/components/uninstall_application_confirmation_modal.vue';
+
import mountComponent from 'helpers/vue_mount_component_helper';
import { DEFAULT_APPLICATION_STATE } from '../services/mock_data';
@@ -194,11 +197,52 @@ describe('Application Row', () => {
...DEFAULT_APPLICATION_STATE,
installed: true,
uninstallable: true,
+ status: APPLICATION_STATUS.NOT_INSTALLABLE,
});
const uninstallButton = vm.$el.querySelector('.js-cluster-application-uninstall-button');
expect(uninstallButton).toBeTruthy();
});
+
+ it('displays a success toast message if application uninstall was successful', () => {
+ vm = mountComponent(ApplicationRow, {
+ ...DEFAULT_APPLICATION_STATE,
+ title: 'GitLab Runner',
+ uninstallSuccessful: false,
+ });
+
+ vm.$toast = { show: jest.fn() };
+ vm.uninstallSuccessful = true;
+
+ return vm.$nextTick(() => {
+ expect(vm.$toast.show).toHaveBeenCalledWith('GitLab Runner uninstalled successfully.');
+ });
+ });
+ });
+
+ describe('when confirmation modal triggers confirm event', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(ApplicationRow, {
+ propsData: {
+ ...DEFAULT_APPLICATION_STATE,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('triggers uninstallApplication event', () => {
+ jest.spyOn(eventHub, '$emit');
+ wrapper.find(UninstallApplicationConfirmationModal).vm.$emit('confirm');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('uninstallApplication', {
+ id: DEFAULT_APPLICATION_STATE.id,
+ });
+ });
});
describe('Upgrade button', () => {
@@ -304,7 +348,7 @@ describe('Application Row', () => {
vm.$toast = { show: jest.fn() };
vm.updateSuccessful = true;
- vm.$nextTick(() => {
+ return vm.$nextTick(() => {
expect(vm.$toast.show).toHaveBeenCalledWith('GitLab Runner upgraded successfully.');
});
});
@@ -360,60 +404,88 @@ describe('Application Row', () => {
});
describe('Error block', () => {
- it('does not show error block when there is no error', () => {
- vm = mountComponent(ApplicationRow, {
- ...DEFAULT_APPLICATION_STATE,
- status: null,
- });
- const generalErrorMessage = vm.$el.querySelector(
- '.js-cluster-application-general-error-message',
- );
+ describe('when nothing fails', () => {
+ it('does not show error block', () => {
+ vm = mountComponent(ApplicationRow, {
+ ...DEFAULT_APPLICATION_STATE,
+ });
+ const generalErrorMessage = vm.$el.querySelector(
+ '.js-cluster-application-general-error-message',
+ );
- expect(generalErrorMessage).toBeNull();
+ expect(generalErrorMessage).toBeNull();
+ });
});
- it('shows status reason when install fails', () => {
+ describe('when install or uninstall fails', () => {
const statusReason = 'We broke it 0.0';
- vm = mountComponent(ApplicationRow, {
- ...DEFAULT_APPLICATION_STATE,
- status: APPLICATION_STATUS.ERROR,
- statusReason,
- installFailed: true,
+ const requestReason = 'We broke the request 0.0';
+
+ beforeEach(() => {
+ vm = mountComponent(ApplicationRow, {
+ ...DEFAULT_APPLICATION_STATE,
+ status: APPLICATION_STATUS.ERROR,
+ statusReason,
+ requestReason,
+ installFailed: true,
+ });
});
- const generalErrorMessage = vm.$el.querySelector(
- '.js-cluster-application-general-error-message',
- );
- const statusErrorMessage = vm.$el.querySelector(
- '.js-cluster-application-status-error-message',
- );
- expect(generalErrorMessage.textContent.trim()).toEqual(
- `Something went wrong while installing ${DEFAULT_APPLICATION_STATE.title}`,
- );
+ it('shows status reason if it is available', () => {
+ const statusErrorMessage = vm.$el.querySelector(
+ '.js-cluster-application-status-error-message',
+ );
+
+ expect(statusErrorMessage.textContent.trim()).toEqual(statusReason);
+ });
+
+ it('shows request reason if it is available', () => {
+ const requestErrorMessage = vm.$el.querySelector(
+ '.js-cluster-application-request-error-message',
+ );
+
+ expect(requestErrorMessage.textContent.trim()).toEqual(requestReason);
+ });
+ });
+
+ describe('when install fails', () => {
+ beforeEach(() => {
+ vm = mountComponent(ApplicationRow, {
+ ...DEFAULT_APPLICATION_STATE,
+ status: APPLICATION_STATUS.ERROR,
+ installFailed: true,
+ });
+ });
- expect(statusErrorMessage.textContent.trim()).toEqual(statusReason);
+ it('shows a general message indicating the installation failed', () => {
+ const generalErrorMessage = vm.$el.querySelector(
+ '.js-cluster-application-general-error-message',
+ );
+
+ expect(generalErrorMessage.textContent.trim()).toEqual(
+ `Something went wrong while installing ${DEFAULT_APPLICATION_STATE.title}`,
+ );
+ });
});
- it('shows request reason when REQUEST_FAILURE', () => {
- const requestReason = 'We broke thre request 0.0';
- vm = mountComponent(ApplicationRow, {
- ...DEFAULT_APPLICATION_STATE,
- status: APPLICATION_STATUS.INSTALLABLE,
- installFailed: true,
- requestReason,
+ describe('when uninstall fails', () => {
+ beforeEach(() => {
+ vm = mountComponent(ApplicationRow, {
+ ...DEFAULT_APPLICATION_STATE,
+ status: APPLICATION_STATUS.ERROR,
+ uninstallFailed: true,
+ });
});
- const generalErrorMessage = vm.$el.querySelector(
- '.js-cluster-application-general-error-message',
- );
- const requestErrorMessage = vm.$el.querySelector(
- '.js-cluster-application-request-error-message',
- );
- expect(generalErrorMessage.textContent.trim()).toEqual(
- `Something went wrong while installing ${DEFAULT_APPLICATION_STATE.title}`,
- );
+ it('shows a general message indicating the uninstall failed', () => {
+ const generalErrorMessage = vm.$el.querySelector(
+ '.js-cluster-application-general-error-message',
+ );
- expect(requestErrorMessage.textContent.trim()).toEqual(requestReason);
+ expect(generalErrorMessage.textContent.trim()).toEqual(
+ `Something went wrong while uninstalling ${DEFAULT_APPLICATION_STATE.title}`,
+ );
+ });
});
});
});
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 7c54a27d950..8bcf02f0a34 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -75,7 +75,7 @@ describe('Applications', () => {
});
it('renders a row for Prometheus', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).toBeNull();
+ expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).not.toBeNull();
});
it('renders a row for GitLab Runner', () => {
diff --git a/spec/frontend/clusters/components/uninstall_application_button_spec.js b/spec/frontend/clusters/components/uninstall_application_button_spec.js
new file mode 100644
index 00000000000..9f9397d4d41
--- /dev/null
+++ b/spec/frontend/clusters/components/uninstall_application_button_spec.js
@@ -0,0 +1,32 @@
+import { shallowMount } from '@vue/test-utils';
+import UninstallApplicationButton from '~/clusters/components/uninstall_application_button.vue';
+import LoadingButton from '~/vue_shared/components/loading_button.vue';
+import { APPLICATION_STATUS } from '~/clusters/constants';
+
+const { INSTALLED, UPDATING, UNINSTALLING } = APPLICATION_STATUS;
+
+describe('UninstallApplicationButton', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(UninstallApplicationButton, {
+ propsData: { ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each`
+ status | loading | disabled | label
+ ${INSTALLED} | ${false} | ${false} | ${'Uninstall'}
+ ${UPDATING} | ${false} | ${true} | ${'Uninstall'}
+ ${UNINSTALLING} | ${true} | ${true} | ${'Uninstalling'}
+ `('when app status is $status', ({ loading, disabled, status, label }) => {
+ it(`renders a loading=${loading}, disabled=${disabled} button with label="${label}"`, () => {
+ createComponent({ status });
+ expect(wrapper.find(LoadingButton).props()).toMatchObject({ loading, disabled, label });
+ });
+ });
+});
diff --git a/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js b/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
new file mode 100644
index 00000000000..04808864fc0
--- /dev/null
+++ b/spec/frontend/clusters/components/uninstall_application_confirmation_modal_spec.js
@@ -0,0 +1,56 @@
+import { shallowMount } from '@vue/test-utils';
+import UninstallApplicationConfirmationModal from '~/clusters/components/uninstall_application_confirmation_modal.vue';
+import { GlModal } from '@gitlab/ui';
+import { INGRESS } from '~/clusters/constants';
+
+describe('UninstallApplicationConfirmationModal', () => {
+ let wrapper;
+ const appTitle = 'Ingress';
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(UninstallApplicationConfirmationModal, {
+ propsData: { ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ beforeEach(() => {
+ createComponent({ application: INGRESS, applicationTitle: appTitle });
+ });
+
+ it(`renders a modal with a title "Uninstall ${appTitle}"`, () => {
+ expect(wrapper.find(GlModal).attributes('title')).toEqual(`Uninstall ${appTitle}`);
+ });
+
+ it(`renders a modal with an ok button labeled "Uninstall ${appTitle}"`, () => {
+ expect(wrapper.find(GlModal).attributes('ok-title')).toEqual(`Uninstall ${appTitle}`);
+ });
+
+ describe('when ok button is clicked', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'trackUninstallButtonClick');
+ wrapper.find(GlModal).vm.$emit('ok');
+ });
+
+ it('emits confirm event', () => {
+ expect(wrapper.emitted('confirm')).toBeTruthy();
+ });
+
+ it('calls track uninstall button click mixin', () => {
+ expect(wrapper.vm.trackUninstallButtonClick).toHaveBeenCalledWith(INGRESS);
+ });
+ });
+
+ it('displays a warning text indicating the app will be uninstalled', () => {
+ expect(wrapper.text()).toContain(`You are about to uninstall ${appTitle} from your cluster.`);
+ });
+
+ it('displays a custom warning text depending on the application', () => {
+ expect(wrapper.text()).toContain(
+ `The associated load balancer and IP will be deleted and cannot be restored.`,
+ );
+ });
+});
diff --git a/spec/frontend/clusters/services/application_state_machine_spec.js b/spec/frontend/clusters/services/application_state_machine_spec.js
index e74b7910572..e057e2ac955 100644
--- a/spec/frontend/clusters/services/application_state_machine_spec.js
+++ b/spec/frontend/clusters/services/application_state_machine_spec.js
@@ -1,5 +1,10 @@
import transitionApplicationState from '~/clusters/services/application_state_machine';
-import { APPLICATION_STATUS, UPDATE_EVENT, INSTALL_EVENT } from '~/clusters/constants';
+import {
+ APPLICATION_STATUS,
+ UNINSTALL_EVENT,
+ UPDATE_EVENT,
+ INSTALL_EVENT,
+} from '~/clusters/constants';
const {
NO_STATUS,
@@ -12,6 +17,8 @@ const {
UPDATING,
UPDATED,
UPDATE_ERRORED,
+ UNINSTALLING,
+ UNINSTALL_ERRORED,
} = APPLICATION_STATUS;
const NO_EFFECTS = 'no effects';
@@ -21,16 +28,18 @@ describe('applicationStateMachine', () => {
describe(`current state is ${NO_STATUS}`, () => {
it.each`
- expectedState | event | effects
- ${INSTALLING} | ${SCHEDULED} | ${NO_EFFECTS}
- ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
- ${INSTALLING} | ${INSTALLING} | ${NO_EFFECTS}
- ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- ${UPDATING} | ${UPDATING} | ${NO_EFFECTS}
- ${INSTALLED} | ${UPDATED} | ${NO_EFFECTS}
- ${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
+ expectedState | event | effects
+ ${INSTALLING} | ${SCHEDULED} | ${NO_EFFECTS}
+ ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
+ ${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
+ ${INSTALLING} | ${INSTALLING} | ${NO_EFFECTS}
+ ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
+ ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
+ ${UPDATING} | ${UPDATING} | ${NO_EFFECTS}
+ ${INSTALLED} | ${UPDATED} | ${NO_EFFECTS}
+ ${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
+ ${UNINSTALLING} | ${UNINSTALLING} | ${NO_EFFECTS}
+ ${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
`(`transitions to $expectedState on $event event and applies $effects`, data => {
const { expectedState, event, effects } = data;
const currentAppState = {
@@ -99,8 +108,9 @@ describe('applicationStateMachine', () => {
describe(`current state is ${INSTALLED}`, () => {
it.each`
- expectedState | event | effects
- ${UPDATING} | ${UPDATE_EVENT} | ${{ updateFailed: false, updateSuccessful: false }}
+ expectedState | event | effects
+ ${UPDATING} | ${UPDATE_EVENT} | ${{ updateFailed: false, updateSuccessful: false }}
+ ${UNINSTALLING} | ${UNINSTALL_EVENT} | ${{ uninstallFailed: false, uninstallSuccessful: false }}
`(`transitions to $expectedState on $event event and applies $effects`, data => {
const { expectedState, event, effects } = data;
const currentAppState = {
@@ -131,4 +141,22 @@ describe('applicationStateMachine', () => {
});
});
});
+
+ describe(`current state is ${UNINSTALLING}`, () => {
+ it.each`
+ expectedState | event | effects
+ ${INSTALLABLE} | ${INSTALLABLE} | ${{ uninstallSuccessful: true }}
+ ${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
+ `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ const { expectedState, event, effects } = data;
+ const currentAppState = {
+ status: UNINSTALLING,
+ };
+
+ expect(transitionApplicationState(currentAppState, event)).toEqual({
+ status: expectedState,
+ ...effects,
+ });
+ });
+ });
});
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index 1e896af1c7d..41ad398e924 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -11,6 +11,7 @@ const CLUSTERS_MOCK_DATA = {
name: 'helm',
status: APPLICATION_STATUS.INSTALLABLE,
status_reason: null,
+ can_uninstall: false,
},
{
name: 'ingress',
@@ -18,32 +19,38 @@ const CLUSTERS_MOCK_DATA = {
status_reason: 'Cannot connect',
external_ip: null,
external_hostname: null,
+ can_uninstall: false,
},
{
name: 'runner',
status: APPLICATION_STATUS.INSTALLING,
status_reason: null,
+ can_uninstall: false,
},
{
name: 'prometheus',
status: APPLICATION_STATUS.ERROR,
status_reason: 'Cannot connect',
+ can_uninstall: false,
},
{
name: 'jupyter',
status: APPLICATION_STATUS.INSTALLING,
status_reason: 'Cannot connect',
+ can_uninstall: false,
},
{
name: 'knative',
status: APPLICATION_STATUS.INSTALLING,
status_reason: 'Cannot connect',
+ can_uninstall: false,
},
{
name: 'cert_manager',
status: APPLICATION_STATUS.ERROR,
status_reason: 'Cannot connect',
email: 'test@example.com',
+ can_uninstall: false,
},
],
},
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index a20e0439555..aa926bb36d7 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -63,6 +63,8 @@ describe('Clusters Store', () => {
installed: false,
installFailed: false,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
ingress: {
title: 'Ingress',
@@ -74,6 +76,8 @@ describe('Clusters Store', () => {
installed: false,
installFailed: true,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
runner: {
title: 'GitLab Runner',
@@ -89,6 +93,8 @@ describe('Clusters Store', () => {
updateFailed: false,
updateSuccessful: false,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
prometheus: {
title: 'Prometheus',
@@ -98,6 +104,8 @@ describe('Clusters Store', () => {
installed: false,
installFailed: true,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
jupyter: {
title: 'JupyterHub',
@@ -108,6 +116,8 @@ describe('Clusters Store', () => {
installed: false,
installFailed: false,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
knative: {
title: 'Knative',
@@ -121,6 +131,8 @@ describe('Clusters Store', () => {
installed: false,
installFailed: false,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
cert_manager: {
title: 'Cert-Manager',
@@ -131,6 +143,8 @@ describe('Clusters Store', () => {
email: mockResponseData.applications[6].email,
installed: false,
uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
},
},
});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 34df8019a2e..9612162ad0c 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -24,8 +24,9 @@ class CustomEnvironment extends JSDOMEnvironment {
});
const { testEnvironmentOptions } = config;
+ const { IS_EE } = testEnvironmentOptions;
this.global.gon = {
- ee: testEnvironmentOptions.IS_EE,
+ ee: IS_EE,
};
this.rejectedPromises = [];
@@ -33,6 +34,10 @@ class CustomEnvironment extends JSDOMEnvironment {
this.global.promiseRejectionHandler = error => {
this.rejectedPromises.push(error);
};
+
+ this.global.fixturesBasePath = `${process.cwd()}/${
+ IS_EE ? 'ee/' : ''
+ }spec/javascripts/fixtures`;
}
async teardown() {
diff --git a/spec/frontend/helpers/fixtures.js b/spec/frontend/helpers/fixtures.js
index f0351aa31c6..b77bcd6266e 100644
--- a/spec/frontend/helpers/fixtures.js
+++ b/spec/frontend/helpers/fixtures.js
@@ -3,10 +3,8 @@ import path from 'path';
import { ErrorWithStack } from 'jest-util';
-const fixturesBasePath = path.join(process.cwd(), 'spec', 'javascripts', 'fixtures');
-
export function getFixture(relativePath) {
- const absolutePath = path.join(fixturesBasePath, relativePath);
+ const absolutePath = path.join(global.fixturesBasePath, relativePath);
if (!fs.existsSync(absolutePath)) {
throw new ErrorWithStack(
`Fixture file ${relativePath} does not exist.
diff --git a/spec/frontend/operation_settings/components/external_dashboard_spec.js b/spec/frontend/operation_settings/components/external_dashboard_spec.js
new file mode 100644
index 00000000000..de1dd219fe0
--- /dev/null
+++ b/spec/frontend/operation_settings/components/external_dashboard_spec.js
@@ -0,0 +1,100 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlLink, GlFormGroup, GlFormInput } from '@gitlab/ui';
+import ExternalDashboard from '~/operation_settings/components/external_dashboard.vue';
+import { TEST_HOST } from 'helpers/test_constants';
+
+describe('operation settings external dashboard component', () => {
+ let wrapper;
+ const externalDashboardPath = `http://mock-external-domain.com/external/dashboard/path`;
+ const externalDashboardHelpPagePath = `${TEST_HOST}/help/page/path`;
+
+ beforeEach(() => {
+ wrapper = shallowMount(ExternalDashboard, {
+ propsData: {
+ externalDashboardPath,
+ externalDashboardHelpPagePath,
+ },
+ });
+ });
+
+ it('renders header text', () => {
+ expect(wrapper.find('.js-section-header').text()).toBe('External Dashboard');
+ });
+
+ describe('sub-header', () => {
+ let subHeader;
+
+ beforeEach(() => {
+ subHeader = wrapper.find('.js-section-sub-header');
+ });
+
+ it('renders descriptive text', () => {
+ expect(subHeader.text()).toContain(
+ 'Add a button to the metrics dashboard linking directly to your existing external dashboards.',
+ );
+ });
+
+ it('renders help page link', () => {
+ const link = subHeader.find(GlLink);
+
+ expect(link.text()).toBe('Learn more');
+ expect(link.attributes().href).toBe(externalDashboardHelpPagePath);
+ });
+ });
+
+ describe('form', () => {
+ let form;
+
+ beforeEach(() => {
+ form = wrapper.find('form');
+ });
+
+ describe('external dashboard url', () => {
+ describe('input label', () => {
+ let formGroup;
+
+ beforeEach(() => {
+ formGroup = form.find(GlFormGroup);
+ });
+
+ it('uses label text', () => {
+ expect(formGroup.attributes().label).toBe('Full dashboard URL');
+ });
+
+ it('uses description text', () => {
+ expect(formGroup.attributes().description).toBe(
+ 'Enter the URL of the dashboard you want to link to',
+ );
+ });
+ });
+
+ describe('input field', () => {
+ let input;
+
+ beforeEach(() => {
+ input = form.find(GlFormInput);
+ });
+
+ it('defaults to externalDashboardPath prop', () => {
+ expect(input.attributes().value).toBe(externalDashboardPath);
+ });
+
+ it('uses a placeholder', () => {
+ expect(input.attributes().placeholder).toBe('https://my-org.gitlab.io/my-dashboards');
+ });
+ });
+
+ describe('submit button', () => {
+ let submit;
+
+ beforeEach(() => {
+ submit = form.find(GlButton);
+ });
+
+ it('renders button label', () => {
+ expect(submit.text()).toBe('Save Changes');
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
new file mode 100644
index 00000000000..3b6f67457ad
--- /dev/null
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -0,0 +1,103 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import SuggestionDiffHeader from '~/vue_shared/components/markdown/suggestion_diff_header.vue';
+
+const localVue = createLocalVue();
+
+const DEFAULT_PROPS = {
+ canApply: true,
+ isApplied: false,
+ helpPagePath: 'path_to_docs',
+};
+
+describe('Suggestion Diff component', () => {
+ let wrapper;
+
+ const createComponent = props => {
+ wrapper = shallowMount(localVue.extend(SuggestionDiffHeader), {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...props,
+ },
+ localVue,
+ sync: false,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findApplyButton = () => wrapper.find('.qa-apply-btn');
+ const findHeader = () => wrapper.find('.qa-suggestion-diff-header');
+ const findHelpButton = () => wrapper.find('.js-help-btn');
+ const findLoading = () => wrapper.find(GlLoadingIcon);
+
+ it('renders a suggestion header', () => {
+ createComponent();
+
+ const header = findHeader();
+
+ expect(header.exists()).toBe(true);
+ expect(header.html().includes('Suggested change')).toBe(true);
+ });
+
+ it('renders a help button', () => {
+ createComponent();
+
+ expect(findHelpButton().exists()).toBe(true);
+ });
+
+ it('renders an apply button', () => {
+ createComponent();
+
+ const applyBtn = findApplyButton();
+
+ expect(applyBtn.exists()).toBe(true);
+ expect(applyBtn.html().includes('Apply suggestion')).toBe(true);
+ });
+
+ it('does not render an apply button if `canApply` is set to false', () => {
+ createComponent({ canApply: false });
+
+ expect(findApplyButton().exists()).toBe(false);
+ });
+
+ describe('when apply suggestion is clicked', () => {
+ beforeEach(done => {
+ createComponent();
+
+ findApplyButton().vm.$emit('click');
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ it('emits apply', () => {
+ expect(wrapper.emittedByOrder()).toEqual([{ name: 'apply', args: [expect.any(Function)] }]);
+ });
+
+ it('hides apply button', () => {
+ expect(findApplyButton().exists()).toBe(false);
+ });
+
+ it('shows loading', () => {
+ expect(findLoading().exists()).toBe(true);
+ expect(wrapper.text()).toContain('Applying suggestion');
+ });
+
+ it('when callback of apply is called, hides loading', done => {
+ const [callback] = wrapper.emitted().apply[0];
+
+ callback();
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(findApplyButton().exists()).toBe(true);
+ expect(findLoading().exists()).toBe(false);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/graphql/resolvers/base_resolver_spec.rb b/spec/graphql/resolvers/base_resolver_spec.rb
index e3a34762b62..9982288e206 100644
--- a/spec/graphql/resolvers/base_resolver_spec.rb
+++ b/spec/graphql/resolvers/base_resolver_spec.rb
@@ -28,4 +28,19 @@ describe Resolvers::BaseResolver do
expect(result).to eq(test: 1)
end
end
+
+ it 'increases complexity based on arguments' do
+ field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
+
+ expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 3
+ expect(field.to_graphql.complexity.call({}, { search: 'foo' }, 1)).to eq 7
+ end
+
+ it 'does not increase complexity when filtering by iids' do
+ field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
+
+ expect(field.to_graphql.complexity.call({}, { sort: 'foo' }, 1)).to eq 6
+ expect(field.to_graphql.complexity.call({}, { sort: 'foo', iid: 1 }, 1)).to eq 3
+ expect(field.to_graphql.complexity.call({}, { sort: 'foo', iids: [1, 2, 3] }, 1)).to eq 3
+ end
end
diff --git a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
index ea7159eacf9..3140af27af5 100644
--- a/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_pipelines_spec.rb
@@ -46,6 +46,14 @@ describe ResolvesPipelines do
expect(resolve_pipelines({}, {})).to be_empty
end
+ it 'increases field complexity based on arguments' do
+ field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, null: false, max_page_size: 1)
+
+ expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 2
+ expect(field.to_graphql.complexity.call({}, { sha: 'foo' }, 1)).to eq 4
+ expect(field.to_graphql.complexity.call({}, { sha: 'ref' }, 1)).to eq 4
+ end
+
def resolve_pipelines(args = {}, context = { current_user: current_user })
resolve(resolver, obj: project, args: args, ctx: context)
end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 399a33dae75..bffcdbfe915 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -120,6 +120,13 @@ describe Resolvers::IssuesResolver do
end
end
+ it 'increases field complexity based on arguments' do
+ field = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
+
+ expect(field.to_graphql.complexity.call({}, {}, 1)).to eq 4
+ expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
+ end
+
def resolve_issues(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end
diff --git a/spec/graphql/resolvers/project_resolver_spec.rb b/spec/graphql/resolvers/project_resolver_spec.rb
index d4990c6492c..4fdbb3aa43e 100644
--- a/spec/graphql/resolvers/project_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_resolver_spec.rb
@@ -26,6 +26,14 @@ describe Resolvers::ProjectResolver do
end
end
+ it 'does not increase complexity depending on number of load limits' do
+ field1 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 100)
+ field2 = Types::BaseField.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: described_class, null: false, max_page_size: 1)
+
+ expect(field1.to_graphql.complexity.call({}, {}, 1)).to eq 2
+ expect(field2.to_graphql.complexity.call({}, {}, 1)).to eq 2
+ end
+
def resolve_project(full_path)
resolve(described_class, args: { full_path: full_path })
end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index b5697ee5245..4fe426e2447 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -4,6 +4,18 @@ require 'spec_helper'
describe Types::BaseField do
context 'when considering complexity' do
+ let(:resolver) do
+ Class.new(described_class) do
+ def self.resolver_complexity(args)
+ 2 if args[:foo]
+ end
+
+ def self.complexity_multiplier(args)
+ 0.01
+ end
+ end
+ end
+
it 'defaults to 1' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
@@ -15,5 +27,19 @@ describe Types::BaseField do
expect(field.to_graphql.complexity).to eq 12
end
+
+ it 'sets complexity depending on arguments for resolvers' do
+ field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
+
+ expect(field.to_graphql.complexity.call({}, {}, 2)).to eq 4
+ expect(field.to_graphql.complexity.call({}, { first: 50 }, 2)).to eq 3
+ end
+
+ it 'sets complexity depending on number load limits for resolvers' do
+ field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, resolver_class: resolver, max_page_size: 100, null: true)
+
+ expect(field.to_graphql.complexity.call({}, { first: 1 }, 2)).to eq 2
+ expect(field.to_graphql.complexity.call({}, { first: 1, foo: true }, 2)).to eq 4
+ end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 37c63807c82..554cb861563 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -445,6 +445,10 @@ describe ProjectsHelper do
Project.all
end
+ before do
+ stub_feature_flags(project_list_filter_bar: false)
+ end
+
it 'returns true when there are projects' do
expect(helper.show_projects?(projects, {})).to eq(true)
end
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index 03df9deafa1..50c74a7c2f9 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -18,4 +18,28 @@ describe StorageHelper do
expect(helper.storage_counter(100_000_000_000_000_000_000_000)).to eq("86,736.2 EB")
end
end
+
+ describe "#storage_counters_details" do
+ let(:namespace) { create :namespace }
+ let(:project) do
+ create(:project,
+ namespace: namespace,
+ statistics: build(:project_statistics,
+ repository_size: 10.kilobytes,
+ lfs_objects_size: 20.gigabytes,
+ build_artifacts_size: 30.megabytes))
+ end
+
+ let(:message) { '10 KB repositories, 30 MB build artifacts, 20 GB LFS' }
+
+ it 'works on ProjectStatistics' do
+ expect(helper.storage_counters_details(project.statistics)).to eq(message)
+ end
+
+ it 'works on Namespace.with_statistics' do
+ namespace_stats = Namespace.with_statistics.find(project.namespace.id)
+
+ expect(helper.storage_counters_details(namespace_stats)).to eq(message)
+ end
+ end
end
diff --git a/spec/initializers/secret_token_spec.rb b/spec/initializers/secret_token_spec.rb
index 6366be30079..726ce07a2d1 100644
--- a/spec/initializers/secret_token_spec.rb
+++ b/spec/initializers/secret_token_spec.rb
@@ -6,8 +6,8 @@ describe 'create_tokens' do
let(:secrets) { ActiveSupport::OrderedOptions.new }
- HEX_KEY = /\h{128}/
- RSA_KEY = /\A-----BEGIN RSA PRIVATE KEY-----\n.+\n-----END RSA PRIVATE KEY-----\n\Z/m
+ HEX_KEY = /\h{128}/.freeze
+ RSA_KEY = /\A-----BEGIN RSA PRIVATE KEY-----\n.+\n-----END RSA PRIVATE KEY-----\n\Z/m.freeze
before do
allow(File).to receive(:write)
diff --git a/spec/javascripts/diffs/components/diff_content_spec.js b/spec/javascripts/diffs/components/diff_content_spec.js
index bc9288e4150..124bdeea45d 100644
--- a/spec/javascripts/diffs/components/diff_content_spec.js
+++ b/spec/javascripts/diffs/components/diff_content_spec.js
@@ -29,6 +29,10 @@ describe('DiffContent', () => {
});
});
+ afterEach(() => {
+ vm.$destroy();
+ });
+
describe('text based files', () => {
it('should render diff inline view', done => {
vm.$store.state.diffs.diffViewType = 'inline';
@@ -49,6 +53,16 @@ describe('DiffContent', () => {
done();
});
});
+
+ it('renders a loading icon while more lines are being rendered', done => {
+ vm.diffFile.renderingLines = true;
+
+ vm.$nextTick(() => {
+ expect(vm.$el.querySelector('.loading-container')).not.toBe(null);
+
+ done();
+ });
+ });
});
describe('empty files', () => {
diff --git a/spec/javascripts/diffs/mock_data/diff_file.js b/spec/javascripts/diffs/mock_data/diff_file.js
index 32af9ea8ddd..27428197c1c 100644
--- a/spec/javascripts/diffs/mock_data/diff_file.js
+++ b/spec/javascripts/diffs/mock_data/diff_file.js
@@ -240,4 +240,5 @@ export default {
},
],
discussions: [],
+ renderingLines: false,
};
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index bca99caa920..c82dcadd2f1 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -36,6 +36,7 @@ import actions, {
fetchFullDiff,
toggleFullDiff,
setFileCollapsed,
+ setExpandedDiffLines,
} from '~/diffs/store/actions';
import eventHub from '~/notes/event_hub';
import * as types from '~/diffs/store/mutation_types';
@@ -879,9 +880,9 @@ describe('DiffsStoreActions', () => {
it('commits REQUEST_FULL_DIFF', done => {
testAction(
receiveFullDiffSucess,
- { filePath: 'test', data: 'test' },
+ { filePath: 'test' },
{},
- [{ type: types.RECEIVE_FULL_DIFF_SUCCESS, payload: { filePath: 'test', data: 'test' } }],
+ [{ type: types.RECEIVE_FULL_DIFF_SUCCESS, payload: { filePath: 'test' } }],
[],
done,
);
@@ -903,11 +904,8 @@ describe('DiffsStoreActions', () => {
describe('fetchFullDiff', () => {
let mock;
- let scrollToElementSpy;
beforeEach(() => {
- scrollToElementSpy = spyOnDependency(actions, 'scrollToElement').and.stub();
-
mock = new MockAdapter(axios);
});
@@ -921,28 +919,23 @@ describe('DiffsStoreActions', () => {
});
it('dispatches receiveFullDiffSucess', done => {
+ const file = {
+ context_lines_path: `${gl.TEST_HOST}/context`,
+ file_path: 'test',
+ file_hash: 'test',
+ };
testAction(
fetchFullDiff,
- { context_lines_path: `${gl.TEST_HOST}/context`, file_path: 'test', file_hash: 'test' },
+ file,
null,
[],
- [{ type: 'receiveFullDiffSucess', payload: { filePath: 'test', data: ['test'] } }],
+ [
+ { type: 'receiveFullDiffSucess', payload: { filePath: 'test' } },
+ { type: 'setExpandedDiffLines', payload: { file, data: ['test'] } },
+ ],
done,
);
});
-
- it('scrolls to element', done => {
- fetchFullDiff(
- { dispatch() {} },
- { context_lines_path: `${gl.TEST_HOST}/context`, file_path: 'test', file_hash: 'test' },
- )
- .then(() => {
- expect(scrollToElementSpy).toHaveBeenCalledWith('#test');
-
- done();
- })
- .catch(done.fail);
- });
});
describe('error', () => {
@@ -999,4 +992,63 @@ describe('DiffsStoreActions', () => {
);
});
});
+
+ describe('setExpandedDiffLines', () => {
+ beforeEach(() => {
+ spyOnDependency(actions, 'idleCallback').and.callFake(cb => {
+ cb({ timeRemaining: () => 50 });
+ });
+ });
+
+ it('commits SET_CURRENT_VIEW_DIFF_FILE_LINES when there are fewer lines than MAX_RENDERING_DIFF_LINES', done => {
+ spyOnDependency(actions, 'convertExpandLines').and.callFake(() => ['test']);
+
+ testAction(
+ setExpandedDiffLines,
+ { file: { file_path: 'path' }, data: [] },
+ { diffViewType: 'inline' },
+ [
+ {
+ type: 'SET_HIDDEN_VIEW_DIFF_FILE_LINES',
+ payload: { filePath: 'path', lines: ['test'] },
+ },
+ {
+ type: 'SET_CURRENT_VIEW_DIFF_FILE_LINES',
+ payload: { filePath: 'path', lines: ['test'] },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('commits ADD_CURRENT_VIEW_DIFF_FILE_LINES when there are more lines than MAX_RENDERING_DIFF_LINES', done => {
+ const lines = new Array(501).fill().map((_, i) => `line-${i}`);
+ spyOnDependency(actions, 'convertExpandLines').and.callFake(() => lines);
+
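+ // 501 lines exceed MAX_RENDERING_DIFF_LINES: the first 200 are committed in one mutation, the remaining 301 are pushed one at a time between the two TOGGLE_DIFF_FILE_RENDERING_MORE commits.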
+ testAction(
+ setExpandedDiffLines,
+ { file: { file_path: 'path' }, data: [] },
+ { diffViewType: 'inline' },
+ [
+ {
+ type: 'SET_HIDDEN_VIEW_DIFF_FILE_LINES',
+ payload: { filePath: 'path', lines },
+ },
+ {
+ type: 'SET_CURRENT_VIEW_DIFF_FILE_LINES',
+ payload: { filePath: 'path', lines: lines.slice(0, 200) },
+ },
+ { type: 'TOGGLE_DIFF_FILE_RENDERING_MORE', payload: 'path' },
+ ...new Array(301).fill().map((_, i) => ({
+ type: 'ADD_CURRENT_VIEW_DIFF_FILE_LINES',
+ payload: { filePath: 'path', line: `line-${i + 200}` },
+ })),
+ { type: 'TOGGLE_DIFF_FILE_RENDERING_MORE', payload: 'path' },
+ ],
+ [],
+ done,
+ );
+ });
+ });
});
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 5556a994a14..fa193e1d3b9 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -756,4 +756,98 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].viewer.collapsed).toBe(true);
});
});
+
+ describe('SET_HIDDEN_VIEW_DIFF_FILE_LINES', () => {
+ [
+ { current: 'highlighted', hidden: 'parallel', diffViewType: 'inline' },
+ { current: 'parallel', hidden: 'highlighted', diffViewType: 'parallel' },
+ ].forEach(({ current, hidden, diffViewType }) => {
+ it(`sets the ${hidden} lines when diff view is ${diffViewType}`, () => {
+ const file = { file_path: 'test', parallel_diff_lines: [], highlighted_diff_lines: [] };
+ const state = {
+ diffFiles: [file],
+ diffViewType,
+ };
+
+ mutations[types.SET_HIDDEN_VIEW_DIFF_FILE_LINES](state, {
+ filePath: 'test',
+ lines: ['test'],
+ });
+
+ expect(file[`${current}_diff_lines`]).toEqual([]);
+ expect(file[`${hidden}_diff_lines`]).toEqual(['test']);
+ });
+ });
+ });
+
+ describe('SET_CURRENT_VIEW_DIFF_FILE_LINES', () => {
+ [
+ { current: 'highlighted', hidden: 'parallel', diffViewType: 'inline' },
+ { current: 'parallel', hidden: 'highlighted', diffViewType: 'parallel' },
+ ].forEach(({ current, hidden, diffViewType }) => {
+ it(`sets the ${current} lines when diff view is ${diffViewType}`, () => {
+ const file = { file_path: 'test', parallel_diff_lines: [], highlighted_diff_lines: [] };
+ const state = {
+ diffFiles: [file],
+ diffViewType,
+ };
+
+ mutations[types.SET_CURRENT_VIEW_DIFF_FILE_LINES](state, {
+ filePath: 'test',
+ lines: ['test'],
+ });
+
+ expect(file[`${current}_diff_lines`]).toEqual(['test']);
+ expect(file[`${hidden}_diff_lines`]).toEqual([]);
+ });
+ });
+ });
+
+ describe('ADD_CURRENT_VIEW_DIFF_FILE_LINES', () => {
+ [
+ { current: 'highlighted', hidden: 'parallel', diffViewType: 'inline' },
+ { current: 'parallel', hidden: 'highlighted', diffViewType: 'parallel' },
+ ].forEach(({ current, hidden, diffViewType }) => {
+ it(`pushes to ${current} lines when diff view is ${diffViewType}`, () => {
+ const file = { file_path: 'test', parallel_diff_lines: [], highlighted_diff_lines: [] };
+ const state = {
+ diffFiles: [file],
+ diffViewType,
+ };
+
+ mutations[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, {
+ filePath: 'test',
+ line: 'test',
+ });
+
+ expect(file[`${current}_diff_lines`]).toEqual(['test']);
+ expect(file[`${hidden}_diff_lines`]).toEqual([]);
+
+ mutations[types.ADD_CURRENT_VIEW_DIFF_FILE_LINES](state, {
+ filePath: 'test',
+ line: 'test2',
+ });
+
+ expect(file[`${current}_diff_lines`]).toEqual(['test', 'test2']);
+ expect(file[`${hidden}_diff_lines`]).toEqual([]);
+ });
+ });
+ });
+
+ describe('TOGGLE_DIFF_FILE_RENDERING_MORE', () => {
+ it('toggles renderingLines on file', () => {
+ const file = { file_path: 'test', renderingLines: false };
+ const state = {
+ diffFiles: [file],
+ };
+
+ mutations[types.TOGGLE_DIFF_FILE_RENDERING_MORE](state, 'test');
+
+ expect(file.renderingLines).toBe(true);
+
+ mutations[types.TOGGLE_DIFF_FILE_RENDERING_MORE](state, 'test');
+
+ expect(file.renderingLines).toBe(false);
+ });
+ });
});
diff --git a/spec/javascripts/fixtures/.gitignore b/spec/javascripts/fixtures/.gitignore
index 2507c8e7263..bed020f5b0a 100644
--- a/spec/javascripts/fixtures/.gitignore
+++ b/spec/javascripts/fixtures/.gitignore
@@ -1,3 +1,5 @@
*.html.raw
*.html
*.json
+*.pdf
+*.bmpr
diff --git a/spec/javascripts/ide/stores/actions/file_spec.js b/spec/javascripts/ide/stores/actions/file_spec.js
index 1e5b55af4ba..e6fb08bcc49 100644
--- a/spec/javascripts/ide/stores/actions/file_spec.js
+++ b/spec/javascripts/ide/stores/actions/file_spec.js
@@ -10,11 +10,19 @@ import eventHub from '~/ide/eventhub';
import { file, resetStore } from '../../helpers';
import testAction from '../../../helpers/vuex_action_helper';
+const RELATIVE_URL_ROOT = '/gitlab';
+
describe('IDE store file actions', () => {
let mock;
+ let originalGon;
beforeEach(() => {
mock = new MockAdapter(axios);
+ originalGon = window.gon;
+ window.gon = {
+ ...window.gon,
+ relative_url_root: RELATIVE_URL_ROOT,
+ };
spyOn(router, 'push');
});
@@ -22,6 +30,7 @@ describe('IDE store file actions', () => {
afterEach(() => {
mock.restore();
resetStore(store);
+ window.gon = originalGon;
});
describe('closeFile', () => {
@@ -173,13 +182,13 @@ describe('IDE store file actions', () => {
spyOn(service, 'getFileData').and.callThrough();
localFile = file(`newCreate-${Math.random()}`);
- localFile.url = `${gl.TEST_HOST}/getFileDataURL`;
+ localFile.url = `project/getFileDataURL`;
store.state.entries[localFile.path] = localFile;
});
describe('success', () => {
beforeEach(() => {
- mock.onGet(`${gl.TEST_HOST}/getFileDataURL`).replyOnce(
+ mock.onGet(`${RELATIVE_URL_ROOT}/project/getFileDataURL`).replyOnce(
200,
{
blame_path: 'blame_path',
@@ -200,7 +209,9 @@ describe('IDE store file actions', () => {
store
.dispatch('getFileData', { path: localFile.path })
.then(() => {
- expect(service.getFileData).toHaveBeenCalledWith(`${gl.TEST_HOST}/getFileDataURL`);
+ expect(service.getFileData).toHaveBeenCalledWith(
+ `${RELATIVE_URL_ROOT}/project/getFileDataURL`,
+ );
done();
})
@@ -266,7 +277,7 @@ describe('IDE store file actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onGet(`${gl.TEST_HOST}/getFileDataURL`).networkError();
+ mock.onGet(`project/getFileDataURL`).networkError();
});
it('dispatches error action', done => {
diff --git a/spec/javascripts/monitoring/dashboard_spec.js b/spec/javascripts/monitoring/dashboard_spec.js
index 16dc0084a10..e9bd6050d68 100644
--- a/spec/javascripts/monitoring/dashboard_spec.js
+++ b/spec/javascripts/monitoring/dashboard_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import { timeWindows } from '~/monitoring/constants';
+import { timeWindows, timeWindowsKeyNames } from '~/monitoring/constants';
import axios from '~/lib/utils/axios_utils';
import { metricsGroupsAPIResponse, mockApiEndpoint, environmentData } from './mock_data';
@@ -20,6 +20,9 @@ const propsData = {
emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
environmentsEndpoint: '/root/hello-prometheus/environments/35',
currentEnvironmentName: 'production',
+ customMetricsAvailable: false,
+ customMetricsPath: '',
+ validateQueryPath: '',
};
export default propsData;
@@ -37,6 +40,9 @@ describe('Dashboard', () => {
window.gon = {
...window.gon,
ee: false,
+ features: {
+ grafanaDashboardLink: true,
+ },
};
mock = new MockAdapter(axios);
@@ -160,7 +166,7 @@ describe('Dashboard', () => {
});
});
- it('renders the environments dropdown with a single is-active element', done => {
+ it('renders the environments dropdown with a single active element', done => {
const component = new DashboardComponent({
el: document.querySelector('.prometheus-graphs'),
propsData: {
@@ -175,7 +181,7 @@ describe('Dashboard', () => {
setTimeout(() => {
const dropdownItems = component.$el.querySelectorAll(
- '.js-environments-dropdown .dropdown-item.is-active',
+ '.js-environments-dropdown .dropdown-item[active="true"]',
);
expect(dropdownItems.length).toEqual(1);
@@ -248,6 +254,41 @@ describe('Dashboard', () => {
done();
});
});
+
+ it('shows a specific time window selected from the url params', done => {
+ spyOnDependency(Dashboard, 'getParameterValues').and.returnValue(['thirtyMinutes']);
+
+ const component = new DashboardComponent({
+ el: document.querySelector('.prometheus-graphs'),
+ propsData: { ...propsData, hasMetrics: true, showTimeWindowDropdown: true },
+ });
+
+ setTimeout(() => {
+ const selectedTimeWindow = component.$el.querySelector(
+ '.js-time-window-dropdown [active="true"]',
+ );
+
+ expect(selectedTimeWindow.textContent.trim()).toEqual('30 minutes');
+ done();
+ });
+ });
+
+ it('defaults to the eight hours time window for invalid url parameters', done => {
+ spyOnDependency(Dashboard, 'getParameterValues').and.returnValue([
+ '<script>alert("XSS")</script>',
+ ]);
+
+ const component = new DashboardComponent({
+ el: document.querySelector('.prometheus-graphs'),
+ propsData: { ...propsData, hasMetrics: true, showTimeWindowDropdown: true },
+ });
+
+ Vue.nextTick(() => {
+ expect(component.selectedTimeWindowKey).toEqual(timeWindowsKeyNames.eightHours);
+
+ done();
+ });
+ });
});
describe('when the window resizes', () => {
@@ -288,4 +329,63 @@ describe('Dashboard', () => {
.catch(done.fail);
});
});
+
+ describe('external dashboard link', () => {
+ let component;
+
+ beforeEach(() => {
+ mock.onGet(mockApiEndpoint).reply(200, metricsGroupsAPIResponse);
+ });
+
+ afterEach(() => {
+ component.$destroy();
+ });
+
+ describe('with feature flag enabled', () => {
+ beforeEach(() => {
+ component = new DashboardComponent({
+ el: document.querySelector('.prometheus-graphs'),
+ propsData: {
+ ...propsData,
+ hasMetrics: true,
+ showPanels: false,
+ showTimeWindowDropdown: false,
+ externalDashboardPath: '/mockPath',
+ },
+ });
+ });
+
+ it('shows the link', done => {
+ setTimeout(() => {
+ expect(component.$el.querySelector('.js-external-dashboard-link').innerText).toContain(
+ 'View full dashboard',
+ );
+ done();
+ });
+ });
+ });
+
+ describe('without feature flag enabled', () => {
+ beforeEach(() => {
+ window.gon.features.grafanaDashboardLink = false;
+ component = new DashboardComponent({
+ el: document.querySelector('.prometheus-graphs'),
+ propsData: {
+ ...propsData,
+ hasMetrics: true,
+ showPanels: false,
+ showTimeWindowDropdown: false,
+ externalDashboardPath: '',
+ },
+ });
+ });
+
+ it('does not show the link', done => {
+ setTimeout(() => {
+ expect(component.$el.querySelector('.js-external-dashboard-link')).toBe(null);
+ done();
+ });
+ });
+ });
+ });
});
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index 94ce6d8e222..39901276b8c 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -3,11 +3,12 @@ import $ from 'jquery';
import _ from 'underscore';
import { TEST_HOST } from 'spec/test_constants';
import { headersInterceptor } from 'spec/helpers/vue_resource_helper';
-import * as actions from '~/notes/stores/actions';
+import actionsModule, * as actions from '~/notes/stores/actions';
import * as mutationTypes from '~/notes/stores/mutation_types';
import * as notesConstants from '~/notes/constants';
import createStore from '~/notes/stores';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
+import service from '~/notes/services/notes_service';
import testAction from '../../helpers/vuex_action_helper';
import { resetStore } from '../helpers';
import {
@@ -18,11 +19,21 @@ import {
individualNote,
} from '../mock_data';
+const TEST_ERROR_MESSAGE = 'Test error message';
+
describe('Actions Notes Store', () => {
+ let commit;
+ let dispatch;
+ let state;
let store;
+ let flashSpy;
beforeEach(() => {
store = createStore();
+ commit = jasmine.createSpy('commit');
+ dispatch = jasmine.createSpy('dispatch');
+ state = {};
+ flashSpy = spyOnDependency(actionsModule, 'Flash');
});
afterEach(() => {
@@ -604,21 +615,6 @@ describe('Actions Notes Store', () => {
});
describe('updateOrCreateNotes', () => {
- let commit;
- let dispatch;
- let state;
-
- beforeEach(() => {
- commit = jasmine.createSpy('commit');
- dispatch = jasmine.createSpy('dispatch');
- state = {};
- });
-
- afterEach(() => {
- commit.calls.reset();
- dispatch.calls.reset();
- });
-
it('Updates existing note', () => {
const note = { id: 1234 };
const getters = { notesById: { 1234: note } };
@@ -751,4 +747,106 @@ describe('Actions Notes Store', () => {
);
});
});
+
+ describe('resolveDiscussion', () => {
+ let getters;
+ let discussionId;
+
+ beforeEach(() => {
+ discussionId = discussionMock.id;
+ state.discussions = [discussionMock];
+ getters = {
+ isDiscussionResolved: () => false,
+ };
+ });
+
+ it('when unresolved, dispatches action', done => {
+ testAction(
+ actions.resolveDiscussion,
+ { discussionId },
+ { ...state, ...getters },
+ [],
+ [
+ {
+ type: 'toggleResolveNote',
+ payload: {
+ endpoint: discussionMock.resolve_path,
+ isResolved: false,
+ discussion: true,
+ },
+ },
+ ],
+ done,
+ );
+ });
+
+ it('when resolved, does nothing', done => {
+ getters.isDiscussionResolved = id => id === discussionId;
+
+ testAction(
+ actions.resolveDiscussion,
+ { discussionId },
+ { ...state, ...getters },
+ [],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('submitSuggestion', () => {
+ const discussionId = 'discussion-id';
+ const noteId = 'note-id';
+ const suggestionId = 'suggestion-id';
+ let flashContainer;
+
+ beforeEach(() => {
+ spyOn(service, 'applySuggestion');
+ dispatch.and.returnValue(Promise.resolve());
+ service.applySuggestion.and.returnValue(Promise.resolve());
+ flashContainer = {};
+ });
+
+ const testSubmitSuggestion = (done, expectFn) => {
+ actions
+ .submitSuggestion(
+ { commit, dispatch },
+ { discussionId, noteId, suggestionId, flashContainer },
+ )
+ .then(expectFn)
+ .then(done)
+ .catch(done.fail);
+ };
+
+ it('when service succeeds, commits and resolves discussion', done => {
+ testSubmitSuggestion(done, () => {
+ expect(commit.calls.allArgs()).toEqual([
+ [mutationTypes.APPLY_SUGGESTION, { discussionId, noteId, suggestionId }],
+ ]);
+
+ expect(dispatch.calls.allArgs()).toEqual([['resolveDiscussion', { discussionId }]]);
+ expect(flashSpy).not.toHaveBeenCalled();
+ });
+ });
+
+ it('when service fails, flashes error message', done => {
+ const response = { response: { data: { message: TEST_ERROR_MESSAGE } } };
+
+ service.applySuggestion.and.returnValue(Promise.reject(response));
+
+ testSubmitSuggestion(done, () => {
+ expect(commit).not.toHaveBeenCalled();
+ expect(dispatch).not.toHaveBeenCalled();
+ expect(flashSpy).toHaveBeenCalledWith(`${TEST_ERROR_MESSAGE}.`, 'alert', flashContainer);
+ });
+ });
+
+ it('when resolve discussion fails, fails gracefully', done => {
+ dispatch.and.returnValue(Promise.reject());
+
+ testSubmitSuggestion(done, () => {
+ expect(flashSpy).not.toHaveBeenCalled();
+ });
+ });
+ });
});
diff --git a/spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index a89952ee435..5f4dba5ecb9 100644
--- a/spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/javascripts/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -3,6 +3,7 @@ import GLDropdown from '~/gl_dropdown'; // eslint-disable-line no-unused-vars
import TimezoneDropdown, {
formatUtcOffset,
formatTimezone,
+ findTimezoneByIdentifier,
} from '~/pages/projects/pipeline_schedules/shared/components/timezone_dropdown';
describe('Timezone Dropdown', function() {
@@ -12,6 +13,7 @@ describe('Timezone Dropdown', function() {
let $dropdownEl = null;
let $wrapper = null;
const tzListSel = '.dropdown-content ul li a.is-active';
+ const tzDropdownToggleText = '.dropdown-toggle-text';
describe('Initialize', () => {
describe('with dropdown already loaded', () => {
@@ -94,6 +96,36 @@ describe('Timezone Dropdown', function() {
expect(onSelectTimezone).toHaveBeenCalled();
});
+
+ it('will correctly set the dropdown label if a timezone identifier is set on the inputEl', () => {
+ $inputEl.val('America/St_Johns');
+
+ // eslint-disable-next-line no-new
+ new TimezoneDropdown({
+ $inputEl,
+ $dropdownEl,
+ displayFormat: selectedItem => formatTimezone(selectedItem),
+ });
+
+ expect($wrapper.find(tzDropdownToggleText).html()).toEqual('[UTC - 2.5] Newfoundland');
+ });
+
+ it('will call a provided `displayFormat` handler to format the dropdown value', () => {
+ const displayFormat = jasmine.createSpy('displayFormat');
+ // eslint-disable-next-line no-new
+ new TimezoneDropdown({
+ $inputEl,
+ $dropdownEl,
+ displayFormat,
+ });
+
+ $wrapper
+ .find(tzListSel)
+ .first()
+ .trigger('click');
+
+ expect(displayFormat).toHaveBeenCalled();
+ });
});
});
@@ -164,4 +196,49 @@ describe('Timezone Dropdown', function() {
).toEqual('[UTC 0] Accra');
});
});
+
+ describe('findTimezoneByIdentifier', () => {
+ const tzList = [
+ {
+ identifier: 'Asia/Tokyo',
+ name: 'Sapporo',
+ offset: 32400,
+ },
+ {
+ identifier: 'Asia/Hong_Kong',
+ name: 'Hong Kong',
+ offset: 28800,
+ },
+ {
+ identifier: 'Asia/Dhaka',
+ name: 'Dhaka',
+ offset: 21600,
+ },
+ ];
+
+ const identifier = 'Asia/Dhaka';
+ it('returns the correct object if the identifier exists', () => {
+ const res = findTimezoneByIdentifier(tzList, identifier);
+
+ expect(res).toBeTruthy();
+ expect(res).toBe(tzList[2]);
+ });
+
+ it('returns null if it does not find the identifier', () => {
+ const res = findTimezoneByIdentifier(tzList, 'Australia/Melbourne');
+
+ expect(res).toBeNull();
+ });
+
+ it('returns null if there is no identifier given', () => {
+ expect(findTimezoneByIdentifier(tzList)).toBeNull();
+ expect(findTimezoneByIdentifier(tzList, '')).toBeNull();
+ });
+
+ it('returns null if there is an empty or invalid array given', () => {
+ expect(findTimezoneByIdentifier([], identifier)).toBeNull();
+ expect(findTimezoneByIdentifier(null, identifier)).toBeNull();
+ expect(findTimezoneByIdentifier(undefined, identifier)).toBeNull();
+ });
+ });
});
diff --git a/spec/javascripts/pipelines/graph/action_component_spec.js b/spec/javascripts/pipelines/graph/action_component_spec.js
index 95717d659b8..321497b35b5 100644
--- a/spec/javascripts/pipelines/graph/action_component_spec.js
+++ b/spec/javascripts/pipelines/graph/action_component_spec.js
@@ -66,5 +66,16 @@ describe('pipeline graph action component', () => {
done();
}, 0);
});
+
+ it('renders a loading icon while waiting for request', done => {
+ component.$el.click();
+
+ component.$nextTick(() => {
+ expect(component.$el.querySelector('.js-action-icon-loading')).not.toBeNull();
+ setTimeout(() => {
+ done();
+ });
+ });
+ });
});
});
diff --git a/spec/javascripts/pipelines/graph/stage_column_component_spec.js b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
index 3240e8e4c1b..5183f8dd2d6 100644
--- a/spec/javascripts/pipelines/graph/stage_column_component_spec.js
+++ b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
@@ -70,4 +70,53 @@ describe('stage column component', () => {
);
});
});
+
+ describe('with action', () => {
+ it('renders action button', () => {
+ component = mountComponent(StageColumnComponent, {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ action: {
+ icon: 'play',
+ title: 'Play all',
+ path: 'action',
+ },
+ });
+
+ expect(component.$el.querySelector('.js-stage-action')).not.toBeNull();
+ });
+ });
+
+ describe('without action', () => {
+ it('does not render action button', () => {
+ component = mountComponent(StageColumnComponent, {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ });
+
+ expect(component.$el.querySelector('.js-stage-action')).toBeNull();
+ });
+ });
});
diff --git a/spec/javascripts/pipelines/pipeline_triggerer_spec.js b/spec/javascripts/pipelines/pipeline_triggerer_spec.js
new file mode 100644
index 00000000000..8cf290f2663
--- /dev/null
+++ b/spec/javascripts/pipelines/pipeline_triggerer_spec.js
@@ -0,0 +1,54 @@
+import { mount } from '@vue/test-utils';
+import pipelineTriggerer from '~/pipelines/components/pipeline_triggerer.vue';
+
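+// Covers the triggerer cell split out of pipeline_url_spec: links to the triggering user when one exists, falls back to "API" otherwise.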
+describe('Pipelines Triggerer', () => {
+ let wrapper;
+
+ const mockData = {
+ pipeline: {
+ user: {
+ name: 'foo',
+ avatar_url: '/avatar',
+ path: '/path',
+ },
+ },
+ };
+
+ const createComponent = () => {
+ wrapper = mount(pipelineTriggerer, {
+ propsData: mockData,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render a table cell', () => {
+ expect(wrapper.contains('.table-section')).toBe(true);
+ });
+
+ it('should render triggerer information when triggerer is provided', () => {
+ const link = wrapper.find('.js-pipeline-url-user');
+
+ expect(link.attributes('href')).toEqual(mockData.pipeline.user.path);
+ expect(link.find('.js-user-avatar-image-toolip').text()).toEqual(mockData.pipeline.user.name);
+ expect(link.find('img.avatar').attributes('src')).toEqual(
+ `${mockData.pipeline.user.avatar_url}?width=26`,
+ );
+ });
+
+ it('should render "API" when no triggerer is provided', () => {
+ wrapper.setProps({
+ pipeline: {
+ user: null,
+ },
+ });
+
+ expect(wrapper.find('.js-pipeline-url-api').text()).toEqual('API');
+ });
+});
diff --git a/spec/javascripts/pipelines/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js
index faad49a78b0..aa196af2f33 100644
--- a/spec/javascripts/pipelines/pipeline_url_spec.js
+++ b/spec/javascripts/pipelines/pipeline_url_spec.js
@@ -42,54 +42,6 @@ describe('Pipeline Url Component', () => {
expect(component.$el.querySelector('.js-pipeline-url-link span').textContent).toEqual('#1');
});
- it('should render user information when a user is provided', () => {
- const mockData = {
- pipeline: {
- id: 1,
- path: 'foo',
- flags: {},
- user: {
- web_url: '/',
- name: 'foo',
- avatar_url: '/',
- path: '/',
- },
- },
- autoDevopsHelpPath: 'foo',
- };
-
- const component = new PipelineUrlComponent({
- propsData: mockData,
- }).$mount();
-
- const image = component.$el.querySelector('.js-pipeline-url-user img');
- const tooltip = component.$el.querySelector(
- '.js-pipeline-url-user .js-user-avatar-image-toolip',
- );
-
- expect(component.$el.querySelector('.js-pipeline-url-user').getAttribute('href')).toEqual(
- mockData.pipeline.user.web_url,
- );
-
- expect(tooltip.textContent.trim()).toEqual(mockData.pipeline.user.name);
- expect(image.getAttribute('src')).toEqual(`${mockData.pipeline.user.avatar_url}?width=20`);
- });
-
- it('should render "API" when no user is provided', () => {
- const component = new PipelineUrlComponent({
- propsData: {
- pipeline: {
- id: 1,
- path: 'foo',
- flags: {},
- },
- autoDevopsHelpPath: 'foo',
- },
- }).$mount();
-
- expect(component.$el.querySelector('.js-pipeline-url-api').textContent).toContain('API');
- });
-
it('should render latest, yaml invalid, merge request, and stuck flags when provided', () => {
const component = new PipelineUrlComponent({
propsData: {
diff --git a/spec/javascripts/pipelines/pipelines_table_row_spec.js b/spec/javascripts/pipelines/pipelines_table_row_spec.js
index 234fc705a81..d47504d2f54 100644
--- a/spec/javascripts/pipelines/pipelines_table_row_spec.js
+++ b/spec/javascripts/pipelines/pipelines_table_row_spec.js
@@ -80,13 +80,13 @@ describe('Pipelines Table Row', () => {
it('should render user information', () => {
expect(
component.$el
- .querySelector('.table-section:nth-child(2) a:nth-child(3)')
+ .querySelector('.table-section:nth-child(3) .js-pipeline-url-user')
.getAttribute('href'),
).toEqual(pipeline.user.path);
expect(
component.$el
- .querySelector('.table-section:nth-child(2) .js-user-avatar-image-toolip')
+ .querySelector('.table-section:nth-child(3) .js-user-avatar-image-toolip')
.textContent.trim(),
).toEqual(pipeline.user.name);
});
diff --git a/spec/javascripts/test_constants.js b/spec/javascripts/test_constants.js
index 24b5512b053..77c206585fe 100644
--- a/spec/javascripts/test_constants.js
+++ b/spec/javascripts/test_constants.js
@@ -1,4 +1,6 @@
-export const FIXTURES_PATH = '/base/spec/javascripts/fixtures';
+export const FIXTURES_PATH = `/base/${
+ process.env.IS_GITLAB_EE ? 'ee/' : ''
+}spec/javascripts/fixtures`;
export const TEST_HOST = 'http://test.host';
export const DUMMY_IMAGE_URL = `${FIXTURES_PATH}/static/images/one_white_pixel.png`;
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
index de213210cfc..8ac6e6a7b44 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -78,6 +78,19 @@ describe('MRWidgetPipeline', () => {
);
});
+ it('should render CI error when no pipeline is provided', () => {
+ vm = mountComponent(Component, {
+ pipeline: {},
+ hasCi: true,
+ ciStatus: 'success',
+ troubleshootingDocsPath: 'help',
+ });
+
+ expect(vm.$el.querySelector('.media-body').textContent.trim()).toContain(
+ 'Could not retrieve the pipeline status. For troubleshooting steps, read the documentation.',
+ );
+ });
+
describe('with a pipeline', () => {
beforeEach(() => {
vm = mountComponent(Component, {
diff --git a/spec/javascripts/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/javascripts/vue_shared/components/markdown/suggestion_diff_header_spec.js
deleted file mode 100644
index 12ee804f668..00000000000
--- a/spec/javascripts/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import Vue from 'vue';
-import SuggestionDiffHeaderComponent from '~/vue_shared/components/markdown/suggestion_diff_header.vue';
-
-const MOCK_DATA = {
- canApply: true,
- isApplied: false,
- helpPagePath: 'path_to_docs',
-};
-
-describe('Suggestion Diff component', () => {
- let vm;
-
- function createComponent(propsData) {
- const Component = Vue.extend(SuggestionDiffHeaderComponent);
-
- return new Component({
- propsData,
- }).$mount();
- }
-
- beforeEach(done => {
- vm = createComponent(MOCK_DATA);
- Vue.nextTick(done);
- });
-
- describe('init', () => {
- it('renders a suggestion header', () => {
- const header = vm.$el.querySelector('.qa-suggestion-diff-header');
-
- expect(header).not.toBeNull();
- expect(header.innerHTML.includes('Suggested change')).toBe(true);
- });
-
- it('renders a help button', () => {
- const helpBtn = vm.$el.querySelector('.js-help-btn');
-
- expect(helpBtn).not.toBeNull();
- });
-
- it('renders an apply button', () => {
- const applyBtn = vm.$el.querySelector('.qa-apply-btn');
-
- expect(applyBtn).not.toBeNull();
- expect(applyBtn.innerHTML.includes('Apply suggestion')).toBe(true);
- });
-
- it('does not render an apply button if `canApply` is set to false', () => {
- const props = Object.assign(MOCK_DATA, { canApply: false });
-
- vm = createComponent(props);
-
- expect(vm.$el.querySelector('.qa-apply-btn')).toBeNull();
- });
- });
-
- describe('applySuggestion', () => {
- it('emits when the apply button is clicked', () => {
- const props = Object.assign(MOCK_DATA, { canApply: true });
-
- vm = createComponent(props);
- spyOn(vm, '$emit');
- vm.applySuggestion();
-
- expect(vm.$emit).toHaveBeenCalled();
- });
-
- it('does not emit when the canApply is set to false', () => {
- spyOn(vm, '$emit');
- vm.canApply = false;
- vm.applySuggestion();
-
- expect(vm.$emit).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/lib/api/helpers/related_resources_helpers_spec.rb b/spec/lib/api/helpers/related_resources_helpers_spec.rb
index 66af7f81535..99fe8795d91 100644
--- a/spec/lib/api/helpers/related_resources_helpers_spec.rb
+++ b/spec/lib/api/helpers/related_resources_helpers_spec.rb
@@ -5,6 +5,40 @@ describe API::Helpers::RelatedResourcesHelpers do
Class.new.include(described_class).new
end
+ describe '#expose_path' do
+ let(:path) { '/api/v4/awesome_endpoint' }
+
+ context 'empty relative URL root' do
+ before do
+ stub_config_setting(relative_url_root: '')
+ end
+
+ it 'returns the existing path' do
+ expect(helpers.expose_path(path)).to eq(path)
+ end
+ end
+
+ context 'slash relative URL root' do
+ before do
+ stub_config_setting(relative_url_root: '/')
+ end
+
+ it 'returns the existing path' do
+ expect(helpers.expose_path(path)).to eq(path)
+ end
+ end
+
+ context 'with relative URL root' do
+ before do
+ stub_config_setting(relative_url_root: '/gitlab/root')
+ end
+
+ it 'returns the path prefixed with the relative URL root' do
+ expect(helpers.expose_path(path)).to eq("/gitlab/root" + path)
+ end
+ end
+ end
+
describe '#expose_url' do
let(:path) { '/api/v4/awesome_endpoint' }
subject(:url) { helpers.expose_url(path) }
diff --git a/spec/lib/banzai/filter/table_of_contents_filter_spec.rb b/spec/lib/banzai/filter/table_of_contents_filter_spec.rb
index 7213cd58ea7..4a9880ac85a 100644
--- a/spec/lib/banzai/filter/table_of_contents_filter_spec.rb
+++ b/spec/lib/banzai/filter/table_of_contents_filter_spec.rb
@@ -58,6 +58,11 @@ describe Banzai::Filter::TableOfContentsFilter do
expect(doc.css('h1 a').first.attr('href')).to eq '#this-header-is-filled-with-punctuation'
end
+ it 'removes any leading or trailing spaces' do
+ doc = filter(header(1, " \r\n\tTitle with spaces\r\n\t "))
+ expect(doc.css('h1 a').first.attr('href')).to eq '#title-with-spaces'
+ end
+
it 'appends a unique number to duplicates' do
doc = filter(header(1, 'One') + header(2, 'One'))
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index 1e90a2ef27f..cc09804fd53 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -105,6 +105,7 @@ describe Gitlab::BitbucketServerImport::Importer do
expect(merge_request.metrics.merged_by).to eq(project.owner)
expect(merge_request.metrics.merged_at).to eq(@merge_event.merge_timestamp)
expect(merge_request.merge_commit_sha).to eq('12345678')
+ expect(merge_request.state_id).to eq(3)
end
it 'imports comments' do
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
index e8332b14627..5387863bd07 100644
--- a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -28,6 +28,12 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
it { is_expected.to be_truthy }
+ context 'and the cluster is not managed' do
+ let(:cluster) { create(:cluster, :not_managed, projects: [build.project]) }
+
+ it { is_expected.to be_falsey }
+ end
+
context 'and a namespace is already created for this project' do
let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, cluster: cluster, project: build.project) }
diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb
index b6231510b91..025439f1b6e 100644
--- a/spec/lib/gitlab/ci/status/build/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb
@@ -163,11 +163,11 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'matches correct extended statuses' do
expect(factory.extended_statuses)
- .to eq [Gitlab::Ci::Status::Build::Canceled, Gitlab::Ci::Status::Build::Retryable]
+ .to eq [Gitlab::Ci::Status::Build::Canceled]
end
- it 'fabricates a retryable build status' do
- expect(status).to be_a Gitlab::Ci::Status::Build::Retryable
+ it 'does not fabricate a retryable build status' do
+ expect(status).not_to be_a Gitlab::Ci::Status::Build::Retryable
end
it 'fabricates status with correct details' do
@@ -177,7 +177,7 @@ describe Gitlab::Ci::Status::Build::Factory do
expect(status.illustration).to include(:image, :size, :title)
expect(status.label).to eq 'canceled'
expect(status).to have_details
- expect(status).to have_action
+ expect(status).not_to have_action
end
end
diff --git a/spec/lib/gitlab/ci/status/stage/factory_spec.rb b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
index dee4f4efd1b..4b299170c87 100644
--- a/spec/lib/gitlab/ci/status/stage/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/factory_spec.rb
@@ -22,7 +22,7 @@ describe Gitlab::Ci::Status::Stage::Factory do
end
context 'when stage has a core status' do
- HasStatus::AVAILABLE_STATUSES.each do |core_status|
+ (HasStatus::AVAILABLE_STATUSES - %w(manual skipped scheduled)).each do |core_status|
context "when core status is #{core_status}" do
before do
create(:ci_build, pipeline: pipeline, stage: 'test', status: core_status)
@@ -64,4 +64,20 @@ describe Gitlab::Ci::Status::Stage::Factory do
expect(status.details_path).to include "pipelines/#{pipeline.id}##{stage.name}"
end
end
+
+ context 'when stage has manual builds' do
+ (HasStatus::BLOCKED_STATUS + ['skipped']).each do |core_status|
+ context "when status is #{core_status}" do
+ before do
+ create(:ci_build, pipeline: pipeline, stage: 'test', status: core_status)
+ create(:commit_status, pipeline: pipeline, stage: 'test', status: core_status)
+ create(:ci_build, pipeline: pipeline, stage: 'build', status: :manual)
+ end
+
+ it 'fabricates a play manual status' do
+ expect(status).to be_a(Gitlab::Ci::Status::Stage::PlayManual)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
new file mode 100644
index 00000000000..b0113b00ef0
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
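+# PlayManual is the detailed stage status exposing a "Play all manual" POST action; it matches skipped, manual and scheduled stages.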
+describe Gitlab::Ci::Status::Stage::PlayManual do
+ let(:stage) { double('stage') }
+ let(:play_manual) { described_class.new(stage) }
+
+ describe '#action_icon' do
+ subject { play_manual.action_icon }
+
+ it { is_expected.to eq('play') }
+ end
+
+ describe '#action_button_title' do
+ subject { play_manual.action_button_title }
+
+ it { is_expected.to eq('Play all manual') }
+ end
+
+ describe '#action_title' do
+ subject { play_manual.action_title }
+
+ it { is_expected.to eq('Play all manual') }
+ end
+
+ describe '#action_path' do
+ let(:stage) { create(:ci_stage_entity, status: 'manual') }
+ let(:pipeline) { stage.pipeline }
+ let(:play_manual) { stage.detailed_status(create(:user)) }
+
+ subject { play_manual.action_path }
+
+ it { is_expected.to eq("/#{pipeline.project.full_path}/pipelines/#{pipeline.id}/stages/#{stage.name}/play_manual") }
+ end
+
+ describe '#action_method' do
+ subject { play_manual.action_method }
+
+ it { is_expected.to eq(:post) }
+ end
+
+ describe '.matches?' do
+ let(:user) { double('user') }
+
+ subject { described_class.matches?(stage, user) }
+
+ context 'when stage is skipped' do
+ let(:stage) { create(:ci_stage_entity, status: :skipped) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when stage is manual' do
+ let(:stage) { create(:ci_stage_entity, status: :manual) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when stage is scheduled' do
+ let(:stage) { create(:ci_stage_entity, status: :scheduled) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when stage is success' do
+ let(:stage) { create(:ci_stage_entity, status: :success) }
+
+ context 'and does not have manual builds' do
+ it { is_expected.to be_falsy }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index b89a44e178b..0a6e2302b09 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -19,6 +19,7 @@ describe Gitlab::DataBuilder::Deployment do
deployment = create(:deployment, status: :failed, environment: environment, sha: commit.sha, project: project)
deployable = deployment.deployable
expected_deployable_url = Gitlab::Routing.url_helpers.project_job_url(deployable.project, deployable)
+ expected_user_url = Gitlab::Routing.url_helpers.user_url(deployment.user)
expected_commit_url = Gitlab::UrlBuilder.build(commit)
data = described_class.build(deployment)
@@ -30,7 +31,9 @@ describe Gitlab::DataBuilder::Deployment do
expect(data[:project]).to eq(project.hook_attrs)
expect(data[:short_sha]).to eq(deployment.short_sha)
expect(data[:user]).to eq(deployment.user.hook_attrs)
+ expect(data[:user_url]).to eq(expected_user_url)
expect(data[:commit_url]).to eq(expected_commit_url)
+ expect(data[:commit_title]).to eq(commit.title)
end
end
end
diff --git a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
index fe26ebb8796..15ee8c40b55 100644
--- a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
@@ -3,31 +3,32 @@
require 'spec_helper'
describe Gitlab::DiscussionsDiff::HighlightCache, :clean_gitlab_redis_cache do
+ def fake_file(offset)
+ {
+ text: 'foo',
+ type: 'new',
+ index: 2 + offset,
+ old_pos: 10 + offset,
+ new_pos: 11 + offset,
+ line_code: 'xpto',
+ rich_text: '<blips>blops</blips>'
+ }
+ end
+
+ let(:mapping) do
+ {
+ 3 => [
+ fake_file(0),
+ fake_file(1)
+ ],
+ 4 => [
+ fake_file(2)
+ ]
+ }
+ end
+
describe '#write_multiple' do
it 'sets multiple keys serializing content as JSON' do
- mapping = {
- 3 => [
- {
- text: 'foo',
- type: 'new',
- index: 2,
- old_pos: 10,
- new_pos: 11,
- line_code: 'xpto',
- rich_text: '<blips>blops</blips>'
- },
- {
- text: 'foo',
- type: 'new',
- index: 3,
- old_pos: 11,
- new_pos: 12,
- line_code: 'xpto',
- rich_text: '<blops>blips</blops>'
- }
- ]
- }
-
described_class.write_multiple(mapping)
mapping.each do |key, value|
@@ -41,53 +42,16 @@ describe Gitlab::DiscussionsDiff::HighlightCache, :clean_gitlab_redis_cache do
describe '#read_multiple' do
it 'reads multiple keys and serializes content into Gitlab::Diff::Line objects' do
- mapping = {
- 3 => [
- {
- text: 'foo',
- type: 'new',
- index: 2,
- old_pos: 11,
- new_pos: 12,
- line_code: 'xpto',
- rich_text: '<blips>blops</blips>'
- },
- {
- text: 'foo',
- type: 'new',
- index: 3,
- old_pos: 10,
- new_pos: 11,
- line_code: 'xpto',
- rich_text: '<blips>blops</blips>'
- }
- ]
- }
-
described_class.write_multiple(mapping)
found = described_class.read_multiple(mapping.keys)
- expect(found.size).to eq(1)
+ expect(found.size).to eq(2)
expect(found.first.size).to eq(2)
expect(found.first).to all(be_a(Gitlab::Diff::Line))
end
it 'returns nil when cached key is not found' do
- mapping = {
- 3 => [
- {
- text: 'foo',
- type: 'new',
- index: 2,
- old_pos: 11,
- new_pos: 12,
- line_code: 'xpto',
- rich_text: '<blips>blops</blips>'
- }
- ]
- }
-
described_class.write_multiple(mapping)
found = described_class.read_multiple([2, 3])
@@ -95,8 +59,30 @@ describe Gitlab::DiscussionsDiff::HighlightCache, :clean_gitlab_redis_cache do
expect(found.size).to eq(2)
expect(found.first).to eq(nil)
- expect(found.second.size).to eq(1)
+ expect(found.second.size).to eq(2)
expect(found.second).to all(be_a(Gitlab::Diff::Line))
end
end
+
+ describe '#clear_multiple' do
+ it 'removes all named keys' do
+ described_class.write_multiple(mapping)
+
+ described_class.clear_multiple(mapping.keys)
+
+ expect(described_class.read_multiple(mapping.keys)).to all(be_nil)
+ end
+
+ it 'only removes the named keys' do
+ to_clear, to_leave = mapping.keys
+
+ described_class.write_multiple(mapping)
+ described_class.clear_multiple([to_clear])
+
+ cleared, left = described_class.read_multiple([to_clear, to_leave])
+
+ expect(cleared).to be_nil
+ expect(left).to all(be_a(Gitlab::Diff::Line))
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb
index 6511c2b61bf..ebeb7b7b633 100644
--- a/spec/lib/gitlab/git/object_pool_spec.rb
+++ b/spec/lib/gitlab/git/object_pool_spec.rb
@@ -7,8 +7,6 @@ describe Gitlab::Git::ObjectPool do
let(:pool_repository) { create(:pool_repository) }
let(:source_repository) { pool_repository.source_project.repository }
- let(:source_repository_path) { File.join(TestEnv.repos_path, source_repository.relative_path) }
- let(:source_repository_rugged) { Rugged::Repository.new(source_repository_path) }
subject { pool_repository.object_pool }
@@ -82,6 +80,8 @@ describe Gitlab::Git::ObjectPool do
end
describe '#fetch' do
+ let(:source_repository_path) { File.join(TestEnv.repos_path, source_repository.relative_path) }
+ let(:source_repository_rugged) { Rugged::Repository.new(source_repository_path) }
let(:commit_count) { source_repository.commit_count }
context "when the object's pool repository exists" do
@@ -98,7 +98,7 @@ describe Gitlab::Git::ObjectPool do
it "re-creates the object pool's repository" do
subject.fetch
- expect(subject.repository.exists?).to be(true)
+ expect(subject.repository.exists?).to be true
end
it 'does not raise an error' do
diff --git a/spec/lib/gitlab/git/repository_cleaner_spec.rb b/spec/lib/gitlab/git/repository_cleaner_spec.rb
index 6602f22843f..7bba0107e58 100644
--- a/spec/lib/gitlab/git/repository_cleaner_spec.rb
+++ b/spec/lib/gitlab/git/repository_cleaner_spec.rb
@@ -6,55 +6,62 @@ describe Gitlab::Git::RepositoryCleaner do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:head_sha) { repository.head_commit.id }
- let(:object_map_data) { "#{head_sha} #{'0' * 40}" }
+ let(:object_map_data) { "#{head_sha} #{Gitlab::Git::BLANK_SHA}" }
- subject(:cleaner) { described_class.new(repository.raw) }
+ let(:clean_refs) { %W[refs/environments/1 refs/merge-requests/1 refs/keep-around/#{head_sha}] }
+ let(:keep_refs) { %w[refs/heads/_keep refs/tags/_keep] }
- describe '#apply_bfg_object_map' do
- let(:clean_refs) { %W[refs/environments/1 refs/merge-requests/1 refs/keep-around/#{head_sha}] }
- let(:keep_refs) { %w[refs/heads/_keep refs/tags/_keep] }
+ subject(:cleaner) { described_class.new(repository.raw) }
+ shared_examples_for '#apply_bfg_object_map_stream' do
before do
(clean_refs + keep_refs).each { |ref| repository.create_ref(head_sha, ref) }
end
- context 'from StringIO' do
- let(:object_map) { StringIO.new(object_map_data) }
+ it 'removes internal references' do
+ entries = []
- it 'removes internal references' do
- cleaner.apply_bfg_object_map(object_map)
+ cleaner.apply_bfg_object_map_stream(object_map) do |rsp|
+ entries.concat(rsp.entries)
+ end
- aggregate_failures do
- clean_refs.each { |ref| expect(repository.ref_exists?(ref)).to be_falsy }
- keep_refs.each { |ref| expect(repository.ref_exists?(ref)).to be_truthy }
- end
+ aggregate_failures do
+ clean_refs.each { |ref| expect(repository.ref_exists?(ref)).to be(false) }
+ keep_refs.each { |ref| expect(repository.ref_exists?(ref)).to be(true) }
+
+ expect(entries).to contain_exactly(
+ Gitaly::ApplyBfgObjectMapStreamResponse::Entry.new(
+ type: :COMMIT,
+ old_oid: head_sha,
+ new_oid: Gitlab::Git::BLANK_SHA
+ )
+ )
end
end
+ end
- context 'from Gitlab::HttpIO' do
- let(:url) { 'http://example.com/bfg_object_map.txt' }
- let(:tempfile) { Tempfile.new }
- let(:object_map) { Gitlab::HttpIO.new(url, object_map_data.size) }
+ describe '#apply_bfg_object_map_stream (from StringIO)' do
+ let(:object_map) { StringIO.new(object_map_data) }
- around do |example|
- tempfile.write(object_map_data)
- tempfile.close
+ include_examples '#apply_bfg_object_map_stream'
+ end
- example.run
- ensure
- tempfile.unlink
- end
+ describe '#apply_bfg_object_map_stream (from Gitlab::HttpIO)' do
+ let(:url) { 'http://example.com/bfg_object_map.txt' }
+ let(:tempfile) { Tempfile.new }
+ let(:object_map) { Gitlab::HttpIO.new(url, object_map_data.size) }
- it 'removes internal references' do
- stub_remote_url_200(url, tempfile.path)
+ around do |example|
+ tempfile.write(object_map_data)
+ tempfile.close
- cleaner.apply_bfg_object_map(object_map)
+ stub_remote_url_200(url, tempfile.path)
- aggregate_failures do
- clean_refs.each { |ref| expect(repository.ref_exists?(ref)).to be_falsy }
- keep_refs.each { |ref| expect(repository.ref_exists?(ref)).to be_truthy }
- end
- end
+ example.run
+ ensure
+ tempfile.unlink
end
+
+ include_examples '#apply_bfg_object_map_stream'
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 0f6aac9b6de..7644d83992f 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2215,4 +2215,43 @@ describe Gitlab::Git::Repository, :seed_helper do
line.split("\t").last
end
end
+
+ describe '#disconnect_alternates' do
+ let(:project) { create(:project, :repository) }
+ let(:pool_repository) { create(:pool_repository) }
+ let(:repository) { project.repository }
+ let(:repository_path) { File.join(TestEnv.repos_path, repository.relative_path) }
+ let(:object_pool) { pool_repository.object_pool }
+ let(:object_pool_path) { File.join(TestEnv.repos_path, object_pool.repository.relative_path) }
+ let(:object_pool_rugged) { Rugged::Repository.new(object_pool_path) }
+
+ before do
+ object_pool.create
+ end
+
+ it 'does not raise an error when disconnecting a non-linked repository' do
+ expect { repository.disconnect_alternates }.not_to raise_error
+ end
+
+ it 'removes the alternates file' do
+ object_pool.link(repository)
+
+ alternates_file = File.join(repository_path, "objects", "info", "alternates")
+ expect(File.exist?(alternates_file)).to be_truthy
+
+ repository.disconnect_alternates
+
+ expect(File.exist?(alternates_file)).to be_falsey
+ end
+
+ it 'can still access objects in the object pool' do
+ object_pool.link(repository)
+ new_commit = new_commit_edit_old_file(object_pool_rugged)
+ expect(repository.commit(new_commit.oid).id).to eq(new_commit.oid)
+
+ repository.disconnect_alternates
+
+ expect(repository.commit(new_commit.oid).id).to eq(new_commit.oid)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb b/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
index 369deff732a..c42332dc27b 100644
--- a/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
@@ -6,14 +6,14 @@ describe Gitlab::GitalyClient::CleanupService do
let(:relative_path) { project.disk_path + '.git' }
let(:client) { described_class.new(project.repository) }
- describe '#apply_bfg_object_map' do
- it 'sends an apply_bfg_object_map message' do
+ describe '#apply_bfg_object_map_stream' do
+ it 'sends an apply_bfg_object_map_stream message' do
expect_any_instance_of(Gitaly::CleanupService::Stub)
- .to receive(:apply_bfg_object_map)
+ .to receive(:apply_bfg_object_map_stream)
.with(kind_of(Enumerator), kind_of(Hash))
- .and_return(double)
+ .and_return([])
- client.apply_bfg_object_map(StringIO.new)
+ client.apply_bfg_object_map_stream(StringIO.new)
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 0dab39575b9..0bb6e582159 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -165,4 +165,15 @@ describe Gitlab::GitalyClient::RefService do
client.delete_refs(except_with_prefixes: prefixes)
end
end
+
+ describe '#pack_refs' do
+ it 'sends a pack_refs message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:pack_refs)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(:pack_refs_response))
+
+ client.pack_refs
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 46ca2340389..09de7ca6afd 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -231,4 +231,34 @@ describe Gitlab::GitalyClient::RepositoryService do
client.raw_changes_between('deadbeef', 'deadpork')
end
end
+
+ describe '#disconnect_alternates' do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:repository_path) { File.join(TestEnv.repos_path, repository.relative_path) }
+ let(:pool_repository) { create(:pool_repository) }
+ let(:object_pool) { pool_repository.object_pool }
+ let(:object_pool_service) { Gitlab::GitalyClient::ObjectPoolService.new(object_pool) }
+
+ before do
+ object_pool_service.create(repository)
+ object_pool_service.link_repository(repository)
+ end
+
+ it 'deletes the alternates file' do
+ repository.disconnect_alternates
+
+ alternates_file = File.join(repository_path, "objects", "info", "alternates")
+
+ expect(File.exist?(alternates_file)).to be_falsey
+ end
+
+ context 'when called twice' do
+ it "doesn't raise an error" do
+ repository.disconnect_alternates
+
+ expect { repository.disconnect_alternates }.not_to raise_error
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index 7901ae005d9..dab5767ece1 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -98,6 +98,7 @@ describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cach
description: 'This is my issue',
milestone_id: milestone.id,
state: :opened,
+ state_id: 1,
created_at: created_at,
updated_at: updated_at
},
@@ -127,6 +128,7 @@ describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cach
description: "*Created by: alice*\n\nThis is my issue",
milestone_id: milestone.id,
state: :opened,
+ state_id: 1,
created_at: created_at,
updated_at: updated_at
},
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 2e4a7c36fb8..6d614c6527a 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -93,6 +93,7 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
source_branch: 'github/fork/alice/feature',
target_branch: 'master',
state: :merged,
+ state_id: 3,
milestone_id: milestone.id,
author_id: user.id,
assignee_id: user.id,
@@ -138,6 +139,7 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
source_branch: 'github/fork/alice/feature',
target_branch: 'master',
state: :merged,
+ state_id: 3,
milestone_id: milestone.id,
author_id: project.creator_id,
assignee_id: user.id,
@@ -184,6 +186,7 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
source_branch: 'master-42',
target_branch: 'master',
state: :merged,
+ state_id: 3,
milestone_id: milestone.id,
author_id: user.id,
assignee_id: user.id,
diff --git a/spec/lib/gitlab/graphql/generic_tracing_spec.rb b/spec/lib/gitlab/graphql/generic_tracing_spec.rb
new file mode 100644
index 00000000000..ae92dcc40af
--- /dev/null
+++ b/spec/lib/gitlab/graphql/generic_tracing_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
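+# GenericTracing supersedes the Gitlab::Graphql::Tracing spec removed below; it keeps the duration histogram assertions and adds Labkit tracing on/off contexts.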
+describe Gitlab::Graphql::GenericTracing do
+ let(:graphql_duration_seconds_histogram) { double('Gitlab::Metrics::NullMetric') }
+
+ it 'updates graphql histogram with expected labels' do
+ query = 'query { users { id } }'
+ tracer = described_class.new
+
+ allow(tracer)
+ .to receive(:graphql_duration_seconds)
+ .and_return(graphql_duration_seconds_histogram)
+
+ expect_metric('graphql.lex', 'lex')
+ expect_metric('graphql.parse', 'parse')
+ expect_metric('graphql.validate', 'validate')
+ expect_metric('graphql.analyze', 'analyze_multiplex')
+ expect_metric('graphql.execute', 'execute_query_lazy')
+ expect_metric('graphql.execute', 'execute_multiplex')
+
+ GitlabSchema.execute(query, context: { tracers: [tracer] })
+ end
+
+ context "when labkit tracing is enabled" do
+ before do
+ expect(Labkit::Tracing).to receive(:enabled?).and_return(true)
+ end
+
+ it 'yields with labkit tracing' do
+ expected_tags = {
+ 'component' => 'web',
+ 'span.kind' => 'server',
+ 'platform_key' => 'pkey',
+ 'key' => 'key'
+ }
+
+ expect(Labkit::Tracing)
+ .to receive(:with_tracing)
+ .with(operation_name: "pkey.key", tags: expected_tags)
+ .and_yield
+
+ expect { |b| described_class.new.platform_trace('pkey', 'key', nil, &b) }.to yield_control
+ end
+ end
+
+ context "when labkit tracing is disabled" do
+ before do
+ expect(Labkit::Tracing).to receive(:enabled?).and_return(false)
+ end
+
+ it 'yields without measurement' do
+ expect(Labkit::Tracing).not_to receive(:with_tracing)
+
+ expect { |b| described_class.new.platform_trace('pkey', 'key', nil, &b) }.to yield_control
+ end
+ end
+
+ private
+
+ def expect_metric(platform_key, key)
+ expect(graphql_duration_seconds_histogram)
+ .to receive(:observe)
+ .with({ platform_key: platform_key, key: key }, be > 0.0)
+ end
+end
diff --git a/spec/lib/gitlab/graphql/tracing_spec.rb b/spec/lib/gitlab/graphql/tracing_spec.rb
deleted file mode 100644
index 7300a9a572e..00000000000
--- a/spec/lib/gitlab/graphql/tracing_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::Graphql::Tracing do
- let(:graphql_duration_seconds_histogram) { double('Gitlab::Metrics::NullMetric') }
-
- it 'updates graphql histogram with expected labels' do
- query = 'query { users { id } }'
- tracer = described_class.new
-
- allow(tracer)
- .to receive(:graphql_duration_seconds)
- .and_return(graphql_duration_seconds_histogram)
-
- expect_metric('graphql.lex', 'lex')
- expect_metric('graphql.parse', 'parse')
- expect_metric('graphql.validate', 'validate')
- expect_metric('graphql.analyze', 'analyze_multiplex')
- expect_metric('graphql.execute', 'execute_query_lazy')
- expect_metric('graphql.execute', 'execute_multiplex')
-
- GitlabSchema.execute(query, context: { tracers: [tracer] })
- end
-
- private
-
- def expect_metric(platform_key, key)
- expect(graphql_duration_seconds_histogram)
- .to receive(:observe)
- .with({ platform_key: platform_key, key: key }, be > 0.0)
- end
-end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 482e9c05da8..2242543daad 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -99,7 +99,7 @@ merge_requests:
- timelogs
- head_pipeline
- latest_merge_request_diff
-- merge_request_pipelines
+- pipelines_for_merge_request
- merge_request_assignees
- suggestions
- assignees
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index 7972ff253fe..aaf8c9fa2a0 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -10,17 +10,20 @@ describe Gitlab::Metrics::Samplers::RubySampler do
describe '#sample' do
it 'samples various statistics' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage)
+ expect(Gitlab::Metrics::System).to receive(:cpu_time)
expect(Gitlab::Metrics::System).to receive(:file_descriptor_count)
+ expect(Gitlab::Metrics::System).to receive(:memory_usage)
+ expect(Gitlab::Metrics::System).to receive(:process_start_time)
+ expect(Gitlab::Metrics::System).to receive(:max_open_file_descriptors)
expect(sampler).to receive(:sample_gc)
sampler.sample
end
- it 'adds a metric containing the memory usage' do
+ it 'adds a metric containing the process resident memory bytes' do
expect(Gitlab::Metrics::System).to receive(:memory_usage).and_return(9000)
- expect(sampler.metrics[:memory_usage]).to receive(:set).with({}, 9000)
+ expect(sampler.metrics[:process_resident_memory_bytes]).to receive(:set).with({}, 9000)
sampler.sample
end
@@ -34,6 +37,27 @@ describe Gitlab::Metrics::Samplers::RubySampler do
sampler.sample
end
+ it 'adds a metric containing the process total cpu time' do
+ expect(Gitlab::Metrics::System).to receive(:cpu_time).and_return(0.51)
+ expect(sampler.metrics[:process_cpu_seconds_total]).to receive(:set).with({}, 0.51)
+
+ sampler.sample
+ end
+
+ it 'adds a metric containing the process start time' do
+ expect(Gitlab::Metrics::System).to receive(:process_start_time).and_return(12345)
+ expect(sampler.metrics[:process_start_time_seconds]).to receive(:set).with({}, 12345)
+
+ sampler.sample
+ end
+
+ it 'adds a metric containing the process max file descriptors' do
+ expect(Gitlab::Metrics::System).to receive(:max_open_file_descriptors).and_return(1024)
+ expect(sampler.metrics[:process_max_fds]).to receive(:set).with({}, 1024)
+
+ sampler.sample
+ end
+
it 'clears any GC profiles' do
expect(GC::Profiler).to receive(:clear)
diff --git a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
index 4b03f3c2532..090e456644f 100644
--- a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
@@ -39,8 +39,8 @@ describe Gitlab::Metrics::Samplers::UnicornSampler do
it 'updates metrics type unix and with addr' do
labels = { socket_type: 'unix', socket_address: socket_address }
- expect(subject).to receive_message_chain(:unicorn_active_connections, :set).with(labels, 'active')
- expect(subject).to receive_message_chain(:unicorn_queued_connections, :set).with(labels, 'queued')
+ expect(subject.metrics[:unicorn_active_connections]).to receive(:set).with(labels, 'active')
+ expect(subject.metrics[:unicorn_queued_connections]).to receive(:set).with(labels, 'queued')
subject.sample
end
@@ -50,7 +50,6 @@ describe Gitlab::Metrics::Samplers::UnicornSampler do
context 'unicorn listens on tcp sockets' do
let(:tcp_socket_address) { '0.0.0.0:8080' }
let(:tcp_sockets) { [tcp_socket_address] }
-
before do
allow(unicorn).to receive(:listener_names).and_return(tcp_sockets)
end
@@ -71,13 +70,29 @@ describe Gitlab::Metrics::Samplers::UnicornSampler do
it 'updates metrics type unix and with addr' do
labels = { socket_type: 'tcp', socket_address: tcp_socket_address }
- expect(subject).to receive_message_chain(:unicorn_active_connections, :set).with(labels, 'active')
- expect(subject).to receive_message_chain(:unicorn_queued_connections, :set).with(labels, 'queued')
+ expect(subject.metrics[:unicorn_active_connections]).to receive(:set).with(labels, 'active')
+ expect(subject.metrics[:unicorn_queued_connections]).to receive(:set).with(labels, 'queued')
subject.sample
end
end
end
+
+ context 'additional metrics' do
+ let(:unicorn_workers) { 2 }
+
+ before do
+ allow(unicorn).to receive(:listener_names).and_return([""])
+ allow(::Gitlab::Metrics::System).to receive(:cpu_time).and_return(3.14)
+ allow(subject).to receive(:unicorn_workers_count).and_return(unicorn_workers)
+ end
+
+ it "sets additional metrics" do
+ expect(subject.metrics[:unicorn_workers]).to receive(:set).with({}, unicorn_workers)
+
+ subject.sample
+ end
+ end
end
describe '#start' do
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
index 14afcdf5daa..b0603d96eb2 100644
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ b/spec/lib/gitlab/metrics/system_spec.rb
@@ -13,6 +13,18 @@ describe Gitlab::Metrics::System do
expect(described_class.file_descriptor_count).to be > 0
end
end
+
+ describe '.max_open_file_descriptors' do
+ it 'returns the max allowed open file descriptors' do
+ expect(described_class.max_open_file_descriptors).to be > 0
+ end
+ end
+
+ describe '.process_start_time' do
+ it 'returns the process start time' do
+ expect(described_class.process_start_time).to be > 0
+ end
+ end
else
describe '.memory_usage' do
it 'returns 0.0' do
@@ -25,6 +37,18 @@ describe Gitlab::Metrics::System do
expect(described_class.file_descriptor_count).to eq(0)
end
end
+
+ describe '.max_open_file_descriptors' do
+ it 'returns 0' do
+ expect(described_class.max_open_file_descriptors).to eq(0)
+ end
+ end
+
+ describe '.process_start_time' do
+ it 'returns 0' do
+ expect(described_class.process_start_time).to eq(0)
+ end
+ end
end
describe '.cpu_time' do
diff --git a/spec/lib/gitlab/namespaced_session_store_spec.rb b/spec/lib/gitlab/namespaced_session_store_spec.rb
new file mode 100644
index 00000000000..c0af2ede32a
--- /dev/null
+++ b/spec/lib/gitlab/namespaced_session_store_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::NamespacedSessionStore do
+ let(:key) { :some_key }
+ subject { described_class.new(key) }
+
+ it 'stores data under the specified key' do
+ Gitlab::Session.with_session({}) do
+ subject[:new_data] = 123
+
+ expect(Thread.current[:session_storage][key]).to eq(new_data: 123)
+ end
+ end
+
+ it 'retrieves data from the given key' do
+ Thread.current[:session_storage] = { key => { existing_data: 123 } }
+
+ expect(subject[:existing_data]).to eq 123
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 857862a2abd..84b2e2dc823 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -120,10 +120,10 @@ describe Gitlab::PathRegex do
# - Followed by one or more path-parts not starting with `:` or `*`
# - Followed by a path-part that includes a wildcard parameter `*`
# At the time of writing these routes match: http://rubular.com/r/Rv2pDE5Dvw
- STARTING_WITH_NAMESPACE = %r{^/\*namespace_id/:(project_)?id}
- NON_PARAM_PARTS = %r{[^:*][a-z\-_/]*}
- ANY_OTHER_PATH_PART = %r{[a-z\-_/:]*}
- WILDCARD_SEGMENT = /\*/
+ STARTING_WITH_NAMESPACE = %r{^/\*namespace_id/:(project_)?id}.freeze
+ NON_PARAM_PARTS = %r{[^:*][a-z\-_/]*}.freeze
+ ANY_OTHER_PATH_PART = %r{[a-z\-_/:]*}.freeze
+ WILDCARD_SEGMENT = /\*/.freeze
let(:namespaced_wildcard_routes) do
routes_without_format.select do |p|
p =~ %r{#{STARTING_WITH_NAMESPACE}/#{NON_PARAM_PARTS}/#{ANY_OTHER_PATH_PART}#{WILDCARD_SEGMENT}}
@@ -144,7 +144,7 @@ describe Gitlab::PathRegex do
end.uniq
end
- STARTING_WITH_GROUP = %r{^/groups/\*(group_)?id/}
+ STARTING_WITH_GROUP = %r{^/groups/\*(group_)?id/}.freeze
let(:group_routes) do
routes_without_format.select do |path|
path =~ STARTING_WITH_GROUP
diff --git a/spec/lib/gitlab/session_spec.rb b/spec/lib/gitlab/session_spec.rb
new file mode 100644
index 00000000000..8db73f0ec7b
--- /dev/null
+++ b/spec/lib/gitlab/session_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Session do
+ it 'uses the current thread as a data store' do
+ Thread.current[:session_storage] = { a: :b }
+
+ expect(described_class.current).to eq(a: :b)
+ ensure
+ Thread.current[:session_storage] = nil
+ end
+
+ describe '#with_session' do
+ it 'sets session hash' do
+ described_class.with_session(one: 1) do
+ expect(described_class.current).to eq(one: 1)
+ end
+ end
+
+ it 'restores current store after' do
+ described_class.with_session(two: 2) { }
+
+ expect(described_class.current).to eq nil
+ end
+ end
+end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index fee1d701e3a..8f348b1b053 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -701,6 +701,8 @@ describe Notify do
is_expected.to have_body_text project.full_name
is_expected.to have_body_text project.web_url
is_expected.to have_body_text project_member.human_access
+ is_expected.to have_body_text 'leave the project'
+ is_expected.to have_body_text project_url(project, leave: 1)
end
end
@@ -1144,6 +1146,8 @@ describe Notify do
is_expected.to have_body_text group.name
is_expected.to have_body_text group.web_url
is_expected.to have_body_text group_member.human_access
+ is_expected.to have_body_text 'leave the group'
+ is_expected.to have_body_text group_url(group, leave: 1)
end
end
diff --git a/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
new file mode 100644
index 00000000000..105c05bb7ca
--- /dev/null
+++ b/spec/migrations/schedule_sync_issuables_state_id_where_nil_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20190506135400_schedule_sync_issuables_state_id_where_nil')
+
+describe ScheduleSyncIssuablesStateIdWhereNil, :migration, :sidekiq do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:issues) { table(:issues) }
+ let(:migration) { described_class.new }
+ let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
+ let(:project) { projects.create!(namespace_id: group.id) }
+
+ shared_examples 'scheduling migrations' do
+ before do
+ Sidekiq::Worker.clear_all
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ it 'correctly schedules issuable sync background migration' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(migration).to be_scheduled_delayed_migration(120.seconds, resource_1.id, resource_3.id)
+ expect(migration).to be_scheduled_delayed_migration(240.seconds, resource_5.id, resource_5.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+
+ describe '#up' do
+ context 'issues' do
+ it_behaves_like 'scheduling migrations' do
+ let(:migration) { described_class::ISSUES_MIGRATION }
+ let!(:resource_1) { issues.create!(description: 'first', state: 'opened', state_id: nil) }
+ let!(:resource_2) { issues.create!(description: 'second', state: 'closed', state_id: 2) }
+ let!(:resource_3) { issues.create!(description: 'third', state: 'closed', state_id: nil) }
+ let!(:resource_4) { issues.create!(description: 'fourth', state: 'closed', state_id: 2) }
+ let!(:resource_5) { issues.create!(description: 'fifth', state: 'closed', state_id: nil) }
+ end
+ end
+
+ context 'merge requests' do
+ it_behaves_like 'scheduling migrations' do
+ let(:migration) { described_class::MERGE_REQUESTS_MIGRATION }
+ let!(:resource_1) { merge_requests.create!(state: 'opened', state_id: nil, target_project_id: project.id, target_branch: 'feature1', source_branch: 'master') }
+ let!(:resource_2) { merge_requests.create!(state: 'closed', state_id: 2, target_project_id: project.id, target_branch: 'feature2', source_branch: 'master') }
+ let!(:resource_3) { merge_requests.create!(state: 'merged', state_id: nil, target_project_id: project.id, target_branch: 'feature3', source_branch: 'master') }
+ let!(:resource_4) { merge_requests.create!(state: 'locked', state_id: 3, target_project_id: project.id, target_branch: 'feature4', source_branch: 'master') }
+ let!(:resource_5) { merge_requests.create!(state: 'locked', state_id: nil, target_project_id: project.id, target_branch: 'feature4', source_branch: 'master') }
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 59ec7310391..9b489baf163 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1434,7 +1434,7 @@ describe Ci::Build do
build.cancel!
end
- it { is_expected.to be_retryable }
+ it { is_expected.not_to be_retryable }
end
end
@@ -1964,7 +1964,7 @@ describe Ci::Build do
context 'when build has been canceled' do
subject { build_stubbed(:ci_build, :manual, status: :canceled) }
- it { is_expected.to be_playable }
+ it { is_expected.not_to be_playable }
end
context 'when build is successful' do
@@ -2817,7 +2817,7 @@ describe Ci::Build do
context 'when ref is merge request' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
let(:build) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline, project: project) }
context 'when ref is protected' do
@@ -2875,7 +2875,7 @@ describe Ci::Build do
context 'when ref is merge request' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
let(:build) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline, project: project) }
context 'when ref is protected' do
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index b3999765e5f..406a69f3bbc 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
describe Ci::GroupVariable do
subject { build(:ci_group_variable) }
- it { is_expected.to include_module(HasVariable) }
+ it_behaves_like "CI variable"
+
it { is_expected.to include_module(Presentable) }
it { is_expected.to include_module(Maskable) }
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:group_id).with_message(/\(\w+\) has already been taken/) }
diff --git a/spec/models/ci/legacy_stage_spec.rb b/spec/models/ci/legacy_stage_spec.rb
index be0307518eb..bb812cc0533 100644
--- a/spec/models/ci/legacy_stage_spec.rb
+++ b/spec/models/ci/legacy_stage_spec.rb
@@ -272,4 +272,6 @@ describe Ci::LegacyStage do
def create_job(type, status: 'success', stage: stage_name, **opts)
create(type, pipeline: pipeline, stage: stage, status: status, **opts)
end
+
+ it_behaves_like 'manual playable stage', :ci_stage
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 81913f4a3b6..1bfc14d2839 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -35,6 +35,15 @@ describe Ci::PipelineSchedule do
expect(pipeline_schedule).not_to be_valid
end
end
+
+ context 'when cron contains trailing whitespaces' do
+ it 'strips the attribute' do
+ pipeline_schedule = build(:ci_pipeline_schedule, cron: ' 0 0 * * * ')
+
+ expect(pipeline_schedule).to be_valid
+ expect(pipeline_schedule.cron).to eq('0 0 * * *')
+ end
+ end
end
describe '#set_next_run_at' do
diff --git a/spec/models/ci/pipeline_schedule_variable_spec.rb b/spec/models/ci/pipeline_schedule_variable_spec.rb
index 3c9379ecb0d..c96a24d5042 100644
--- a/spec/models/ci/pipeline_schedule_variable_spec.rb
+++ b/spec/models/ci/pipeline_schedule_variable_spec.rb
@@ -5,5 +5,5 @@ require 'spec_helper'
describe Ci::PipelineScheduleVariable do
subject { build(:ci_pipeline_schedule_variable) }
- it { is_expected.to include_module(HasVariable) }
+ it_behaves_like "CI variable"
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 9d0cd654f13..a0319b3eb0a 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -466,7 +466,7 @@ describe Ci::Pipeline, :mailer do
target_branch: 'master')
end
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
it 'does not return the pipeline' do
is_expected.to be_empty
@@ -2942,4 +2942,36 @@ describe Ci::Pipeline, :mailer do
end
end
end
+
+ describe '#find_stage_by_name' do
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:stage_name) { 'test' }
+
+ let(:stage) do
+ create(:ci_stage_entity,
+ pipeline: pipeline,
+ project: pipeline.project,
+ name: 'test')
+ end
+
+ before do
+ create_list(:ci_build, 2, pipeline: pipeline, stage: stage.name)
+ end
+
+ subject { pipeline.find_stage_by_name!(stage_name) }
+
+ context 'when stage exists' do
+ it { is_expected.to eq(stage) }
+ end
+
+ context 'when stage does not exist' do
+ let(:stage_name) { 'build' }
+
+ it 'raises an ActiveRecord exception' do
+ expect do
+ subject
+ end.to raise_exception(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_variable_spec.rb b/spec/models/ci/pipeline_variable_spec.rb
index 2ecb688299a..e8c7ce088e2 100644
--- a/spec/models/ci/pipeline_variable_spec.rb
+++ b/spec/models/ci/pipeline_variable_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
describe Ci::PipelineVariable do
subject { build(:ci_pipeline_variable) }
- it { is_expected.to include_module(HasVariable) }
+ it_behaves_like "CI variable"
+
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:pipeline_id) }
describe '#hook_attrs' do
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index 661958390e2..85cd32fb03a 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -285,4 +285,6 @@ describe Ci::Stage, :models do
end
end
end
+
+ it_behaves_like 'manual playable stage', :ci_stage_entity
end
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index d2df6b3344e..a231c7eaed8 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -5,8 +5,9 @@ require 'spec_helper'
describe Ci::Variable do
subject { build(:ci_variable) }
+ it_behaves_like "CI variable"
+
describe 'validations' do
- it { is_expected.to include_module(HasVariable) }
it { is_expected.to include_module(Presentable) }
it { is_expected.to include_module(Maskable) }
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope).with_message(/\(\w+\) has already been taken/) }
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index bdc0cb8ed86..4f0cd0efe9c 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -69,8 +69,8 @@ describe Clusters::Applications::Runner do
expect(values).to include('privileged: true')
expect(values).to include('image: ubuntu:16.04')
expect(values).to include('resources')
- expect(values).to match(/runnerToken: '?#{ci_runner.token}/)
- expect(values).to match(/gitlabUrl: '?#{Gitlab::Routing.url_helpers.root_url}/)
+ expect(values).to match(/runnerToken: '?#{Regexp.escape(ci_runner.token)}/)
+ expect(values).to match(/gitlabUrl: '?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/)
end
context 'without a runner' do
@@ -83,7 +83,7 @@ describe Clusters::Applications::Runner do
end
it 'uses the new runner token' do
- expect(values).to match(/runnerToken: '?#{runner.token}/)
+ expect(values).to match(/runnerToken: '?#{Regexp.escape(runner.token)}/)
end
end
@@ -114,6 +114,18 @@ describe Clusters::Applications::Runner do
expect(runner.groups).to eq [group]
end
end
+
+ context 'instance cluster' do
+ let(:cluster) { create(:cluster, :with_installed_helm, :instance) }
+
+ include_examples 'runner creation'
+
+ it 'creates an instance runner' do
+ subject
+
+ expect(runner).to be_instance_type
+ end
+ end
end
context 'with duplicated values on vendor/runner/values.yaml' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 894ef3fb956..58203da5b22 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -95,6 +95,24 @@ describe Clusters::Cluster do
it { is_expected.to contain_exactly(cluster) }
end
+ describe '.managed' do
+ subject do
+ described_class.managed
+ end
+
+ context 'cluster is not managed' do
+ let!(:cluster) { create(:cluster, :not_managed) }
+
+ it { is_expected.not_to include(cluster) }
+ end
+
+ context 'cluster is managed' do
+ let!(:cluster) { create(:cluster) }
+
+ it { is_expected.to include(cluster) }
+ end
+ end
+
describe '.missing_kubernetes_namespace' do
let!(:cluster) { create(:cluster, :provided_by_gcp, :project) }
let(:project) { cluster.project }
@@ -307,6 +325,15 @@ describe Clusters::Cluster do
end
end
+ context 'when group and instance have configured kubernetes clusters' do
+ let(:project) { create(:project, group: group) }
+ let!(:instance_cluster) { create(:cluster, :provided_by_gcp, :instance) }
+
+ it 'returns clusters in order, descending the hierarchy' do
+ is_expected.to eq([group_cluster, instance_cluster])
+ end
+ end
+
context 'when sub-group has configured kubernetes cluster', :nested_groups do
let(:sub_group_cluster) { create(:cluster, :provided_by_gcp, :group) }
let(:sub_group) { sub_group_cluster.group }
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 0281dd2c303..e35d14f2282 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -331,6 +331,18 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
{ key: 'KUBE_TOKEN', value: kubernetes.token, public: false }
)
end
+
+ context 'the cluster is not managed' do
+ let!(:cluster) { create(:cluster, :group, :not_managed, platform_kubernetes: kubernetes) }
+
+ it_behaves_like 'setting variables'
+
+ it 'sets KUBE_TOKEN' do
+ expect(subject).to include(
+ { key: 'KUBE_TOKEN', value: kubernetes.token, public: false, masked: true }
+ )
+ end
+ end
end
context 'kubernetes namespace exists for the project' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index ca2f9e36c98..017cca0541e 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -449,7 +449,7 @@ describe CommitStatus do
end
it "lock" do
- is_expected.to be true
+ is_expected.to be_truthy
end
it "raise exception when trying to update" do
@@ -463,7 +463,7 @@ describe CommitStatus do
end
it "do not lock" do
- is_expected.to be false
+ is_expected.to be_falsey
end
it "save correctly" do
diff --git a/spec/models/concerns/has_ref_spec.rb b/spec/models/concerns/has_ref_spec.rb
index 6805731fed3..66b25c77430 100644
--- a/spec/models/concerns/has_ref_spec.rb
+++ b/spec/models/concerns/has_ref_spec.rb
@@ -19,7 +19,7 @@ describe HasRef do
context 'when it was triggered by merge request' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
let(:build) { create(:ci_build, pipeline: pipeline) }
it 'returns false' do
@@ -68,7 +68,7 @@ describe HasRef do
context 'when it is triggered by a merge request' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
let(:build) { create(:ci_build, tag: false, pipeline: pipeline) }
it 'returns nil' do
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 0cd69cb4817..cc777cbf749 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -55,6 +55,29 @@ describe Issue do
end
end
+ describe 'locking' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:lock_version) do
+ [
+ [0],
+ ["0"]
+ ]
+ end
+
+ with_them do
+ it 'works when an issue has a NULL lock_version' do
+ issue = create(:issue)
+
+ described_class.where(id: issue.id).update_all('lock_version = NULL')
+
+ issue.update!(lock_version: lock_version, title: 'locking test')
+
+ expect(issue.reload.title).to eq('locking test')
+ end
+ end
+ end
+
describe '#order_by_position_and_priority' do
let(:project) { create :project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index c68c3ce2abe..782a84f922b 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -70,6 +70,16 @@ describe Member do
expect(child_member).not_to be_valid
end
+ # Membership in a subgroup confers certain access rights, such as being
+ # able to merge or push code to protected branches.
+ it "is valid with an equal level" do
+ child_member.access_level = GroupMember::DEVELOPER
+
+ child_member.validate
+
+ expect(child_member).to be_valid
+ end
+
it "is valid with a higher level" do
child_member.access_level = GroupMember::MAINTAINER
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 7457efef013..c72b6e9033d 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -31,6 +31,29 @@ describe MergeRequest do
end
end
+ describe 'locking' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:lock_version) do
+ [
+ [0],
+ ["0"]
+ ]
+ end
+
+ with_them do
+ it 'works when a merge request has a NULL lock_version' do
+ merge_request = create(:merge_request)
+
+ described_class.where(id: merge_request.id).update_all('lock_version = NULL')
+
+ merge_request.update!(lock_version: lock_version, title: 'locking test')
+
+ expect(merge_request.reload.title).to eq('locking test')
+ end
+ end
+ end
+
describe '#squash_in_progress?' do
let(:repo_path) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 95a4b0f6d71..bfde367c47f 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -146,20 +146,20 @@ describe Namespace do
create(:project,
namespace: namespace,
statistics: build(:project_statistics,
- storage_size: 606,
repository_size: 101,
lfs_objects_size: 202,
- build_artifacts_size: 303))
+ build_artifacts_size: 303,
+ packages_size: 404))
end
let(:project2) do
create(:project,
namespace: namespace,
statistics: build(:project_statistics,
- storage_size: 60,
repository_size: 10,
lfs_objects_size: 20,
- build_artifacts_size: 30))
+ build_artifacts_size: 30,
+ packages_size: 40))
end
it "sums all project storage counters in the namespace" do
@@ -167,10 +167,11 @@ describe Namespace do
project2
statistics = described_class.with_statistics.find(namespace.id)
- expect(statistics.storage_size).to eq 666
+ expect(statistics.storage_size).to eq 1110
expect(statistics.repository_size).to eq 111
expect(statistics.lfs_objects_size).to eq 222
expect(statistics.build_artifacts_size).to eq 333
+ expect(statistics.packages_size).to eq 444
end
it "correctly handles namespaces without projects" do
@@ -180,6 +181,7 @@ describe Namespace do
expect(statistics.repository_size).to eq 0
expect(statistics.lfs_objects_size).to eq 0
expect(statistics.build_artifacts_size).to eq 0
+ expect(statistics.packages_size).to eq 0
end
end
@@ -743,22 +745,25 @@ describe Namespace do
end
end
- describe '#full_path_was' do
+ describe '#full_path_before_last_save' do
context 'when the group has no parent' do
- it 'returns the path was' do
- group = create(:group, parent: nil)
- expect(group.full_path_was).to eq(group.path_was)
+ it 'returns the path before last save' do
+ group = create(:group)
+
+ group.update(parent: nil)
+
+ expect(group.full_path_before_last_save).to eq(group.path_before_last_save)
end
end
context 'when a parent is assigned to a group with no previous parent' do
- it 'returns the path was' do
+ it 'returns the path before last save' do
group = create(:group, parent: nil)
-
parent = create(:group)
- group.parent = parent
- expect(group.full_path_was).to eq("#{group.path_was}")
+ group.update(parent: parent)
+
+ expect(group.full_path_before_last_save).to eq("#{group.path_before_last_save}")
end
end
@@ -766,9 +771,10 @@ describe Namespace do
it 'returns the parent full path' do
parent = create(:group)
group = create(:group, parent: parent)
- group.parent = nil
- expect(group.full_path_was).to eq("#{parent.full_path}/#{group.path}")
+ group.update(parent: nil)
+
+ expect(group.full_path_before_last_save).to eq("#{parent.full_path}/#{group.path}")
end
end
@@ -777,8 +783,10 @@ describe Namespace do
parent = create(:group)
group = create(:group, parent: parent)
new_parent = create(:group)
- group.parent = new_parent
- expect(group.full_path_was).to eq("#{parent.full_path}/#{group.path}")
+
+ group.update(parent: new_parent)
+
+ expect(group.full_path_before_last_save).to eq("#{parent.full_path}/#{group.path}")
end
end
end
diff --git a/spec/models/note_diff_file_spec.rb b/spec/models/note_diff_file_spec.rb
index 99eeac8d778..b15bedd257e 100644
--- a/spec/models/note_diff_file_spec.rb
+++ b/spec/models/note_diff_file_spec.rb
@@ -10,4 +10,31 @@ describe NoteDiffFile do
describe 'validations' do
it { is_expected.to validate_presence_of(:diff_note) }
end
+
+ describe '.referencing_sha' do
+ let!(:diff_note) { create(:diff_note_on_commit) }
+
+ let(:note_diff_file) { diff_note.note_diff_file }
+ let(:project) { diff_note.project }
+
+ it 'finds note diff files by project and sha' do
+ found = described_class.referencing_sha(diff_note.commit_id, project_id: project.id)
+
+ expect(found).to contain_exactly(note_diff_file)
+ end
+
+ it 'excludes note diff files with the wrong project' do
+ other_project = create(:project)
+
+ found = described_class.referencing_sha(diff_note.commit_id, project_id: other_project.id)
+
+ expect(found).to be_empty
+ end
+
+ it 'excludes note diff files with the wrong sha' do
+ found = described_class.referencing_sha(Gitlab::Git::BLANK_SHA, project_id: project.id)
+
+ expect(found).to be_empty
+ end
+ end
end
diff --git a/spec/models/project_services/chat_message/deployment_message_spec.rb b/spec/models/project_services/chat_message/deployment_message_spec.rb
index 86565ce8b01..42c1689db3d 100644
--- a/spec/models/project_services/chat_message/deployment_message_spec.rb
+++ b/spec/models/project_services/chat_message/deployment_message_spec.rb
@@ -89,8 +89,10 @@ describe ChatMessage::DeploymentMessage do
name: "Jane Person",
username: "jane"
},
+ user_url: "user_url",
short_sha: "12345678",
- commit_url: "commit_url"
+ commit_url: "commit_url",
+ commit_title: "commit title text"
}.merge(params)
end
@@ -104,12 +106,13 @@ describe ChatMessage::DeploymentMessage do
deployment = create(:deployment, :success, deployable: ci_build, environment: environment, project: project, user: user, sha: commit.sha)
job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
commit_url = Gitlab::UrlBuilder.build(deployment.commit)
+ user_url = Gitlab::Routing.url_helpers.user_url(user)
data = Gitlab::DataBuilder::Deployment.build(deployment)
message = described_class.new(data)
expect(message.attachments).to eq([{
- text: "[myspace/myproject](#{project.web_url})\n[Job ##{ci_build.id}](#{job_url}), SHA [#{deployment.short_sha}](#{commit_url}), by John Smith (smith)",
+ text: "[myspace/myproject](#{project.web_url}) with job [##{ci_build.id}](#{job_url}) by [John Smith (smith)](#{user_url})\n[#{deployment.short_sha}](#{commit_url}): #{commit.title}",
color: "good"
}])
end
@@ -120,7 +123,7 @@ describe ChatMessage::DeploymentMessage do
message = described_class.new(data)
expect(message.attachments).to eq([{
- text: "[project_path_with_namespace](project_web_url)\n[Job #3](deployable_url), SHA [12345678](commit_url), by Jane Person (jane)",
+ text: "[project_path_with_namespace](project_web_url) with job [#3](deployable_url) by [Jane Person (jane)](user_url)\n[12345678](commit_url): commit title text",
color: "danger"
}])
end
@@ -131,7 +134,7 @@ describe ChatMessage::DeploymentMessage do
message = described_class.new(data)
expect(message.attachments).to eq([{
- text: "[project_path_with_namespace](project_web_url)\n[Job #3](deployable_url), SHA [12345678](commit_url), by Jane Person (jane)",
+ text: "[project_path_with_namespace](project_web_url) with job [#3](deployable_url) by [Jane Person (jane)](user_url)\n[12345678](commit_url): commit title text",
color: "warning"
}])
end
@@ -142,7 +145,7 @@ describe ChatMessage::DeploymentMessage do
message = described_class.new(data)
expect(message.attachments).to eq([{
- text: "[project_path_with_namespace](project_web_url)\n[Job #3](deployable_url), SHA [12345678](commit_url), by Jane Person (jane)",
+ text: "[project_path_with_namespace](project_web_url) with job [#3](deployable_url) by [Jane Person (jane)](user_url)\n[12345678](commit_url): commit title text",
color: "#334455"
}])
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index bb0257e7456..2a17bd6002e 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -3164,61 +3164,105 @@ describe Project do
end
describe '.with_feature_available_for_user' do
- let!(:user) { create(:user) }
- let!(:feature) { MergeRequest }
- let!(:project) { create(:project, :public, :merge_requests_enabled) }
+ let(:user) { create(:user) }
+ let(:feature) { MergeRequest }
subject { described_class.with_feature_available_for_user(feature, user) }
- context 'when user has access to project' do
- subject { described_class.with_feature_available_for_user(feature, user) }
+ shared_examples 'feature disabled' do
+ let(:project) { create(:project, :public, :merge_requests_disabled) }
+
+ it 'does not return projects with the project feature disabled' do
+ is_expected.not_to include(project)
+ end
+ end
+
+ shared_examples 'feature public' do
+ let(:project) { create(:project, :public, :merge_requests_public) }
+
+ it 'returns projects with the project feature public' do
+ is_expected.to include(project)
+ end
+ end
+
+ shared_examples 'feature enabled' do
+ let(:project) { create(:project, :public, :merge_requests_enabled) }
+
+ it 'returns projects with the project feature enabled' do
+ is_expected.to include(project)
+ end
+ end
+
+ shared_examples 'feature access level is nil' do
+ let(:project) { create(:project, :public) }
+
+ it 'returns projects with the project feature access level nil' do
+ project.project_feature.update(merge_requests_access_level: nil)
+
+ is_expected.to include(project)
+ end
+ end
+ context 'with user' do
before do
project.add_guest(user)
end
- context 'when public project' do
- context 'when feature is public' do
- it 'returns project' do
- is_expected.to include(project)
+ it_behaves_like 'feature disabled'
+ it_behaves_like 'feature public'
+ it_behaves_like 'feature enabled'
+ it_behaves_like 'feature access level is nil'
+
+ context 'when feature is private' do
+ let(:project) { create(:project, :public, :merge_requests_private) }
+
+ context 'when user does not have access to the feature' do
+ it 'does not return projects with the project feature private' do
+ is_expected.not_to include(project)
end
end
- context 'when feature is private' do
- let!(:project) { create(:project, :public, :merge_requests_private) }
-
- it 'returns project when user has access to the feature' do
- project.add_maintainer(user)
+ context 'when user has access to the feature' do
+ it 'returns projects with the project feature private' do
+ project.add_reporter(user)
is_expected.to include(project)
end
-
- it 'does not return project when user does not have the minimum access level required' do
- is_expected.not_to include(project)
- end
end
end
+ end
- context 'when private project' do
- let!(:project) { create(:project) }
+ context 'user is an admin' do
+ let(:user) { create(:user, :admin) }
- it 'returns project when user has access to the feature' do
- project.add_maintainer(user)
+ it_behaves_like 'feature disabled'
+ it_behaves_like 'feature public'
+ it_behaves_like 'feature enabled'
+ it_behaves_like 'feature access level is nil'
- is_expected.to include(project)
- end
+ context 'when feature is private' do
+ let(:project) { create(:project, :public, :merge_requests_private) }
- it 'does not return project when user does not have the minimum access level required' do
- is_expected.not_to include(project)
+ it 'returns projects with the project feature private' do
+ is_expected.to include(project)
end
end
end
- context 'when user does not have access to project' do
- let!(:project) { create(:project) }
+ context 'without user' do
+ let(:user) { nil }
- it 'does not return project when user cant access project' do
- is_expected.not_to include(project)
+ it_behaves_like 'feature disabled'
+ it_behaves_like 'feature public'
+ it_behaves_like 'feature enabled'
+ it_behaves_like 'feature access level is nil'
+
+ context 'when feature is private' do
+ let(:project) { create(:project, :public, :merge_requests_private) }
+
+ it 'does not return projects with the project feature private' do
+ is_expected.not_to include(project)
+ end
end
end
end
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index c670b6aac56..738398a06f9 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -124,16 +124,30 @@ describe ProjectStatistics do
end
describe '.increment_statistic' do
- it 'increases the statistic by that amount' do
- expect { described_class.increment_statistic(project.id, :build_artifacts_size, 13) }
- .to change { statistics.reload.build_artifacts_size }
- .by(13)
+ shared_examples 'a statistic that increases storage_size' do
+ it 'increases the statistic by that amount' do
+ expect { described_class.increment_statistic(project.id, stat, 13) }
+ .to change { statistics.reload.send(stat) || 0 }
+ .by(13)
+ end
+
+ it 'increases also storage size by that amount' do
+ expect { described_class.increment_statistic(project.id, stat, 20) }
+ .to change { statistics.reload.storage_size }
+ .by(20)
+ end
end
- it 'increases also storage size by that amount' do
- expect { described_class.increment_statistic(project.id, :build_artifacts_size, 20) }
- .to change { statistics.reload.storage_size }
- .by(20)
+ context 'when adjusting :build_artifacts_size' do
+ let(:stat) { :build_artifacts_size }
+
+ it_behaves_like 'a statistic that increases storage_size'
+ end
+
+ context 'when adjusting :packages_size' do
+ let(:stat) { :packages_size }
+
+ it_behaves_like 'a statistic that increases storage_size'
end
context 'when the amount is 0' do
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 0b19a4f8efc..7c106ce6b85 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -49,6 +49,11 @@ RSpec.describe Release do
it 'counts the link as an asset' do
is_expected.to eq(1 + Releases::Source::FORMATS.count)
end
+
+ it "excludes sources count when asked" do
+ assets_count = release.assets_count(except: [:sources])
+ expect(assets_count).to eq(1)
+ end
end
end
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index f743dfed31f..e14b19db915 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -373,6 +373,22 @@ describe RemoteMirror, :mailer do
end
end
+ describe '#disabled?' do
+ subject { remote_mirror.disabled? }
+
+ context 'when disabled' do
+ let(:remote_mirror) { build(:remote_mirror, enabled: false) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when enabled' do
+ let(:remote_mirror) { build(:remote_mirror, enabled: true) }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
def create_mirror(params)
project = FactoryBot.create(:project, :repository)
project.remote_mirrors.create!(params)
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index a6c3d5756aa..9ff0f355fd4 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1451,6 +1451,91 @@ describe Repository do
end
end
+ describe '#rebase' do
+ let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master', source_project: project) }
+
+ shared_examples_for 'a method that can rebase successfully' do
+ it 'returns the rebase commit sha' do
+ rebase_commit_sha = repository.rebase(user, merge_request)
+ head_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
+
+ expect(rebase_commit_sha).to eq(head_sha)
+ end
+
+ it 'sets the `rebase_commit_sha` for the given merge request' do
+ rebase_commit_sha = repository.rebase(user, merge_request)
+
+ expect(rebase_commit_sha).not_to be_nil
+ expect(merge_request.rebase_commit_sha).to eq(rebase_commit_sha)
+ end
+ end
+
+ context 'when two_step_rebase feature is enabled' do
+ before do
+ stub_feature_flags(two_step_rebase: true)
+ end
+
+ it_behaves_like 'a method that can rebase successfully'
+
+ it 'executes the new Gitaly RPC' do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:rebase)
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService).not_to receive(:user_rebase)
+
+ repository.rebase(user, merge_request)
+ end
+
+ describe 'rolling back the `rebase_commit_sha`' do
+ let(:new_sha) { Digest::SHA1.hexdigest('foo') }
+
+ it 'does not rollback when there are no errors' do
+ second_response = double(pre_receive_error: nil, git_error: nil)
+ mock_gitaly(second_response)
+
+ repository.rebase(user, merge_request)
+
+ expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
+ end
+
+ it 'does rollback when an error is encountered in the second step' do
+ second_response = double(pre_receive_error: 'my_error', git_error: nil)
+ mock_gitaly(second_response)
+
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::PreReceiveError)
+
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
+ end
+
+ def mock_gitaly(second_response)
+ responses = [
+ double(rebase_sha: new_sha).as_null_object,
+ second_response
+ ]
+
+ expect_any_instance_of(
+ Gitaly::OperationService::Stub
+ ).to receive(:user_rebase_confirmable).and_return(responses.each)
+ end
+ end
+ end
+
+ context 'when two_step_rebase feature is disabled' do
+ before do
+ stub_feature_flags(two_step_rebase: false)
+ end
+
+ it_behaves_like 'a method that can rebase successfully'
+
+ it 'executes the deprecated Gitaly RPC' do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService).to receive(:user_rebase)
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService).not_to receive(:rebase)
+
+ repository.rebase(user, merge_request)
+ end
+ end
+ end
+
describe '#revert' do
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index b2ef17a81d4..e09c91e874a 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -73,4 +73,10 @@ describe UserPreference do
it_behaves_like 'a sort_by preference'
end
end
+
+ describe '#timezone' do
+ it 'returns server time as default' do
+ expect(user_preference.timezone).to eq(Time.zone.tzinfo.name)
+ end
+ end
end
diff --git a/spec/policies/clusters/cluster_policy_spec.rb b/spec/policies/clusters/cluster_policy_spec.rb
index b2f0ca1bc30..cc3dde154dc 100644
--- a/spec/policies/clusters/cluster_policy_spec.rb
+++ b/spec/policies/clusters/cluster_policy_spec.rb
@@ -66,5 +66,21 @@ describe Clusters::ClusterPolicy, :models do
it { expect(policy).to be_disallowed :admin_cluster }
end
end
+
+ context 'instance cluster' do
+ let(:cluster) { create(:cluster, :instance) }
+
+ context 'when user' do
+ it { expect(policy).to be_disallowed :update_cluster }
+ it { expect(policy).to be_disallowed :admin_cluster }
+ end
+
+ context 'when admin' do
+ let(:user) { create(:admin) }
+
+ it { expect(policy).to be_allowed :update_cluster }
+ it { expect(policy).to be_allowed :admin_cluster }
+ end
+ end
end
end
diff --git a/spec/policies/clusters/instance_policy_spec.rb b/spec/policies/clusters/instance_policy_spec.rb
new file mode 100644
index 00000000000..9d755c6d29d
--- /dev/null
+++ b/spec/policies/clusters/instance_policy_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::InstancePolicy do
+ let(:user) { create(:user) }
+ let(:policy) { described_class.new(user, Clusters::Instance.new) }
+
+ describe 'rules' do
+ context 'when user' do
+ it { expect(policy).to be_disallowed :read_cluster }
+ it { expect(policy).to be_disallowed :update_cluster }
+ it { expect(policy).to be_disallowed :admin_cluster }
+ end
+
+ context 'when admin' do
+ let(:user) { create(:admin) }
+
+ context 'with instance_level_clusters enabled' do
+ it { expect(policy).to be_allowed :read_cluster }
+ it { expect(policy).to be_allowed :update_cluster }
+ it { expect(policy).to be_allowed :admin_cluster }
+ end
+
+ context 'with instance_level_clusters disabled' do
+ before do
+ stub_feature_flags(instance_clusters: false)
+ end
+
+ it { expect(policy).to be_disallowed :read_cluster }
+ it { expect(policy).to be_disallowed :update_cluster }
+ it { expect(policy).to be_disallowed :admin_cluster }
+ end
+ end
+ end
+end
diff --git a/spec/policies/personal_snippet_policy_spec.rb b/spec/policies/personal_snippet_policy_spec.rb
index a38e0dbd797..097000ceb6a 100644
--- a/spec/policies/personal_snippet_policy_spec.rb
+++ b/spec/policies/personal_snippet_policy_spec.rb
@@ -14,13 +14,6 @@ describe PersonalSnippetPolicy do
]
end
- let(:comment_permissions) do
- [
- :comment_personal_snippet,
- :create_note
- ]
- end
-
def permissions(user)
described_class.new(user, snippet)
end
@@ -33,7 +26,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_disallowed(*comment_permissions)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -44,7 +37,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_allowed(*comment_permissions)
+ is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -55,7 +48,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_allowed(*comment_permissions)
+ is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
end
@@ -70,7 +63,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_disallowed(:read_personal_snippet)
- is_expected.to be_disallowed(*comment_permissions)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -81,7 +74,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_allowed(*comment_permissions)
+ is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -92,7 +85,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_disallowed(:read_personal_snippet)
- is_expected.to be_disallowed(*comment_permissions)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -103,7 +96,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_allowed(*comment_permissions)
+ is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
end
@@ -118,7 +111,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_disallowed(:read_personal_snippet)
- is_expected.to be_disallowed(*comment_permissions)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -129,7 +122,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_disallowed(:read_personal_snippet)
- is_expected.to be_disallowed(*comment_permissions)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -140,7 +133,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_disallowed(:comment_personal_snippet)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -151,7 +144,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_disallowed(:read_personal_snippet)
- is_expected.to be_disallowed(*comment_permissions)
+ is_expected.to be_disallowed(:create_note)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
@@ -162,7 +155,7 @@ describe PersonalSnippetPolicy do
it do
is_expected.to be_allowed(:read_personal_snippet)
- is_expected.to be_allowed(*comment_permissions)
+ is_expected.to be_allowed(:create_note)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 42f8bf3137b..8075fcade5f 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -17,7 +17,7 @@ describe ProjectPolicy do
read_project_for_iids read_issue_iid read_label
read_milestone read_project_snippet read_project_member read_note
create_project create_issue create_note upload_file create_merge_request_in
- award_emoji
+ award_emoji read_release
]
end
@@ -26,7 +26,7 @@ describe ProjectPolicy do
download_code fork_project create_project_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
- read_merge_request download_wiki_code read_sentry_issue read_release
+ read_merge_request download_wiki_code read_sentry_issue
]
end
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index a9d786bc872..42701a5f8d1 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -210,6 +210,12 @@ describe Clusters::ClusterPresenter do
it { is_expected.to eq('Group cluster') }
end
+
+ context 'instance_type cluster' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+
+ it { is_expected.to eq('Instance cluster') }
+ end
end
describe '#show_path' do
@@ -227,6 +233,12 @@ describe Clusters::ClusterPresenter do
it { is_expected.to eq(group_cluster_path(group, cluster)) }
end
+
+ context 'instance_type cluster' do
+ let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
+
+ it { is_expected.to eq(admin_cluster_path(cluster)) }
+ end
end
describe '#read_only_kubernetes_platform_fields?' do
diff --git a/spec/requests/api/discussions_spec.rb b/spec/requests/api/discussions_spec.rb
index 35c448d187d..16036297ec7 100644
--- a/spec/requests/api/discussions_spec.rb
+++ b/spec/requests/api/discussions_spec.rb
@@ -13,7 +13,7 @@ describe API::Discussions do
let!(:issue) { create(:issue, project: project, author: user) }
let!(:issue_note) { create(:discussion_note_on_issue, noteable: issue, project: project, author: user) }
- it_behaves_like 'discussions API', 'projects', 'issues', 'iid' do
+ it_behaves_like 'discussions API', 'projects', 'issues', 'iid', can_reply_to_invididual_notes: true do
let(:parent) { project }
let(:noteable) { issue }
let(:note) { issue_note }
@@ -37,7 +37,7 @@ describe API::Discussions do
let!(:diff_note) { create(:diff_note_on_merge_request, noteable: noteable, project: project, author: user) }
let(:parent) { project }
- it_behaves_like 'discussions API', 'projects', 'merge_requests', 'iid'
+ it_behaves_like 'discussions API', 'projects', 'merge_requests', 'iid', can_reply_to_invididual_notes: true
it_behaves_like 'diff discussions API', 'projects', 'merge_requests', 'iid'
it_behaves_like 'resolvable discussions API', 'projects', 'merge_requests', 'iid'
end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index 66b9aae4b58..d50bae3dc47 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -51,6 +51,7 @@ describe API::GroupVariables do
expect(response).to have_gitlab_http_status(200)
expect(json_response['value']).to eq(variable.value)
expect(json_response['protected']).to eq(variable.protected?)
+ expect(json_response['variable_type']).to eq(variable.variable_type)
end
it 'responds with 404 Not Found if requesting non-existing variable' do
@@ -94,17 +95,19 @@ describe API::GroupVariables do
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('PROTECTED_VALUE_2')
expect(json_response['protected']).to be_truthy
+ expect(json_response['variable_type']).to eq('env_var')
end
it 'creates variable with optional attributes' do
expect do
- post api("/groups/#{group.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
+ post api("/groups/#{group.id}/variables", user), params: { variable_type: 'file', key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
end.to change {group.variables.count}.by(1)
expect(response).to have_gitlab_http_status(201)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('VALUE_2')
expect(json_response['protected']).to be_falsey
+ expect(json_response['variable_type']).to eq('file')
end
it 'does not allow to duplicate variable key' do
@@ -145,7 +148,7 @@ describe API::GroupVariables do
initial_variable = group.variables.reload.first
value_before = initial_variable.value
- put api("/groups/#{group.id}/variables/#{variable.key}", user), params: { value: 'VALUE_1_UP', protected: true }
+ put api("/groups/#{group.id}/variables/#{variable.key}", user), params: { variable_type: 'file', value: 'VALUE_1_UP', protected: true }
updated_variable = group.variables.reload.first
@@ -153,6 +156,7 @@ describe API::GroupVariables do
expect(value_before).to eq(variable.value)
expect(updated_variable.value).to eq('VALUE_1_UP')
expect(updated_variable).to be_protected
+ expect(json_response['variable_type']).to eq('file')
end
it 'responds with 404 Not Found if requesting non-existing variable' do
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index 88c19448373..bae0302f3ff 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -959,7 +959,9 @@ describe API::Internal do
it 'creates a new merge request' do
expect do
- post api('/internal/post_receive'), params: valid_params
+ Sidekiq::Testing.fake! do
+ post api('/internal/post_receive'), params: valid_params
+ end
end.to change { MergeRequest.count }.by(1)
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index ed2ef4c730b..c14507de186 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -863,7 +863,7 @@ describe API::Jobs do
end
describe 'POST /projects/:id/jobs/:job_id/retry' do
- let(:job) { create(:ci_build, :canceled, pipeline: pipeline) }
+ let(:job) { create(:ci_build, :failed, pipeline: pipeline) }
before do
post api("/projects/#{project.id}/jobs/#{job.id}/retry", api_user)
@@ -873,7 +873,7 @@ describe API::Jobs do
context 'user with :update_build permission' do
it 'retries non-running job' do
expect(response).to have_gitlab_http_status(201)
- expect(project.builds.first.status).to eq('canceled')
+ expect(project.builds.first.status).to eq('failed')
expect(json_response['status']).to eq('pending')
end
end
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 79edbb301f2..48869cab4da 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -236,7 +236,7 @@ describe API::Members do
params: { user_id: stranger.id, access_level: Member::REPORTER }
expect(response).to have_gitlab_http_status(400)
- expect(json_response['message']['access_level']).to eq(["should be higher than Developer inherited membership from group #{parent.name}"])
+ expect(json_response['message']['access_level']).to eq(["should be greater than or equal to Developer inherited membership from group #{parent.name}"])
end
it 'creates the member if group level is lower', :nested_groups do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 5c94a87529b..007f3517e64 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1495,6 +1495,33 @@ describe API::MergeRequests do
expect(json_response['merge_when_pipeline_succeeds']).to eq(true)
end
+ context 'when the MR requires pipeline success' do
+ it 'returns 405 if the pipeline is missing' do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:merge_when_pipeline_succeeds).and_return(true)
+ allow_any_instance_of(MergeRequest).to receive(:head_pipeline).and_return(nil)
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+
+ expect(response).to have_gitlab_http_status(405)
+ expect(json_response['message']).to eq('Not allowed: pipeline does not exist')
+ end
+ end
+
+ context 'when the request requires pipeline success' do
+ it 'returns 405 if the pipeline is missing' do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:merge_when_pipeline_succeeds).and_return(true)
+ allow_any_instance_of(MergeRequest).to receive(:head_pipeline).and_return(nil)
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user),
+ params: { merge_when_pipeline_succeeds: true }
+
+ expect(response).to have_gitlab_http_status(405)
+ expect(json_response['message']).to eq('Not allowed: pipeline does not exist')
+ end
+ end
+
it "returns 404 for an invalid merge request IID" do
put api("/projects/#{project.id}/merge_requests/12345/merge", user)
diff --git a/spec/requests/api/pipeline_schedules_spec.rb b/spec/requests/api/pipeline_schedules_spec.rb
index 870ef34437f..072bd02f2ac 100644
--- a/spec/requests/api/pipeline_schedules_spec.rb
+++ b/spec/requests/api/pipeline_schedules_spec.rb
@@ -91,6 +91,7 @@ describe API::PipelineSchedules do
let(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: developer) }
before do
+ pipeline_schedule.variables << build(:ci_pipeline_schedule_variable)
pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
end
@@ -331,13 +332,14 @@ describe API::PipelineSchedules do
it 'creates pipeline_schedule_variable' do
expect do
post api("/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}/variables", developer),
- params: params
+ params: params.merge(variable_type: 'file')
end.to change { pipeline_schedule.variables.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('pipeline_schedule_variable')
expect(json_response['key']).to eq(params[:key])
expect(json_response['value']).to eq(params[:value])
+ expect(json_response['variable_type']).to eq('file')
end
end
@@ -389,11 +391,12 @@ describe API::PipelineSchedules do
context 'authenticated user with valid permissions' do
it 'updates pipeline_schedule_variable' do
put api("/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}/variables/#{pipeline_schedule_variable.key}", developer),
- params: { value: 'updated_value' }
+ params: { value: 'updated_value', variable_type: 'file' }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('pipeline_schedule_variable')
expect(json_response['value']).to eq('updated_value')
+ expect(json_response['variable_type']).to eq('file')
end
end
diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb
index 26158231444..35b3dd219f7 100644
--- a/spec/requests/api/pipelines_spec.rb
+++ b/spec/requests/api/pipelines_spec.rb
@@ -294,6 +294,7 @@ describe API::Pipelines do
expect(variable.key).to eq(expected_variable['key'])
expect(variable.value).to eq(expected_variable['value'])
+ expect(variable.variable_type).to eq(expected_variable['variable_type'])
end
end
@@ -314,7 +315,7 @@ describe API::Pipelines do
end
context 'variables given' do
- let(:variables) { [{ 'key' => 'UPLOAD_TO_S3', 'value' => 'true' }] }
+ let(:variables) { [{ 'variable_type' => 'file', 'key' => 'UPLOAD_TO_S3', 'value' => 'true' }] }
it 'creates and returns a new pipeline using the given variables' do
expect do
@@ -330,7 +331,7 @@ describe API::Pipelines do
end
describe 'using variables conditions' do
- let(:variables) { [{ 'key' => 'STAGING', 'value' => 'true' }] }
+ let(:variables) { [{ 'variable_type' => 'env_var', 'key' => 'STAGING', 'value' => 'true' }] }
before do
config = YAML.dump(test: { script: 'test', only: { variables: ['$STAGING'] } })
@@ -467,7 +468,7 @@ describe API::Pipelines do
subject
expect(response).to have_gitlab_http_status(200)
- expect(json_response).to contain_exactly({ "key" => "foo", "value" => "bar" })
+ expect(json_response).to contain_exactly({ "variable_type" => "env_var", "key" => "foo", "value" => "bar" })
end
end
end
@@ -488,7 +489,7 @@ describe API::Pipelines do
subject
expect(response).to have_gitlab_http_status(200)
- expect(json_response).to contain_exactly({ "key" => "foo", "value" => "bar" })
+ expect(json_response).to contain_exactly({ "variable_type" => "env_var", "key" => "foo", "value" => "bar" })
end
end
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index 94e6ca2c07c..5357be3cdee 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -189,6 +189,7 @@ describe API::ProjectClusters do
{
name: 'test-cluster',
domain: 'domain.example.com',
+ managed: false,
platform_kubernetes_attributes: platform_kubernetes_attributes
}
end
@@ -220,6 +221,7 @@ describe API::ProjectClusters do
expect(cluster_result.project).to eq(project)
expect(cluster_result.name).to eq('test-cluster')
expect(cluster_result.domain).to eq('domain.example.com')
+ expect(cluster_result.managed).to be_falsy
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.namespace).to eq(namespace)
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 577f61ae8d0..16d306f39cd 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -504,8 +504,9 @@ describe API::Projects do
project4.add_reporter(user2)
end
- it 'returns an array of groups the user has at least developer access' do
+ it 'returns an array of projects the user has at least developer access to' do
get api('/projects', user2), params: { min_access_level: 30 }
+
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 71ec091c42c..8603fa2a73d 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -52,7 +52,7 @@ describe API::Releases do
it 'matches response schema' do
get api("/projects/#{project.id}/releases", maintainer)
- expect(response).to match_response_schema('releases')
+ expect(response).to match_response_schema('public_api/v4/releases')
end
end
@@ -69,10 +69,25 @@ describe API::Releases do
end
context 'when user is a guest' do
- it 'responds 403 Forbidden' do
+ let!(:release) do
+ create(:release,
+ project: project,
+ tag: 'v0.1',
+ author: maintainer,
+ created_at: 2.days.ago)
+ end
+
+ it 'responds 200 OK' do
get api("/projects/#{project.id}/releases", guest)
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it "does not expose tag, commit and source code" do
+ get api("/projects/#{project.id}/releases", guest)
+
+ expect(response).to match_response_schema('public_api/v4/release/releases_for_guest')
+ expect(json_response[0]['assets']['count']).to eq(release.links.count)
end
context 'when project is public' do
@@ -83,6 +98,13 @@ describe API::Releases do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it "exposes tag, commit and source code" do
+ get api("/projects/#{project.id}/releases", guest)
+
+ expect(response).to match_response_schema('public_api/v4/releases')
+ expect(json_response[0]['assets']['count']).to eq(release.links.count + release.sources.count)
+ end
end
end
@@ -135,7 +157,7 @@ describe API::Releases do
it 'matches response schema' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
- expect(response).to match_response_schema('release')
+ expect(response).to match_response_schema('public_api/v4/release')
end
it 'contains source information as assets' do
@@ -225,6 +247,17 @@ describe API::Releases do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it "exposes tag and commit" do
+ create(:release,
+ project: project,
+ tag: 'v0.1',
+ author: maintainer,
+ created_at: 2.days.ago)
+ get api("/projects/#{project.id}/releases/v0.1", guest)
+
+ expect(response).to match_response_schema('public_api/v4/release')
+ end
end
end
end
@@ -306,7 +339,7 @@ describe API::Releases do
it 'matches response schema' do
post api("/projects/#{project.id}/releases", maintainer), params: params
- expect(response).to match_response_schema('release')
+ expect(response).to match_response_schema('public_api/v4/release')
end
it 'does not create a new tag' do
@@ -378,7 +411,7 @@ describe API::Releases do
it 'matches response schema' do
post api("/projects/#{project.id}/releases", maintainer), params: params
- expect(response).to match_response_schema('release')
+ expect(response).to match_response_schema('public_api/v4/release')
end
end
@@ -532,7 +565,7 @@ describe API::Releases do
it 'matches response schema' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
- expect(response).to match_response_schema('release')
+ expect(response).to match_response_schema('public_api/v4/release')
end
context 'when user tries to update sha' do
@@ -624,7 +657,7 @@ describe API::Releases do
it 'matches response schema' do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
- expect(response).to match_response_schema('release')
+ expect(response).to match_response_schema('public_api/v4/release')
end
context 'when there are no corresponding releases' do
diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb
index fffe878ddbd..d898319e709 100644
--- a/spec/requests/api/tags_spec.rb
+++ b/spec/requests/api/tags_spec.rb
@@ -378,7 +378,7 @@ describe API::Tags do
post api(route, user), params: { description: description }
expect(response).to have_gitlab_http_status(201)
- expect(response).to match_response_schema('public_api/v4/release')
+ expect(response).to match_response_schema('public_api/v4/release/tag_release')
expect(json_response['tag_name']).to eq(tag_name)
expect(json_response['description']).to eq(description)
end
diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/variables_spec.rb
index 5df6baf0ddf..cc07869a744 100644
--- a/spec/requests/api/variables_spec.rb
+++ b/spec/requests/api/variables_spec.rb
@@ -43,6 +43,7 @@ describe API::Variables do
expect(response).to have_gitlab_http_status(200)
expect(json_response['value']).to eq(variable.value)
expect(json_response['protected']).to eq(variable.protected?)
+ expect(json_response['variable_type']).to eq('env_var')
end
it 'responds with 404 Not Found if requesting non-existing variable' do
@@ -80,17 +81,19 @@ describe API::Variables do
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('PROTECTED_VALUE_2')
expect(json_response['protected']).to be_truthy
+ expect(json_response['variable_type']).to eq('env_var')
end
it 'creates variable with optional attributes' do
expect do
- post api("/projects/#{project.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
+ post api("/projects/#{project.id}/variables", user), params: { variable_type: 'file', key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
end.to change {project.variables.count}.by(1)
expect(response).to have_gitlab_http_status(201)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
expect(json_response['value']).to eq('VALUE_2')
expect(json_response['protected']).to be_falsey
+ expect(json_response['variable_type']).to eq('file')
end
it 'does not allow to duplicate variable key' do
@@ -125,7 +128,7 @@ describe API::Variables do
initial_variable = project.variables.reload.first
value_before = initial_variable.value
- put api("/projects/#{project.id}/variables/#{variable.key}", user), params: { value: 'VALUE_1_UP', protected: true }
+ put api("/projects/#{project.id}/variables/#{variable.key}", user), params: { variable_type: 'file', value: 'VALUE_1_UP', protected: true }
updated_variable = project.variables.reload.first
@@ -133,6 +136,7 @@ describe API::Variables do
expect(value_before).to eq(variable.value)
expect(updated_variable.value).to eq('VALUE_1_UP')
expect(updated_variable).to be_protected
+ expect(updated_variable.variable_type).to eq('file')
end
it 'responds with 404 Not Found if requesting non-existing variable' do
diff --git a/spec/routing/uploads_routing_spec.rb b/spec/routing/uploads_routing_spec.rb
new file mode 100644
index 00000000000..6a041ffdd6c
--- /dev/null
+++ b/spec/routing/uploads_routing_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Uploads', 'routing' do
+ it 'allows creating uploads for personal snippets' do
+ expect(post('/uploads/personal_snippet?id=1')).to route_to(
+ controller: 'uploads',
+ action: 'create',
+ model: 'personal_snippet',
+ id: '1'
+ )
+ end
+
+ it 'does not allow creating uploads for other models' do
+ UploadsController::MODEL_CLASSES.keys.compact.each do |model|
+ next if model == 'personal_snippet'
+
+ expect(post("/uploads/#{model}?id=1")).not_to be_routable
+ end
+ end
+end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index dba7fd91747..47f767ae4ab 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -137,7 +137,7 @@ describe PipelineEntity do
context 'when pipeline is detached merge request pipeline' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:project) { merge_request.target_project }
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
it 'makes detached flag true' do
expect(subject[:flags][:detached_merge_request_pipeline]).to be_truthy
@@ -185,7 +185,7 @@ describe PipelineEntity do
context 'when pipeline is merge request pipeline' do
let(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
let(:project) { merge_request.target_project }
- let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:pipeline) { merge_request.pipelines_for_merge_request.first }
it 'makes detached flag false' do
expect(subject[:flags][:detached_merge_request_pipeline]).to be_falsy
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index d9023036534..54e6abc2d3a 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -156,8 +156,8 @@ describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
-
expected_queries = Gitlab.ee? ? 38 : 31
+
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb
index 2034c7891ef..6b1185d1283 100644
--- a/spec/serializers/stage_entity_spec.rb
+++ b/spec/serializers/stage_entity_spec.rb
@@ -48,6 +48,10 @@ describe StageEntity do
expect(subject[:title]).to eq 'test: passed'
end
+ it 'does not contain play_details info' do
+ expect(subject[:status][:action]).not_to be_present
+ end
+
context 'when the jobs should be grouped' do
let(:entity) { described_class.new(stage, request: request, grouped: true) }
@@ -66,5 +70,29 @@ describe StageEntity do
end
end
end
+
+ context 'with a skipped stage' do
+ let(:stage) { create(:ci_stage_entity, status: 'skipped') }
+
+ it 'contains play_all_manual' do
+ expect(subject[:status][:action]).to be_present
+ end
+ end
+
+ context 'with a scheduled stage' do
+ let(:stage) { create(:ci_stage_entity, status: 'scheduled') }
+
+ it 'contains play_all_manual' do
+ expect(subject[:status][:action]).to be_present
+ end
+ end
+
+ context 'with a manual stage' do
+ let(:stage) { create(:ci_stage_entity, status: 'manual') }
+
+ it 'contains play_all_manual' do
+ expect(subject[:status][:action]).to be_present
+ end
+ end
end
end
diff --git a/spec/serializers/stage_serializer_spec.rb b/spec/serializers/stage_serializer_spec.rb
new file mode 100644
index 00000000000..aae17cfbcb9
--- /dev/null
+++ b/spec/serializers/stage_serializer_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe StageSerializer do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+ let(:resource) { create(:ci_stage_entity) }
+
+ let(:serializer) do
+ described_class.new(current_user: user, project: project)
+ end
+
+ subject { serializer.represent(resource) }
+
+ describe '#represent' do
+ context 'with a single entity' do
+ it 'serializes the stage object' do
+ expect(subject[:name]).to eq(resource.name)
+ end
+ end
+
+ context 'with an array of entities' do
+ let(:resource) { create_list(:ci_stage_entity, 2) }
+
+ it 'serializes the array of stages' do
+ expect(subject).not_to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 8a80652b3d8..9a3ac75e418 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -773,7 +773,7 @@ describe Ci::CreatePipelineService do
end
end
- describe 'Merge request pipelines' do
+ describe 'Pipelines for merge requests' do
let(:pipeline) do
execute_service(source: source,
merge_request: merge_request,
@@ -817,12 +817,14 @@ describe Ci::CreatePipelineService do
let(:merge_request) do
create(:merge_request,
source_project: project,
- source_branch: Gitlab::Git.ref_name(ref_name),
+ source_branch: 'feature',
target_project: project,
target_branch: 'master')
end
- it 'creates a merge request pipeline' do
+ let(:ref_name) { merge_request.ref_path }
+
+ it 'creates a detached merge request pipeline' do
expect(pipeline).to be_persisted
expect(pipeline).to be_merge_request_event
expect(pipeline.merge_request).to eq(merge_request)
@@ -837,6 +839,13 @@ describe Ci::CreatePipelineService do
expect(pipeline.target_sha).to be_nil
end
+ it 'schedules update for the head pipeline of the merge request' do
+ expect(UpdateHeadPipelineForMergeRequestWorker)
+ .to receive(:perform_async).with(merge_request.id)
+
+ pipeline
+ end
+
context 'when target sha is specified' do
let(:target_sha) { merge_request.target_branch_sha }
@@ -858,15 +867,16 @@ describe Ci::CreatePipelineService do
let(:merge_request) do
create(:merge_request,
source_project: project,
- source_branch: Gitlab::Git.ref_name(ref_name),
+ source_branch: 'feature',
target_project: target_project,
target_branch: 'master')
end
+ let(:ref_name) { 'refs/heads/feature' }
let!(:project) { fork_project(target_project, nil, repository: true) }
let!(:target_project) { create(:project, :repository) }
- it 'creates a merge request pipeline in the forked project' do
+ it 'creates a legacy detached merge request pipeline in the forked project' do
expect(pipeline).to be_persisted
expect(project.ci_pipelines).to eq([pipeline])
expect(target_project.ci_pipelines).to be_empty
@@ -884,7 +894,7 @@ describe Ci::CreatePipelineService do
}
end
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
end
@@ -894,7 +904,7 @@ describe Ci::CreatePipelineService do
context 'when merge request is not specified' do
let(:merge_request) { nil }
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:merge_request]).to eq(["can't be blank"])
end
@@ -928,7 +938,7 @@ describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
@@ -939,7 +949,7 @@ describe Ci::CreatePipelineService do
context 'when merge request is not specified' do
let(:merge_request) { nil }
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
@@ -968,7 +978,7 @@ describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
@@ -999,7 +1009,7 @@ describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
@@ -1028,7 +1038,7 @@ describe Ci::CreatePipelineService do
target_branch: 'master')
end
- it 'does not create a merge request pipeline' do
+ it 'does not create a detached merge request pipeline' do
expect(pipeline).not_to be_persisted
expect(pipeline.errors[:base])
diff --git a/spec/services/ci/play_manual_stage_service_spec.rb b/spec/services/ci/play_manual_stage_service_spec.rb
new file mode 100644
index 00000000000..5d812745c7f
--- /dev/null
+++ b/spec/services/ci/play_manual_stage_service_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PlayManualStageService, '#execute' do
+ let(:current_user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, user: current_user) }
+ let(:project) { pipeline.project }
+ let(:service) { described_class.new(project, current_user, pipeline: pipeline) }
+ let(:stage_status) { 'manual' }
+
+ let(:stage) do
+ create(:ci_stage_entity,
+ pipeline: pipeline,
+ project: project,
+ name: 'test')
+ end
+
+ before do
+ project.add_maintainer(current_user)
+ create_builds_for_stage(status: stage_status)
+ end
+
+ context 'when pipeline has manual builds' do
+ before do
+ service.execute(stage)
+ end
+
+ it 'starts manual builds from pipeline' do
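+ # Playing the stage moves its builds out of the manual state, so none remain manual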
+ expect(pipeline.builds.manual.count).to eq(0)
+ end
+
+ it 'updates manual builds' do
+ pipeline.builds.each do |build|
+ expect(build.user).to eq(current_user)
+ end
+ end
+ end
+
+ context 'when pipeline has no manual builds' do
+ let(:stage_status) { 'failed' }
+
+ before do
+ service.execute(stage)
+ end
+
+ it 'does not update the builds' do
+ expect(pipeline.builds.failed.count).to eq(3)
+ end
+ end
+
+ context 'when user does not have permission on a specific build' do
+ before do
+ allow_any_instance_of(Ci::Build).to receive(:play)
+ .and_raise(Gitlab::Access::AccessDeniedError)
+
+ service.execute(stage)
+ end
+
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error)
+ .exactly(stage.builds.manual.count)
+
+ service.execute(stage)
+ end
+ end
+
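+ # Helper: creates three manual builds for the stage in the given status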
+ def create_builds_for_stage(options)
+ options.merge!({
+ when: 'manual',
+ pipeline: pipeline,
+ stage: stage.name,
+ stage_id: stage.id,
+ user: pipeline.user
+ })
+
+ create_list(:ci_build, 3, options)
+ end
+end
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index 20555873503..bb86a742f0e 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -151,8 +151,8 @@ describe Clusters::Applications::CreateService do
'helm' | :application_helm | true | false
'ingress' | :application_ingress | true | true
'runner' | :application_runner | true | true
+ 'prometheus' | :application_prometheus | true | true
'jupyter' | :application_jupyter | false | true
- 'prometheus' | :application_prometheus | false | true
end
with_them do
diff --git a/spec/services/clusters/build_service_spec.rb b/spec/services/clusters/build_service_spec.rb
index da0cb42b3a1..f3e852726f4 100644
--- a/spec/services/clusters/build_service_spec.rb
+++ b/spec/services/clusters/build_service_spec.rb
@@ -21,5 +21,13 @@ describe Clusters::BuildService do
is_expected.to be_group_type
end
end
+
+ describe 'when cluster subject is an instance' do
+ let(:cluster_subject) { Clusters::Instance.new }
+
+ it 'sets the cluster_type to instance_type' do
+ is_expected.to be_instance_type
+ end
+ end
end
end
diff --git a/spec/services/clusters/refresh_service_spec.rb b/spec/services/clusters/refresh_service_spec.rb
index 9e442ebf4e9..94c35228955 100644
--- a/spec/services/clusters/refresh_service_spec.rb
+++ b/spec/services/clusters/refresh_service_spec.rb
@@ -121,5 +121,11 @@ describe Clusters::RefreshService do
end
end
end
+
+ context 'cluster is not managed' do
+ let!(:cluster) { create(:cluster, :project, :not_managed, projects: [project]) }
+
+ include_examples 'does not create a kubernetes namespace'
+ end
end
end
diff --git a/spec/services/lfs/file_transformer_spec.rb b/spec/services/lfs/file_transformer_spec.rb
index 2c6e86069f3..888eea6e91e 100644
--- a/spec/services/lfs/file_transformer_spec.rb
+++ b/spec/services/lfs/file_transformer_spec.rb
@@ -64,6 +64,25 @@ describe Lfs::FileTransformer do
expect(result.encoding).to eq('text')
end
+ context 'when an actual file is passed' do
+ let(:file) { Tempfile.new(file_path) }
+
+ before do
+ file.write(file_content)
+ file.rewind
+ end
+
+ after do
+ file.unlink
+ end
+
+ it "creates an LfsObject with the file's content" do
+ subject.new_file(file_path, file)
+
+ expect(LfsObject.last.file.read).to eq file_content
+ end
+ end
+
context "when doesn't use LFS" do
let(:file_path) { 'other.filetype' }
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index c795176a1e4..ed48f4b1e44 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -195,7 +195,7 @@ describe MergeRequests::CreateService do
expect(merge_request).to be_persisted
merge_request.reload
- expect(merge_request.merge_request_pipelines.count).to eq(1)
+ expect(merge_request.pipelines_for_merge_request.count).to eq(1)
expect(merge_request.actual_head_pipeline).to be_detached_merge_request_pipeline
end
@@ -247,7 +247,7 @@ describe MergeRequests::CreateService do
expect(merge_request).to be_persisted
merge_request.reload
- expect(merge_request.merge_request_pipelines.count).to eq(0)
+ expect(merge_request.pipelines_for_merge_request.count).to eq(0)
end
end
@@ -281,7 +281,7 @@ describe MergeRequests::CreateService do
expect(merge_request).to be_persisted
merge_request.reload
- expect(merge_request.merge_request_pipelines.count).to eq(0)
+ expect(merge_request.pipelines_for_merge_request.count).to eq(0)
end
end
end
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 0d9523a4c2c..a443e4588d9 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -73,7 +73,7 @@ describe MergeRequests::RebaseService do
end
context 'valid params' do
- describe 'successful rebase' do
+ shared_examples_for 'a service that can execute a successful rebase' do
before do
service.execute(merge_request)
end
@@ -99,6 +99,22 @@ describe MergeRequests::RebaseService do
end
end
+ context 'when the two_step_rebase feature is enabled' do
+ before do
+ stub_feature_flags(two_step_rebase: true)
+ end
+
+ it_behaves_like 'a service that can execute a successful rebase'
+ end
+
+ context 'when the two_step_rebase feature is disabled' do
+ before do
+ stub_feature_flags(two_step_rebase: false)
+ end
+
+ it_behaves_like 'a service that can execute a successful rebase'
+ end
+
context 'fork' do
describe 'successful fork rebase' do
let(:forked_project) do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index d20b2d81763..7258428589f 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -166,8 +166,8 @@ describe MergeRequests::RefreshService do
it 'create detached merge request pipeline with commits' do
expect { subject }
- .to change { @merge_request.merge_request_pipelines.count }.by(1)
- .and change { @another_merge_request.merge_request_pipelines.count }.by(0)
+ .to change { @merge_request.pipelines_for_merge_request.count }.by(1)
+ .and change { @another_merge_request.pipelines_for_merge_request.count }.by(0)
expect(@merge_request.has_commits?).to be_truthy
expect(@another_merge_request.has_commits?).to be_falsy
@@ -175,13 +175,13 @@ describe MergeRequests::RefreshService do
it 'does not create detached merge request pipeline for forked project' do
expect { subject }
- .not_to change { @fork_merge_request.merge_request_pipelines.count }
+ .not_to change { @fork_merge_request.pipelines_for_merge_request.count }
end
it 'create detached merge request pipeline for non-fork merge request' do
subject
- expect(@merge_request.merge_request_pipelines.first)
+ expect(@merge_request.pipelines_for_merge_request.first)
.to be_detached_merge_request_pipeline
end
@@ -190,7 +190,7 @@ describe MergeRequests::RefreshService do
it 'does not create detached merge request pipeline' do
expect { subject }
- .not_to change { @merge_request.merge_request_pipelines.count }
+ .not_to change { @merge_request.pipelines_for_merge_request.count }
end
end
@@ -199,9 +199,9 @@ describe MergeRequests::RefreshService do
it 'creates legacy detached merge request pipeline for fork merge request' do
expect { subject }
- .to change { @fork_merge_request.merge_request_pipelines.count }.by(1)
+ .to change { @fork_merge_request.pipelines_for_merge_request.count }.by(1)
- expect(@fork_merge_request.merge_request_pipelines.first)
+ expect(@fork_merge_request.pipelines_for_merge_request.first)
.to be_legacy_detached_merge_request_pipeline
end
end
@@ -214,7 +214,7 @@ describe MergeRequests::RefreshService do
it 'create legacy detached merge request pipeline for non-fork merge request' do
subject
- expect(@merge_request.merge_request_pipelines.first)
+ expect(@merge_request.pipelines_for_merge_request.first)
.to be_legacy_detached_merge_request_pipeline
end
end
@@ -245,11 +245,11 @@ describe MergeRequests::RefreshService do
it 'does not re-create a duplicate detached merge request pipeline' do
expect do
service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master')
- end.to change { @merge_request.merge_request_pipelines.count }.by(1)
+ end.to change { @merge_request.pipelines_for_merge_request.count }.by(1)
expect do
service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master')
- end.not_to change { @merge_request.merge_request_pipelines.count }
+ end.not_to change { @merge_request.pipelines_for_merge_request.count }
end
end
end
@@ -266,7 +266,7 @@ describe MergeRequests::RefreshService do
it 'does not create a detached merge request pipeline' do
expect { subject }
- .not_to change { @merge_request.merge_request_pipelines.count }
+ .not_to change { @merge_request.pipelines_for_merge_request.count }
end
end
end
diff --git a/spec/services/projects/cleanup_service_spec.rb b/spec/services/projects/cleanup_service_spec.rb
index 29eabc86327..5c246854eb7 100644
--- a/spec/services/projects/cleanup_service_spec.rb
+++ b/spec/services/projects/cleanup_service_spec.rb
@@ -6,13 +6,13 @@ describe Projects::CleanupService do
let(:project) { create(:project, :repository, bfg_object_map: fixture_file_upload('spec/fixtures/bfg_object_map.txt')) }
let(:object_map) { project.bfg_object_map }
+ let(:cleaner) { service.__send__(:repository_cleaner) }
+
subject(:service) { described_class.new(project) }
describe '#execute' do
- it 'runs the apply_bfg_object_map gitaly RPC' do
- expect_next_instance_of(Gitlab::Git::RepositoryCleaner) do |cleaner|
- expect(cleaner).to receive(:apply_bfg_object_map).with(kind_of(IO))
- end
+ it 'runs the apply_bfg_object_map_stream gitaly RPC' do
+ expect(cleaner).to receive(:apply_bfg_object_map_stream).with(kind_of(IO))
service.execute
end
@@ -37,10 +37,91 @@ describe Projects::CleanupService do
expect(object_map.exists?).to be_falsy
end
+ context 'with a tainted merge request diff' do
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:diff) { merge_request.merge_request_diff }
+ let(:entry) { build_entry(diff.commits.first.id) }
+
+ before do
+ allow(cleaner)
+ .to receive(:apply_bfg_object_map_stream)
+ .and_yield(Gitaly::ApplyBfgObjectMapStreamResponse.new(entries: [entry]))
+ end
+
+ it 'removes the tainted commit from the database' do
+ service.execute
+
+ expect(MergeRequestDiff.exists?(diff.id)).to be_falsy
+ end
+
+ it 'ignores non-commit responses from Gitaly' do
+ entry.type = :UNKNOWN
+
+ service.execute
+
+ expect(MergeRequestDiff.exists?(diff.id)).to be_truthy
+ end
+ end
+
+ context 'with a tainted diff note' do
+ let(:diff_note) { create(:diff_note_on_commit, project: project) }
+ let(:note_diff_file) { diff_note.note_diff_file }
+ let(:entry) { build_entry(diff_note.commit_id) }
+
+ let(:highlight_cache) { Gitlab::DiscussionsDiff::HighlightCache }
+ let(:cache_id) { note_diff_file.id }
+
+ before do
+ allow(cleaner)
+ .to receive(:apply_bfg_object_map_stream)
+ .and_yield(Gitaly::ApplyBfgObjectMapStreamResponse.new(entries: [entry]))
+ end
+
+ it 'removes the tainted commit from the database' do
+ service.execute
+
+ expect(NoteDiffFile.exists?(note_diff_file.id)).to be_falsy
+ end
+
+ it 'removes the highlight cache from redis' do
+ write_cache(highlight_cache, cache_id, [{}])
+
+ expect(read_cache(highlight_cache, cache_id)).not_to be_nil
+
+ service.execute
+
+ expect(read_cache(highlight_cache, cache_id)).to be_nil
+ end
+
+ it 'ignores non-commit responses from Gitaly' do
+ entry.type = :UNKNOWN
+
+ service.execute
+
+ expect(NoteDiffFile.exists?(note_diff_file.id)).to be_truthy
+ end
+ end
+
it 'raises an error if no object map can be found' do
object_map.remove!
expect { service.execute }.to raise_error(described_class::NoUploadError)
end
end
+
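+ # Helper: builds a Gitaly object map entry marking old_oid as rewritten to the blank SHA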
+ def build_entry(old_oid)
+ Gitaly::ApplyBfgObjectMapStreamResponse::Entry.new(
+ type: :COMMIT,
+ old_oid: old_oid,
+ new_oid: Gitlab::Git::BLANK_SHA
+ )
+ end
+
+ def read_cache(cache, key)
+ cache.read_multiple([key]).first
+ end
+
+ def write_cache(cache, key, value)
+ cache.write_multiple(key => value)
+ end
end
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index 368cf123c3e..f651db70cbd 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -81,6 +81,9 @@ describe Projects::HousekeepingService do
# At push 10, 20, ... (except those above)
expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :incremental_repack, :the_lease_key, :the_uuid)
.exactly(16).times
+ # At push 6, 12, 18, ... (except those above)
+ expect(GitGarbageCollectWorker).to receive(:perform_async).with(project.id, :pack_refs, :the_lease_key, :the_uuid)
+ .exactly(27).times
201.times do
subject.increment!
diff --git a/spec/services/system_hooks_service_spec.rb b/spec/services/system_hooks_service_spec.rb
index 5dc7394b84f..f5c6e972953 100644
--- a/spec/services/system_hooks_service_spec.rb
+++ b/spec/services/system_hooks_service_spec.rb
@@ -86,20 +86,20 @@ describe SystemHooksService do
context 'group_rename' do
it 'contains old and new path' do
- allow(group).to receive(:path_was).and_return('old-path')
+ allow(group).to receive(:path_before_last_save).and_return('old-path')
data = event_data(group, :rename)
expect(data).to include(:event_name, :name, :created_at, :updated_at, :full_path, :path, :group_id, :old_path, :old_full_path)
expect(data[:path]).to eq(group.path)
expect(data[:full_path]).to eq(group.path)
- expect(data[:old_path]).to eq(group.path_was)
- expect(data[:old_full_path]).to eq(group.path_was)
+ expect(data[:old_path]).to eq(group.path_before_last_save)
+ expect(data[:old_full_path]).to eq(group.path_before_last_save)
end
it 'contains old and new full_path for subgroup' do
subgroup = create(:group, parent: group)
- allow(subgroup).to receive(:path_was).and_return('old-path')
+ allow(subgroup).to receive(:path_before_last_save).and_return('old-path')
data = event_data(subgroup, :rename)
@@ -110,13 +110,13 @@ describe SystemHooksService do
context 'user_rename' do
it 'contains old and new username' do
- allow(user).to receive(:username_was).and_return('old-username')
+ allow(user).to receive(:username_before_last_save).and_return('old-username')
data = event_data(user, :rename)
expect(data).to include(:event_name, :name, :created_at, :updated_at, :email, :user_id, :username, :old_username)
expect(data[:username]).to eq(user.username)
- expect(data[:old_username]).to eq(user.username_was)
+ expect(data[:old_username]).to eq(user.username_before_last_save)
end
end
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index e04c05418b0..9384287f98a 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -13,6 +13,15 @@ describe Users::UpdateService do
expect(user.name).to eq('New Name')
end
+ it 'updates time preferences' do
+ result = update_user(user, timezone: 'Europe/Warsaw', time_display_relative: true, time_format_in_24h: false)
+
+ expect(result).to eq(status: :success)
+ expect(user.reload.timezone).to eq('Europe/Warsaw')
+ expect(user.time_display_relative).to eq(true)
+ expect(user.time_format_in_24h).to eq(false)
+ end
+
it 'returns an error result when record cannot be updated' do
result = {}
expect do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index fbc5fcea7b9..9266bee34d6 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -53,7 +53,7 @@ RSpec.configure do |config|
config.display_try_failure_messages = true
config.infer_spec_type_from_file_location!
- config.full_backtrace = true
+ config.full_backtrace = !!ENV['CI']
config.define_derived_metadata(file_path: %r{/spec/}) do |metadata|
location = metadata[:location]
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 18a7a392c12..875a9a76e12 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -17,6 +17,8 @@ JS_CONSOLE_FILTER = Regexp.union([
"Download the Vue Devtools extension"
])
+CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
+
Capybara.register_driver :chrome do |app|
capabilities = Selenium::WebDriver::Remote::Capabilities.chrome(
# This enables access to logs with `page.driver.manage.get_log(:browser)`
@@ -29,7 +31,7 @@ Capybara.register_driver :chrome do |app|
)
options = Selenium::WebDriver::Chrome::Options.new
- options.add_argument("window-size=1240,1400")
+ options.add_argument("window-size=#{CAPYBARA_WINDOW_SIZE.join(',')}")
# Chrome won't work properly in a Docker container in sandbox mode
options.add_argument("no-sandbox")
@@ -78,8 +80,11 @@ RSpec.configure do |config|
protocol: 'http')
# reset window size between tests
- unless session.current_window.size == [1240, 1400]
- session.current_window.resize_to(1240, 1400) rescue nil
+ unless session.current_window.size == CAPYBARA_WINDOW_SIZE
+ begin
+ session.current_window.resize_to(*CAPYBARA_WINDOW_SIZE)
+ rescue # Ignore failures when the window cannot be resized
+ end
end
end
diff --git a/spec/support/helpers/features/notes_helpers.rb b/spec/support/helpers/features/notes_helpers.rb
index 89517fde6e2..38f30a14409 100644
--- a/spec/support/helpers/features/notes_helpers.rb
+++ b/spec/support/helpers/features/notes_helpers.rb
@@ -23,8 +23,18 @@ module Spec
def preview_note(text)
page.within('.js-main-target-form') do
- fill_in('note[note]', with: text)
+ filled_text = fill_in('note[note]', with: text)
+
+ begin
+ # Dismiss quick action prompt if it appears
+ filled_text.parent.send_keys(:escape)
+ rescue Selenium::WebDriver::Error::ElementNotInteractableError
+ # It's fine if we can't escape when there's no prompt.
+ end
+
click_on('Preview')
+
+ yield if block_given?
end
end
end
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index 03057a102c5..34ef185ea27 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -78,20 +78,17 @@ module FilteredSearchHelpers
# .tokens-container to make sure the correct names and values are rendered
def expect_tokens(tokens)
page.within '.filtered-search-box .tokens-container' do
- page.all(:css, '.tokens-container li .selectable').each_with_index do |el, index|
- token_name = tokens[index][:name]
- token_value = tokens[index][:value]
- token_emoji = tokens[index][:emoji_name]
+ token_elements = page.all(:css, 'li.filtered-search-token')
- expect(el.find('.name')).to have_content(token_name)
+ tokens.each_with_index do |token, index|
+ el = token_elements[index]
- if token_value
- expect(el.find('.value')).to have_content(token_value)
- end
+ expect(el.find('.name')).to have_content(token[:name])
+ expect(el.find('.value')).to have_content(token[:value]) if token[:value].present?
# gl-emoji content is blank when the emoji unicode is not supported
- if token_emoji
- selector = %(gl-emoji[data-name="#{token_emoji}"])
+ if token[:emoji_name].present?
+ selector = %(gl-emoji[data-name="#{token[:emoji_name]}"])
expect(el.find('.value')).to have_css(selector)
end
end
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 9cae8f934db..494398dc4de 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -15,7 +15,7 @@ module JavaScriptFixturesHelpers
end
def fixture_root_path
- 'spec/javascripts/fixtures'
+ (Gitlab.ee? ? 'ee/' : '') + 'spec/javascripts/fixtures'
end
# Public: Removes all fixture files from given directory
diff --git a/spec/support/helpers/mobile_helpers.rb b/spec/support/helpers/mobile_helpers.rb
index 9dc1f1de436..4230d315d9b 100644
--- a/spec/support/helpers/mobile_helpers.rb
+++ b/spec/support/helpers/mobile_helpers.rb
@@ -8,7 +8,7 @@ module MobileHelpers
end
def restore_window_size
- resize_window(1366, 768)
+ resize_window(*CAPYBARA_WINDOW_SIZE)
end
def resize_window(width, height)
diff --git a/spec/support/helpers/select2_helper.rb b/spec/support/helpers/select2_helper.rb
index f4f0415985c..87672c8896d 100644
--- a/spec/support/helpers/select2_helper.rb
+++ b/spec/support/helpers/select2_helper.rb
@@ -35,6 +35,10 @@ module Select2Helper
execute_script("$('#{selector}').select2('open');")
end
+ def close_select2(selector)
+ execute_script("$('#{selector}').select2('close');")
+ end
+
def scroll_select2_to_bottom(selector)
evaluate_script "$('#{selector}').scrollTop($('#{selector}')[0].scrollHeight); $('#{selector}');"
end
diff --git a/spec/support/shared_context/policies/project_policy_shared_context.rb b/spec/support/shared_context/policies/project_policy_shared_context.rb
index ee5cfcd850d..54d9f5b15f2 100644
--- a/spec/support/shared_context/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_context/policies/project_policy_shared_context.rb
@@ -24,8 +24,7 @@ RSpec.shared_context 'ProjectPolicy context' do
download_code fork_project create_project_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
- read_merge_request download_wiki_code read_sentry_issue read_release
- read_prometheus
+ read_merge_request download_wiki_code read_sentry_issue read_prometheus
]
end
diff --git a/spec/support/shared_examples/application_setting_examples.rb b/spec/support/shared_examples/application_setting_examples.rb
index d8f7ba1185e..421303c97be 100644
--- a/spec/support/shared_examples/application_setting_examples.rb
+++ b/spec/support/shared_examples/application_setting_examples.rb
@@ -260,6 +260,7 @@ RSpec.shared_examples 'application settings examples' do
allow(Gitlab.config.sentry).to receive(:enabled).and_return(false)
allow(Gitlab.config.sentry).to receive(:dsn).and_return(nil)
+ allow(Gitlab.config.sentry).to receive(:clientside_dsn).and_return(nil)
expect(setting.sentry_enabled).to eq true
expect(setting.sentry_dsn).to eq 'https://b44a0828b72421a6d8e99efd68d44fa8@example.com/40'
@@ -277,12 +278,13 @@ RSpec.shared_examples 'application settings examples' do
allow(Gitlab.config.sentry).to receive(:enabled).and_return(true)
allow(Gitlab.config.sentry).to receive(:dsn).and_return('https://b44a0828b72421a6d8e99efd68d44fa8@example.com/42')
+ allow(Gitlab.config.sentry).to receive(:clientside_dsn).and_return('https://b44a0828b72421a6d8e99efd68d44fa8@example.com/43')
expect(setting).not_to receive(:read_attribute)
expect(setting.sentry_enabled).to eq true
expect(setting.sentry_dsn).to eq 'https://b44a0828b72421a6d8e99efd68d44fa8@example.com/42'
expect(setting.clientside_sentry_enabled).to eq true
- expect(setting.clientside_sentry_dsn). to eq 'https://b44a0828b72421a6d8e99efd68d44fa8@example.com/42'
+ expect(setting.clientside_sentry_dsn).to eq 'https://b44a0828b72421a6d8e99efd68d44fa8@example.com/43'
end
end
end
diff --git a/spec/support/shared_examples/ci/stage_shared_examples.rb b/spec/support/shared_examples/ci/stage_shared_examples.rb
new file mode 100644
index 00000000000..925974ed11e
--- /dev/null
+++ b/spec/support/shared_examples/ci/stage_shared_examples.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+shared_examples 'manual playable stage' do |stage_type|
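+ # stage_type is a factory name (e.g. :ci_stage_entity); each context below supplies the status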
+ let(:stage) { build(stage_type, status: status) }
+
+ describe '#manual_playable?' do
+ subject { stage.manual_playable? }
+
+ context 'when is manual' do
+ let(:status) { 'manual' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when is scheduled' do
+ let(:status) { 'scheduled' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when is skipped' do
+ let(:status) { 'skipped' }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/ci_trace_shared_examples.rb b/spec/support/shared_examples/ci_trace_shared_examples.rb
index c603421d748..db935bcb388 100644
--- a/spec/support/shared_examples/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/ci_trace_shared_examples.rb
@@ -329,14 +329,6 @@ shared_examples_for 'trace with disabled live trace feature' do
it_behaves_like 'read successfully with IO'
end
- context 'when current_path (with project_ci_id) exists' do
- before do
- expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
- end
-
- it_behaves_like 'read successfully with IO'
- end
-
context 'when db trace exists' do
before do
build.send(:write_attribute, :trace, "data")
@@ -396,37 +388,6 @@ shared_examples_for 'trace with disabled live trace feature' do
end
end
- context 'deprecated path' do
- let(:path) { trace.send(:deprecated_path) }
-
- context 'with valid ci_id' do
- before do
- build.project.update(ci_id: 1000)
-
- FileUtils.mkdir_p(File.dirname(path))
-
- File.open(path, "w") do |file|
- file.write("data")
- end
- end
-
- it "trace exist" do
- expect(trace.exist?).to be(true)
- end
-
- it "can be erased" do
- trace.erase!
- expect(trace.exist?).to be(false)
- end
- end
-
- context 'without valid ci_id' do
- it "does not return deprecated path" do
- expect(path).to be_nil
- end
- end
- end
-
context 'stored in database' do
before do
build.send(:write_attribute, :trace, "data")
diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb
index b615a8f54cf..e80722857ec 100644
--- a/spec/support/shared_examples/controllers/variables_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/variables_shared_examples.rb
@@ -120,4 +120,16 @@ shared_examples 'PATCH #update updates variables' do
expect(response).to match_response_schema('variables')
end
end
+
+ context 'for variables of type file' do
+ let(:variables_attributes) do
+ [
+ new_variable_attributes.merge(variable_type: 'file')
+ ]
+ end
+
+ it 'creates a new variable of type file' do
+ expect { subject }.to change { owner.variables.file.count }.by(1)
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/ci_variable_shared_examples.rb b/spec/support/shared_examples/models/ci_variable_shared_examples.rb
new file mode 100644
index 00000000000..f93de8b6ff1
--- /dev/null
+++ b/spec/support/shared_examples/models/ci_variable_shared_examples.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+shared_examples_for 'CI variable' do
+ it { is_expected.to include_module(HasVariable) }
+
+ describe "variable type" do
+ it 'defines variable types' do
+ expect(described_class.variable_types).to eq({ "env_var" => 1, "file" => 2 })
+ end
+
+ it "defaults variable type to env_var" do
+ expect(subject.variable_type).to eq("env_var")
+ end
+
+ it "supports variable type file" do
+ variable = described_class.new(variable_type: :file)
+ expect(variable).to be_file
+ end
+ end
+
+ it 'strips whitespaces when assigning key' do
+ subject.key = " SECRET "
+ expect(subject.key).to eq("SECRET")
+ end
+
+ it 'can convert to runner variable' do
+ expect(subject.to_runner_variable.keys).to include(:key, :value, :public, :file)
+ end
+end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index 77376496854..e5375bc8280 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -41,7 +41,7 @@ shared_examples_for 'inherited access level as a member of entity' do
member.update(access_level: Gitlab::Access::REPORTER)
- expect(member.errors.full_messages).to eq(["Access level should be higher than Developer inherited membership from group #{parent_entity.name}"])
+ expect(member.errors.full_messages).to eq(["Access level should be greater than or equal to Developer inherited membership from group #{parent_entity.name}"])
end
it 'allows changing the level from a non existing member' do
diff --git a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
index e0d0b790a0e..a79a61bc708 100644
--- a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
shared_examples 'close quick action' do |issuable_type|
+ include Spec::Support::Helpers::Features::NotesHelpers
+
before do
project.add_maintainer(maintainer)
gitlab_sign_in(maintainer)
@@ -76,10 +78,7 @@ shared_examples 'close quick action' do |issuable_type|
it 'explains close quick action' do
visit public_send("project_#{issuable_type}_path", project, issuable)
- page.within('.js-main-target-form') do
- fill_in 'note[note]', with: "this is done, close\n/close"
- click_on 'Preview'
-
+ preview_note("this is done, close\n/close") do
expect(page).not_to have_content '/close'
expect(page).to have_content 'this is done, close'
expect(page).to have_content "Closes this #{issuable_type.to_s.humanize.downcase}."
diff --git a/spec/support/shared_examples/requests/api/discussions.rb b/spec/support/shared_examples/requests/api/discussions.rb
index eff8e401bad..96f79081d26 100644
--- a/spec/support/shared_examples/requests/api/discussions.rb
+++ b/spec/support/shared_examples/requests/api/discussions.rb
@@ -1,4 +1,4 @@
-shared_examples 'discussions API' do |parent_type, noteable_type, id_name|
+shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_reply_to_individual_notes: false|
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "returns an array of discussions" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
@@ -136,13 +136,25 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name|
expect(response).to have_gitlab_http_status(400)
end
- it "returns a 400 bad request error if discussion is individual note" do
- note.update_attribute(:type, nil)
+ context 'when the discussion is an individual note' do
+ before do
+ note.update!(type: nil)
- post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
- "discussions/#{note.discussion_id}/notes", user), params: { body: 'hi!' }
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
+ "discussions/#{note.discussion_id}/notes", user), params: { body: 'hi!' }
+ end
- expect(response).to have_gitlab_http_status(400)
+ if can_reply_to_individual_notes
+ it 'creates a new discussion' do
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['body']).to eq('hi!')
+ expect(json_response['type']).to eq('DiscussionNote')
+ end
+ else
+ it 'returns 400 bad request' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
end
end
diff --git a/spec/uploaders/import_export_uploader_spec.rb b/spec/uploaders/import_export_uploader_spec.rb
index 825c1cabc14..2dea48e3a88 100644
--- a/spec/uploaders/import_export_uploader_spec.rb
+++ b/spec/uploaders/import_export_uploader_spec.rb
@@ -3,9 +3,18 @@ require 'spec_helper'
describe ImportExportUploader do
let(:model) { build_stubbed(:import_export_upload) }
let(:upload) { create(:upload, model: model) }
+ let(:import_export_upload) { ImportExportUpload.new }
subject { described_class.new(model, :import_file) }
+ context 'local store' do
+ describe '#move_to_store' do
+ it 'returns true' do
+ expect(subject.move_to_store).to be true
+ end
+ end
+ end
+
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
@@ -16,5 +25,28 @@ describe ImportExportUploader do
it_behaves_like 'builds correct paths',
store_dir: %r[import_export_upload/import_file/],
upload_path: %r[import_export_upload/import_file/]
+
+ describe '#move_to_store' do
+ it 'returns false' do
+ expect(subject.move_to_store).to be false
+ end
+ end
+
+ describe 'with an export file directly uploaded' do
+ let(:tempfile) { Tempfile.new(['test', '.gz']) }
+
+ before do
+ stub_uploads_object_storage(described_class, direct_upload: true)
+ import_export_upload.export_file = tempfile
+ end
+
+ it 'cleans up cached file' do
+ cache_dir = File.join(import_export_upload.export_file.cache_path(nil), '*')
+
+ import_export_upload.save!
+
+ expect(Dir[cache_dir]).to be_empty
+ end
+ end
end
end
diff --git a/spec/workers/cluster_configure_worker_spec.rb b/spec/workers/cluster_configure_worker_spec.rb
index bdb8e0e9c84..daf014ac574 100644
--- a/spec/workers/cluster_configure_worker_spec.rb
+++ b/spec/workers/cluster_configure_worker_spec.rb
@@ -68,6 +68,16 @@ describe ClusterConfigureWorker, '#perform' do
it_behaves_like 'configured cluster'
end
+ context 'when cluster is not managed' do
+ let(:cluster) { create(:cluster, :not_managed) }
+
+ it 'does not configure the cluster' do
+ expect(Clusters::RefreshService).not_to receive(:create_or_update_namespaces_for_cluster)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+
context 'when cluster does not exist' do
it 'does not provision a cluster' do
expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).not_to receive(:execute)
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 2459638c3e6..cc1c23bb9e7 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -115,6 +115,19 @@ describe GitGarbageCollectWorker do
end
end
+ context "pack_refs" do
+ before do
+ expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
+ end
+
+ it "calls Gitaly" do
+ expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(:pack_refs)
+ .and_return(nil)
+
+ subject.perform(project.id, :pack_refs, lease_key, lease_uuid)
+ end
+ end
+
context "repack_incremental" do
before do
expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)