
gitlab.com/gitlab-org/gitlab-foss.git
path: root/spec
author    Alex Hanselka <alex@gitlab.com>    2018-12-07 00:43:03 +0300
committer Alex Hanselka <alex@gitlab.com>    2018-12-07 00:43:03 +0300
commit    6bd69d0df9fb6231f52c5d75b09650383a47dd25 (patch)
tree      d02a92ce4d8dcd2701ab5a256d22769c96674b0b /spec
parent    85bfdd5c2b9bc3e3d7710ed01b5e1947a35309fc (diff)
parent    26fc6a4958c012b3f6c15cc01a6684e87d381488 (diff)
Merge branch 'master' into 11-6-stable-prepare-rc4
* master: (235 commits)
  Change dropdown divider color to gray-200 (#dfdfdf)
  Add feature flag for workhorse content type calculation
  Use BFG object maps to clean projects
  Update gitaly-proto to v1.3.0
  Fix gitlab:web_hook tasks
  Introduce Knative Serverless Tab
  Sort issues and merge requests in ascending and descending order
  Use approximate counts for big tables
  Fixed renamed and mode changed diff viewers
  Add a changelog
  Revert "Merge branch 'ce-6983-promote-starting-a-gitlab-com-trial' into 'master'"
  Bump the gitaly version to 1.7.0
  Resolve "Can add an existing group member into a group project with new permissions but permissions are not overridden"
  Expose merge request pipeline variables
  Fixture for forks
  Add an 'How-To' section to the Review Apps doc
  Ensure the default ApplicationSetting record is created first
  [QA] Better retrieve job ID for a retried job
  Retry the review-{deploy,qa} jobs twice
  Optimized file search to work without limits
  ...
Diffstat (limited to 'spec')
-rw-r--r--  spec/config/settings_spec.rb | 98
-rw-r--r--  spec/controllers/boards/issues_controller_spec.rb | 6
-rw-r--r--  spec/controllers/concerns/issuable_collections_spec.rb | 1
-rw-r--r--  spec/controllers/projects/avatars_controller_spec.rb | 35
-rw-r--r--  spec/controllers/projects/blob_controller_spec.rb | 5
-rw-r--r--  spec/controllers/projects/jobs_controller_spec.rb | 58
-rw-r--r--  spec/controllers/projects/merge_requests/diffs_controller_spec.rb | 12
-rw-r--r--  spec/controllers/projects/raw_controller_spec.rb | 74
-rw-r--r--  spec/controllers/projects/serverless/functions_controller_spec.rb | 72
-rw-r--r--  spec/controllers/projects/settings/repository_controller_spec.rb | 31
-rw-r--r--  spec/controllers/projects/wikis_controller_spec.rb | 82
-rw-r--r--  spec/controllers/snippets_controller_spec.rb | 21
-rw-r--r--  spec/factories/clusters/kubernetes_namespaces.rb | 8
-rw-r--r--  spec/features/admin/admin_users_spec.rb | 53
-rw-r--r--  spec/features/dashboard/merge_requests_spec.rb | 1
-rw-r--r--  spec/features/groups/members/list_members_spec.rb | 9
-rw-r--r--  spec/features/groups/members/manage_members_spec.rb | 9
-rw-r--r--  spec/features/ide_spec.rb | 2
-rw-r--r--  spec/features/issuables/default_sort_order_spec.rb | 179
-rw-r--r--  spec/features/issuables/sorting_list_spec.rb | 226
-rw-r--r--  spec/features/issues/filtered_search/filter_issues_spec.rb | 2
-rw-r--r--  spec/features/issues/user_sorts_issues_spec.rb | 8
-rw-r--r--  spec/features/merge_request/user_expands_diff_spec.rb | 26
-rw-r--r--  spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb | 365
-rw-r--r--  spec/features/merge_requests/user_sorts_merge_requests_spec.rb | 12
-rw-r--r--  spec/features/projects/commit/builds_spec.rb | 2
-rw-r--r--  spec/features/projects/labels/issues_sorted_by_priority_spec.rb | 4
-rw-r--r--  spec/features/projects/members/list_spec.rb | 9
-rw-r--r--  spec/features/projects/pages_spec.rb | 2
-rw-r--r--  spec/features/projects/pipelines/pipeline_spec.rb | 150
-rw-r--r--  spec/features/projects/serverless/functions_spec.rb | 49
-rw-r--r--  spec/features/projects/settings/repository_settings_spec.rb | 35
-rw-r--r--  spec/features/projects_spec.rb | 2
-rw-r--r--  spec/finders/group_members_finder_spec.rb | 2
-rw-r--r--  spec/finders/projects/serverless/functions_finder_spec.rb | 60
-rw-r--r--  spec/fixtures/api/schemas/entities/issue_board.json | 2
-rw-r--r--  spec/fixtures/api/schemas/entities/issue_boards.json | 15
-rw-r--r--  spec/fixtures/bfg_object_map.txt | 1
-rw-r--r--  spec/fixtures/security-reports/feature-branch/gl-dependency-scanning-report.json | 32
-rw-r--r--  spec/fixtures/security-reports/master/gl-dependency-scanning-report.json | 32
-rw-r--r--  spec/frontend/.eslintrc.yml | 9
-rw-r--r--  spec/frontend/dummy_spec.js | 1
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 25
-rw-r--r--  spec/helpers/sorting_helper_spec.rb | 43
-rw-r--r--  spec/javascripts/api_spec.js | 17
-rw-r--r--  spec/javascripts/clusters/components/applications_spec.js | 63
-rw-r--r--  spec/javascripts/diffs/components/diff_file_spec.js | 26
-rw-r--r--  spec/javascripts/diffs/components/diff_gutter_avatars_spec.js | 29
-rw-r--r--  spec/javascripts/diffs/store/actions_spec.js | 53
-rw-r--r--  spec/javascripts/diffs/store/mutations_spec.js | 78
-rw-r--r--  spec/javascripts/diffs/store/utils_spec.js | 22
-rw-r--r--  spec/javascripts/lib/utils/file_upload_spec.js | 36
-rw-r--r--  spec/javascripts/notes/components/noteable_discussion_spec.js | 46
-rw-r--r--  spec/javascripts/notes/stores/mutation_spec.js | 10
-rw-r--r--  spec/javascripts/pipelines/pipeline_url_spec.js | 7
-rw-r--r--  spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js | 26
-rw-r--r--  spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js | 23
-rw-r--r--  spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb | 77
-rw-r--r--  spec/lib/gitlab/checks/branch_check_spec.rb | 90
-rw-r--r--  spec/lib/gitlab/checks/change_access_spec.rb | 241
-rw-r--r--  spec/lib/gitlab/checks/diff_check_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/checks/lfs_check_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/checks/push_check_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/checks/tag_check_spec.rb | 64
-rw-r--r--  spec/lib/gitlab/ci/config/entry/except_policy_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/ci/config/entry/global_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/ci/config/entry/jobs_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/config/entry/only_policy_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/ci/config/entry/policy_spec.rb | 167
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/crypto_helper_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/database/count/exact_count_strategy_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/database/count_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/diff/file_collection/commit_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/git/repository_cleaner_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/gpg/commit_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/group_hierarchy_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 8
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_restorer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_saver_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/import_export/relation_rename_service_spec.rb | 111
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 1
-rw-r--r--  spec/lib/gitlab/kubernetes/kube_client_spec.rb | 79
-rw-r--r--  spec/lib/gitlab/kubernetes_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/lfs_token_spec.rb | 55
-rw-r--r--  spec/lib/gitlab/project_search_results_spec.rb | 131
-rw-r--r--  spec/lib/gitlab/search/found_blob_spec.rb | 138
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/utils_spec.rb | 38
-rw-r--r--  spec/lib/omni_auth/strategies/jwt_spec.rb | 70
-rw-r--r--  spec/migrations/schedule_runners_token_encryption_spec.rb | 38
-rw-r--r--  spec/models/blob_spec.rb | 3
-rw-r--r--  spec/models/ci/build_metadata_spec.rb | 2
-rw-r--r--  spec/models/ci/build_spec.rb | 30
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb | 2
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 2
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 329
-rw-r--r--  spec/models/ci/runner_spec.rb | 2
-rw-r--r--  spec/models/ci/stage_spec.rb | 2
-rw-r--r--  spec/models/clusters/applications/ingress_spec.rb | 2
-rw-r--r--  spec/models/clusters/applications/knative_spec.rb | 42
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 122
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 32
-rw-r--r--  spec/models/commit_status_spec.rb | 2
-rw-r--r--  spec/models/concerns/chronic_duration_attribute_spec.rb | 3
-rw-r--r--  spec/models/concerns/deployment_platform_spec.rb | 75
-rw-r--r--  spec/models/concerns/token_authenticatable_spec.rb | 121
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/base_spec.rb | 65
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb | 156
-rw-r--r--  spec/models/deployment_spec.rb | 2
-rw-r--r--  spec/models/gpg_signature_spec.rb | 2
-rw-r--r--  spec/models/group_spec.rb | 33
-rw-r--r--  spec/models/internal_id_spec.rb | 2
-rw-r--r--  spec/models/list_spec.rb | 2
-rw-r--r--  spec/models/member_spec.rb | 23
-rw-r--r--  spec/models/members/group_member_spec.rb | 22
-rw-r--r--  spec/models/members/project_member_spec.rb | 15
-rw-r--r--  spec/models/merge_request_spec.rb | 113
-rw-r--r--  spec/models/namespace_spec.rb | 4
-rw-r--r--  spec/models/notification_setting_spec.rb | 2
-rw-r--r--  spec/models/project_auto_devops_spec.rb | 2
-rw-r--r--  spec/models/project_spec.rb | 117
-rw-r--r--  spec/models/prometheus_metric_spec.rb | 2
-rw-r--r--  spec/models/push_event_payload_spec.rb | 2
-rw-r--r--  spec/models/resource_label_event_spec.rb | 2
-rw-r--r--  spec/models/user_callout_spec.rb | 2
-rw-r--r--  spec/models/user_spec.rb | 12
-rw-r--r--  spec/presenters/group_member_presenter_spec.rb | 8
-rw-r--r--  spec/presenters/project_member_presenter_spec.rb | 6
-rw-r--r--  spec/requests/api/commit_statuses_spec.rb | 4
-rw-r--r--  spec/requests/api/commits_spec.rb | 2
-rw-r--r--  spec/requests/api/files_spec.rb | 47
-rw-r--r--  spec/requests/api/members_spec.rb | 31
-rw-r--r--  spec/requests/api/pipelines_spec.rb | 12
-rw-r--r--  spec/requests/api/projects_spec.rb | 2
-rw-r--r--  spec/requests/api/triggers_spec.rb | 2
-rw-r--r--  spec/requests/git_http_spec.rb | 2
-rw-r--r--  spec/routing/project_routing_spec.rb | 20
-rw-r--r--  spec/serializers/issue_board_entity_spec.rb | 23
-rw-r--r--  spec/serializers/issue_serializer_spec.rb | 8
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 2
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 225
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 6
-rw-r--r--  spec/services/clusters/gcp/finalize_creation_service_spec.rb | 31
-rw-r--r--  spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb | 133
-rw-r--r--  spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb (renamed from spec/services/clusters/gcp/kubernetes/create_service_account_service_spec.rb) | 12
-rw-r--r--  spec/services/clusters/refresh_service_spec.rb | 107
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb | 72
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 88
-rw-r--r--  spec/services/notification_service_spec.rb | 21
-rw-r--r--  spec/services/projects/cleanup_service_spec.rb | 44
-rw-r--r--  spec/services/projects/create_service_spec.rb | 26
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 26
-rw-r--r--  spec/support/active_record_enum.rb | 12
-rw-r--r--  spec/support/features/discussion_comments_shared_example.rb | 31
-rw-r--r--  spec/support/helpers/features/list_rows_helpers.rb | 28
-rw-r--r--  spec/support/helpers/features/sorting_helpers.rb | 4
-rw-r--r--  spec/support/helpers/git_http_helpers.rb | 5
-rw-r--r--  spec/support/helpers/kubernetes_helpers.rb | 86
-rw-r--r--  spec/support/helpers/stub_configuration.rb | 5
-rw-r--r--  spec/support/shared_contexts/change_access_checks_shared_context.rb | 29
-rw-r--r--  spec/support/shared_examples/ci_trace_shared_examples.rb | 6
-rw-r--r--  spec/support/shared_examples/diff_file_collections.rb | 16
-rw-r--r--  spec/support/shared_examples/file_finder.rb | 13
-rw-r--r--  spec/support/shared_examples/models/member_shared_examples.rb | 77
-rw-r--r--  spec/support/shared_examples/only_except_policy_examples.rb | 167
-rw-r--r--  spec/tasks/gitlab/check_rake_spec.rb | 108
-rw-r--r--  spec/tasks/gitlab/web_hook_rake_spec.rb | 92
-rw-r--r--  spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb (renamed from spec/features/admin/admin_active_tab_spec.rb) | 34
-rw-r--r--  spec/workers/cluster_platform_configure_worker_spec.rb | 52
-rw-r--r--  spec/workers/pipeline_schedule_worker_spec.rb | 13
-rw-r--r--  spec/workers/rebase_worker_spec.rb | 2
-rw-r--r--  spec/workers/repository_cleanup_worker_spec.rb | 55
-rw-r--r--  spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb | 28
180 files changed, 6267 insertions, 1217 deletions
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index 83b2de47741..c89b5f48dc0 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -6,4 +6,102 @@ describe Settings do
expect(described_class.omniauth.enabled).to be true
end
end
+
+ describe '.attr_encrypted_db_key_base_truncated' do
+ it 'is a string with maximum 32 bytes size' do
+ expect(described_class.attr_encrypted_db_key_base_truncated.bytesize)
+ .to be <= 32
+ end
+ end
+
+ describe '.attr_encrypted_db_key_base_12' do
+ context 'when db key base secret is less than 12 bytes' do
+ before do
+ allow(described_class)
+ .to receive(:attr_encrypted_db_key_base)
+ .and_return('a' * 10)
+ end
+
+ it 'expands db key base secret to 12 bytes' do
+ expect(described_class.attr_encrypted_db_key_base_12)
+ .to eq(('a' * 10) + ('0' * 2))
+ end
+ end
+
+ context 'when key has multiple multi-byte UTF chars exceeding 12 bytes' do
+ before do
+ allow(described_class)
+ .to receive(:attr_encrypted_db_key_base)
+ .and_return('❤' * 18)
+ end
+
+ it 'does not use more than 32 bytes' do
+ db_key_base = described_class.attr_encrypted_db_key_base_12
+
+ expect(db_key_base).to eq('❤' * 4)
+ expect(db_key_base.bytesize).to eq 12
+ end
+ end
+ end
+
+ describe '.attr_encrypted_db_key_base_32' do
+ context 'when db key base secret is less than 32 bytes' do
+ before do
+ allow(described_class)
+ .to receive(:attr_encrypted_db_key_base)
+ .and_return('a' * 10)
+ end
+
+ it 'expands db key base secret to 32 bytes' do
+ expanded_key_base = ('a' * 10) + ('0' * 22)
+
+ expect(expanded_key_base.bytesize).to eq 32
+ expect(described_class.attr_encrypted_db_key_base_32)
+ .to eq expanded_key_base
+ end
+ end
+
+ context 'when db key base secret is 32 bytes' do
+ before do
+ allow(described_class)
+ .to receive(:attr_encrypted_db_key_base)
+ .and_return('a' * 32)
+ end
+
+ it 'returns original value' do
+ expect(described_class.attr_encrypted_db_key_base_32)
+ .to eq 'a' * 32
+ end
+ end
+
+ context 'when db key base contains multi-byte UTF character' do
+ before do
+ allow(described_class)
+ .to receive(:attr_encrypted_db_key_base)
+ .and_return('❤' * 6)
+ end
+
+ it 'does not use more than 32 bytes' do
+ db_key_base = described_class.attr_encrypted_db_key_base_32
+
+ expect(db_key_base).to eq '❤❤❤❤❤❤' + ('0' * 14)
+ expect(db_key_base.bytesize).to eq 32
+ end
+ end
+
+ context 'when db key base multi-byte UTF chars exceeding 32 bytes' do
+ before do
+ allow(described_class)
+ .to receive(:attr_encrypted_db_key_base)
+ .and_return('❤' * 18)
+ end
+
+ it 'does not use more than 32 bytes' do
+ db_key_base = described_class.attr_encrypted_db_key_base_32
+
+ expect(db_key_base).to eq(('❤' * 10) + ('0' * 2))
+ expect(db_key_base.bytesize).to eq 32
+ end
+ end
+ end
end
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index 98946e4287b..6d0483f0032 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -50,7 +50,7 @@ describe Boards::IssuesController do
parsed_response = JSON.parse(response.body)
- expect(response).to match_response_schema('issues')
+ expect(response).to match_response_schema('entities/issue_boards')
expect(parsed_response['issues'].length).to eq 2
expect(development.issues.map(&:relative_position)).not_to include(nil)
end
@@ -121,7 +121,7 @@ describe Boards::IssuesController do
parsed_response = JSON.parse(response.body)
- expect(response).to match_response_schema('issues')
+ expect(response).to match_response_schema('entities/issue_boards')
expect(parsed_response['issues'].length).to eq 2
end
end
@@ -168,7 +168,7 @@ describe Boards::IssuesController do
it 'returns the created issue' do
create_issue user: user, board: board, list: list1, title: 'New issue'
- expect(response).to match_response_schema('issue')
+ expect(response).to match_response_schema('entities/issue_board')
end
end
diff --git a/spec/controllers/concerns/issuable_collections_spec.rb b/spec/controllers/concerns/issuable_collections_spec.rb
index e93c923fd39..f87eed6ff9f 100644
--- a/spec/controllers/concerns/issuable_collections_spec.rb
+++ b/spec/controllers/concerns/issuable_collections_spec.rb
@@ -86,6 +86,7 @@ describe IssuableCollections do
it 'only allows whitelisted params' do
allow(controller).to receive(:cookies).and_return({})
+ allow(controller).to receive(:current_user).and_return(nil)
finder_options = controller.send(:finder_options)
diff --git a/spec/controllers/projects/avatars_controller_spec.rb b/spec/controllers/projects/avatars_controller_spec.rb
index 14059cff74c..5a77a7ac06f 100644
--- a/spec/controllers/projects/avatars_controller_spec.rb
+++ b/spec/controllers/projects/avatars_controller_spec.rb
@@ -26,12 +26,37 @@ describe Projects::AvatarsController do
context 'when the avatar is stored in the repository' do
let(:filepath) { 'files/images/logo-white.png' }
- it 'sends the avatar' do
- subject
+ context 'when feature flag workhorse_set_content_type is' do
+ before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
+ end
- expect(response).to have_gitlab_http_status(200)
- expect(response.header['Content-Type']).to eq('image/png')
- expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ context 'enabled' do
+ let(:flag_value) { true }
+
+ it 'sends the avatar' do
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header['Content-Disposition']).to eq('inline')
+ expect(response.header['Content-Type']).to eq 'image/png'
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ end
+ end
+
+ context 'disabled' do
+ let(:flag_value) { false }
+
+ it 'sends the avatar' do
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header['Content-Type']).to eq('image/png')
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq nil
+ end
+ end
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 3c5a21c47fa..9fc6af6a045 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -35,6 +35,11 @@ describe Projects::BlobController do
let(:id) { 'binary-encoding/encoding/binary-1.bin' }
it { is_expected.to respond_with(:success) }
end
+
+ context "Markdown file" do
+ let(:id) { 'master/README.md' }
+ it { is_expected.to respond_with(:success) }
+ end
end
context 'with file path and JSON format' do
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index da3d658d061..51a7cc63cef 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -838,23 +838,48 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context "when job has a trace artifact" do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
- it 'returns a trace' do
- response = subject
+ context 'when feature flag workhorse_set_content_type is' do
+ before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
- expect(response.body).to eq(job.job_artifacts_trace.open.read)
+ context 'enabled' do
+ let(:flag_value) { true }
+
+ it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
+ response = subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
+ expect(response.body).to eq(job.job_artifacts_trace.open.read)
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ end
+ end
+
+ context 'disabled' do
+ let(:flag_value) { false }
+
+ it 'returns a trace' do
+ response = subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
+ expect(response.body).to eq(job.job_artifacts_trace.open.read)
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to be nil
+ end
+ end
end
end
context "when job has a trace file" do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
- it "send a trace file" do
+ it 'sends a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
+ expect(response.headers["Content-Disposition"]).to match(/^inline/)
expect(response.body).to eq("BUILD TRACE")
end
end
@@ -866,12 +891,27 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
job.update_column(:trace, "Sample trace")
end
- it "send a trace file" do
+ it 'sends a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers["Content-Type"]).to eq("text/plain; charset=utf-8")
- expect(response.body).to eq("Sample trace")
+ expect(response.headers['Content-Type']).to eq('text/plain; charset=utf-8')
+ expect(response.headers['Content-Disposition']).to match(/^inline/)
+ expect(response.body).to eq('Sample trace')
+ end
+
+ context 'when trace format is not text/plain' do
+ before do
+ job.update_column(:trace, '<html></html>')
+ end
+
+ it 'sets content disposition to attachment' do
+ response = subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Type']).to eq('text/plain; charset=utf-8')
+ expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ end
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 9dc06436c72..8fc5d302af6 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -36,6 +36,18 @@ describe Projects::MergeRequests::DiffsController do
end
end
+ context 'when note has no position' do
+ before do
+ create(:legacy_diff_note_on_merge_request, project: project, noteable: merge_request, position: nil)
+ end
+
+ it 'serializes merge request diff collection' do
+ expect_any_instance_of(DiffsSerializer).to receive(:represent).with(an_instance_of(Gitlab::Diff::FileCollection::MergeRequestDiff), an_instance_of(Hash))
+
+ go
+ end
+ end
+
context 'with forked projects with submodules' do
render_views
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index 6b658bf5295..d3cd15fbcd7 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -14,26 +14,74 @@ describe Projects::RawController do
context 'regular filename' do
let(:filepath) { 'master/README.md' }
- it 'delivers ASCII file' do
- subject
-
- expect(response).to have_gitlab_http_status(200)
- expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
- expect(response.header['Content-Disposition'])
- .to eq('inline')
- expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ context 'when feature flag workhorse_set_content_type is' do
+ before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
+
+ subject
+ end
+
+ context 'enabled' do
+ let(:flag_value) { true }
+
+ it 'delivers ASCII file' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
+ expect(response.header['Content-Disposition']).to eq('inline')
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ end
+ end
+
+ context 'disabled' do
+ let(:flag_value) { false }
+
+ it 'delivers ASCII file' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
+ expect(response.header['Content-Disposition']).to eq('inline')
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq nil
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ end
+ end
end
end
context 'image header' do
let(:filepath) { 'master/files/images/6049019_460s.jpg' }
- it 'sets image content type header' do
- subject
+ context 'when feature flag workhorse_set_content_type is' do
+ before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
+ end
+
+ context 'enabled' do
+ let(:flag_value) { true }
+
+ it 'leaves image content disposition' do
+ subject
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header['Content-Type']).to eq('image/jpeg')
+ expect(response.header['Content-Disposition']).to eq('inline')
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ end
+ end
+
+ context 'disabled' do
+ let(:flag_value) { false }
+
+ it 'sets image content type header' do
+ subject
- expect(response).to have_gitlab_http_status(200)
- expect(response.header['Content-Type']).to eq('image/jpeg')
- expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header['Content-Type']).to eq('image/jpeg')
+ expect(response.header['Content-Disposition']).to eq('inline')
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq nil
+ expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('git-blob:')
+ end
+ end
end
end
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
new file mode 100644
index 00000000000..284b582b1f5
--- /dev/null
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Serverless::FunctionsController do
+ include KubernetesHelpers
+ include ReactiveCachingHelpers
+
+ let(:user) { create(:user) }
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
+ let(:service) { cluster.platform_kubernetes }
+ let(:project) { cluster.project}
+
+ let(:namespace) do
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ def params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace.to_param,
+ project_id: project.to_param)
+ end
+
+ describe 'GET #index' do
+ context 'empty cache' do
+ it 'has no data' do
+ get :index, params({ format: :json })
+
+ expect(response).to have_gitlab_http_status(204)
+ end
+
+ it 'renders an html page' do
+ get :index, params
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+ end
+
+ describe 'GET #index with data', :use_clean_rails_memory_store_caching do
+ before do
+ stub_reactive_cache(knative, services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"])
+ end
+
+ it 'has data' do
+ get :index, params({ format: :json })
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response).to contain_exactly(
+ a_hash_including(
+ "name" => project.name,
+ "url" => "http://#{project.name}.#{namespace.namespace}.example.com"
+ )
+ )
+ end
+
+ it 'has data in html' do
+ get :index, params
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+end
diff --git a/spec/controllers/projects/settings/repository_controller_spec.rb b/spec/controllers/projects/settings/repository_controller_spec.rb
index 9cee40b7553..69ec971bb75 100644
--- a/spec/controllers/projects/settings/repository_controller_spec.rb
+++ b/spec/controllers/projects/settings/repository_controller_spec.rb
@@ -17,4 +17,35 @@ describe Projects::Settings::RepositoryController do
expect(response).to render_template(:show)
end
end
+
+ describe 'PUT cleanup' do
+ def do_put!
+ object_map = fixture_file_upload('spec/fixtures/bfg_object_map.txt')
+
+ Sidekiq::Testing.fake! do
+ put :cleanup, namespace_id: project.namespace, project_id: project, project: { object_map: object_map }
+ end
+ end
+
+ context 'feature enabled' do
+ it 'enqueues a RepositoryCleanupWorker' do
+ stub_feature_flags(project_cleanup: true)
+
+ do_put!
+
+ expect(response).to redirect_to project_settings_repository_path(project)
+ expect(RepositoryCleanupWorker.jobs.count).to eq(1)
+ end
+ end
+
+ context 'feature disabled' do
+ it 'shows a 404 error' do
+ stub_feature_flags(project_cleanup: false)
+
+ do_put!
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/wikis_controller_spec.rb b/spec/controllers/projects/wikis_controller_spec.rb
index 6d75152857b..b974d927856 100644
--- a/spec/controllers/projects/wikis_controller_spec.rb
+++ b/spec/controllers/projects/wikis_controller_spec.rb
@@ -52,24 +52,56 @@ describe Projects::WikisController do
let(:path) { upload_file_to_wiki(project, user, file_name) }
- before do
- subject
- end
-
subject { get :show, namespace_id: project.namespace, project_id: project, id: path }
context 'when file is an image' do
let(:file_name) { 'dk.png' }
- it 'renders the content inline' do
- expect(response.headers['Content-Disposition']).to match(/^inline/)
- end
+ context 'when feature flag workhorse_set_content_type is' do
+ before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
+
+ subject
+ end
- context 'when file is a svg' do
- let(:file_name) { 'unsanitized.svg' }
+ context 'enabled' do
+ let(:flag_value) { true }
- it 'renders the content as an attachment' do
- expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ it 'delivers the image' do
+ expect(response.headers['Content-Type']).to eq('image/png')
+ expect(response.headers['Content-Disposition']).to match(/^inline/)
+ expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ end
+
+ context 'when file is a svg' do
+ let(:file_name) { 'unsanitized.svg' }
+
+ it 'delivers the image' do
+ expect(response.headers['Content-Type']).to eq('image/svg+xml')
+ expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ end
+ end
+ end
+
+ context 'disabled' do
+ let(:flag_value) { false }
+
+ it 'renders the content inline' do
+ expect(response.headers['Content-Type']).to eq('image/png')
+ expect(response.headers['Content-Disposition']).to match(/^inline/)
+ expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq nil
+ end
+
+ context 'when file is a svg' do
+ let(:file_name) { 'unsanitized.svg' }
+
+ it 'renders the content as an attachment' do
+ expect(response.headers['Content-Type']).to eq('image/svg+xml')
+ expect(response.headers['Content-Disposition']).to match(/^attachment/)
+ expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq nil
+ end
+ end
end
end
end
@@ -77,8 +109,32 @@ describe Projects::WikisController do
context 'when file is a pdf' do
let(:file_name) { 'git-cheat-sheet.pdf' }
- it 'sets the content type to application/octet-stream' do
- expect(response.headers['Content-Type']).to eq 'application/octet-stream'
+ context 'when feature flag workhorse_set_content_type is' do
+ before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
+
+ subject
+ end
+
+ context 'enabled' do
+ let(:flag_value) { true }
+
+ it 'sets the content type to sets the content response headers' do
+ expect(response.headers['Content-Type']).to eq 'application/octet-stream'
+ expect(response.headers['Content-Disposition']).to match(/^inline/)
+ expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ end
+ end
+
+ context 'disabled' do
+ let(:flag_value) { false }
+
+ it 'sets the content response headers' do
+ expect(response.headers['Content-Type']).to eq 'application/octet-stream'
+ expect(response.headers['Content-Disposition']).to match(/^inline/)
+ expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq nil
+ end
+ end
end
end
end
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 9effe47ab05..957bab638b1 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -437,7 +437,10 @@ describe SnippetsController do
end
context 'when signed in user is the author' do
+ let(:flag_value) { false }
+
before do
+ stub_feature_flags(workhorse_set_content_type: flag_value)
get :raw, id: personal_snippet.to_param
end
@@ -451,6 +454,24 @@ describe SnippetsController do
expect(response.header['Content-Disposition']).to match(/inline/)
end
+
+ context 'when feature flag workhorse_set_content_type is' do
+ context 'enabled' do
+ let(:flag_value) { true }
+
+ it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
+ end
+ end
+
+ context 'disabled' do
+ it "does not set #{Gitlab::Workhorse::DETECT_HEADER} header" do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to be nil
+ end
+ end
+ end
end
end
diff --git a/spec/factories/clusters/kubernetes_namespaces.rb b/spec/factories/clusters/kubernetes_namespaces.rb
index 6ad93fb0f45..3b50a57433f 100644
--- a/spec/factories/clusters/kubernetes_namespaces.rb
+++ b/spec/factories/clusters/kubernetes_namespaces.rb
@@ -5,10 +5,12 @@ FactoryBot.define do
association :cluster, :project, :provided_by_gcp
after(:build) do |kubernetes_namespace|
- cluster_project = kubernetes_namespace.cluster.cluster_project
+ if kubernetes_namespace.cluster.project_type?
+ cluster_project = kubernetes_namespace.cluster.cluster_project
- kubernetes_namespace.project = cluster_project.project
- kubernetes_namespace.cluster_project = cluster_project
+ kubernetes_namespace.project = cluster_project.project
+ kubernetes_namespace.cluster_project = cluster_project
+ end
end
trait :with_token do
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index d5516b334b9..931095936a6 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe "Admin::Users" do
+ include Spec::Support::Helpers::Features::ListRowsHelpers
+
let!(:user) do
create(:omniauth_user, provider: 'twitter', extern_uid: '123456')
end
@@ -30,6 +32,51 @@ describe "Admin::Users" do
expect(page).to have_button('Delete user and contributions')
end
+ describe 'search and sort' do
+ before do
+ create(:user, name: 'Foo Bar')
+ create(:user, name: 'Foo Baz')
+ create(:user, name: 'Dmitriy')
+ end
+
+ it 'searches users by name' do
+ visit admin_users_path(search_query: 'Foo')
+
+ expect(page).to have_content('Foo Bar')
+ expect(page).to have_content('Foo Baz')
+ expect(page).not_to have_content('Dmitriy')
+ end
+
+ it 'sorts users by name' do
+ visit admin_users_path
+
+ sort_by('Name')
+
+ expect(first_row.text).to include('Dmitriy')
+ expect(second_row.text).to include('Foo Bar')
+ end
+
+ it 'sorts search results only' do
+ visit admin_users_path(search_query: 'Foo')
+
+ sort_by('Name')
+
+ expect(page).not_to have_content('Dmitriy')
+ expect(first_row.text).to include('Foo Bar')
+ expect(second_row.text).to include('Foo Baz')
+ end
+
+ it 'searches with respect of sorting' do
+ visit admin_users_path(sort: 'Name')
+
+ fill_in :search_query, with: 'Foo'
+ click_button('Search users')
+
+ expect(first_row.text).to include('Foo Bar')
+ expect(second_row.text).to include('Foo Baz')
+ end
+ end
+
describe 'Two-factor Authentication filters' do
it 'counts users who have enabled 2FA' do
create(:user, :two_factor)
@@ -566,4 +613,10 @@ describe "Admin::Users" do
def check_breadcrumb(content)
expect(find('.breadcrumbs-sub-title')).to have_content(content)
end
+
+ def sort_by(text)
+ page.within('.user-sort-dropdown') do
+ click_link text
+ end
+ end
end
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 9ffa75aee47..282bf542e77 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -6,7 +6,6 @@ describe 'Dashboard Merge Requests' do
include ProjectForksHelper
let(:current_user) { create :user }
- let(:user) { current_user }
let(:project) { create(:project) }
let(:public_project) { create(:project, :public, :repository) }
diff --git a/spec/features/groups/members/list_members_spec.rb b/spec/features/groups/members/list_members_spec.rb
index e1587a8b6a5..4ba7161601e 100644
--- a/spec/features/groups/members/list_members_spec.rb
+++ b/spec/features/groups/members/list_members_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe 'Groups > Members > List members' do
include Select2Helper
+ include Spec::Support::Helpers::Features::ListRowsHelpers
let(:user1) { create(:user, name: 'John Doe') }
let(:user2) { create(:user, name: 'Mary Jane') }
@@ -43,12 +44,4 @@ describe 'Groups > Members > List members' do
let(:user_with_status) { user2 }
end
end
-
- def first_row
- page.all('ul.content-list > li')[0]
- end
-
- def second_row
- page.all('ul.content-list > li')[1]
- end
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 0eda2c7f26d..e2b4a491a13 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe 'Groups > Members > Manage members' do
include Select2Helper
+ include Spec::Support::Helpers::Features::ListRowsHelpers
let(:user1) { create(:user, name: 'John Doe') }
let(:user2) { create(:user, name: 'Mary Jane') }
@@ -119,14 +120,6 @@ describe 'Groups > Members > Manage members' do
end
end
- def first_row
- page.all('ul.content-list > li')[0]
- end
-
- def second_row
- page.all('ul.content-list > li')[1]
- end
-
def add_user(id, role)
page.within ".users-group-form" do
select2(id, from: "#user_ids", multiple: true)
diff --git a/spec/features/ide_spec.rb b/spec/features/ide_spec.rb
index 65989c36c1e..6eb59ef72c2 100644
--- a/spec/features/ide_spec.rb
+++ b/spec/features/ide_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe 'IDE', :js do
- describe 'sub-groups' do
+ describe 'sub-groups', :nested_groups do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
diff --git a/spec/features/issuables/default_sort_order_spec.rb b/spec/features/issuables/default_sort_order_spec.rb
deleted file mode 100644
index caee7a67aec..00000000000
--- a/spec/features/issuables/default_sort_order_spec.rb
+++ /dev/null
@@ -1,179 +0,0 @@
-require 'spec_helper'
-
-describe 'Projects > Issuables > Default sort order' do
- let(:project) { create(:project, :public) }
-
- let(:first_created_issuable) { issuables.order_created_asc.first }
- let(:last_created_issuable) { issuables.order_created_desc.first }
-
- let(:first_updated_issuable) { issuables.order_updated_asc.first }
- let(:last_updated_issuable) { issuables.order_updated_desc.first }
-
- context 'for merge requests' do
- include MergeRequestHelpers
-
- let!(:issuables) do
- timestamps = [{ created_at: 3.minutes.ago, updated_at: 20.seconds.ago },
- { created_at: 2.minutes.ago, updated_at: 30.seconds.ago },
- { created_at: 4.minutes.ago, updated_at: 10.seconds.ago }]
-
- timestamps.each_with_index do |ts, i|
- create issuable_type, { title: "#{issuable_type}_#{i}",
- source_branch: "#{issuable_type}_#{i}",
- source_project: project }.merge(ts)
- end
-
- MergeRequest.all
- end
-
- context 'in the "merge requests" tab', :js do
- let(:issuable_type) { :merge_request }
-
- it 'is "last created"' do
- visit_merge_requests project
-
- expect(first_merge_request).to include(last_created_issuable.title)
- expect(last_merge_request).to include(first_created_issuable.title)
- end
- end
-
- context 'in the "merge requests / open" tab', :js do
- let(:issuable_type) { :merge_request }
-
- it 'is "created date"' do
- visit_merge_requests_with_state(project, 'open')
-
- expect(selected_sort_order).to eq('created date')
- expect(first_merge_request).to include(last_created_issuable.title)
- expect(last_merge_request).to include(first_created_issuable.title)
- end
- end
-
- context 'in the "merge requests / merged" tab', :js do
- let(:issuable_type) { :merged_merge_request }
-
- it 'is "last updated"' do
- visit_merge_requests_with_state(project, 'merged')
-
- expect(find('.issues-other-filters')).to have_content('Last updated')
- expect(first_merge_request).to include(last_updated_issuable.title)
- expect(last_merge_request).to include(first_updated_issuable.title)
- end
- end
-
- context 'in the "merge requests / closed" tab', :js do
- let(:issuable_type) { :closed_merge_request }
-
- it 'is "last updated"' do
- visit_merge_requests_with_state(project, 'closed')
-
- expect(find('.issues-other-filters')).to have_content('Last updated')
- expect(first_merge_request).to include(last_updated_issuable.title)
- expect(last_merge_request).to include(first_updated_issuable.title)
- end
- end
-
- context 'in the "merge requests / all" tab', :js do
- let(:issuable_type) { :merge_request }
-
- it 'is "created date"' do
- visit_merge_requests_with_state(project, 'all')
-
- expect(find('.issues-other-filters')).to have_content('Created date')
- expect(first_merge_request).to include(last_created_issuable.title)
- expect(last_merge_request).to include(first_created_issuable.title)
- end
- end
- end
-
- context 'for issues' do
- include IssueHelpers
-
- let!(:issuables) do
- timestamps = [{ created_at: 3.minutes.ago, updated_at: 20.seconds.ago },
- { created_at: 2.minutes.ago, updated_at: 30.seconds.ago },
- { created_at: 4.minutes.ago, updated_at: 10.seconds.ago }]
-
- timestamps.each_with_index do |ts, i|
- create issuable_type, { title: "#{issuable_type}_#{i}",
- project: project }.merge(ts)
- end
-
- Issue.all
- end
-
- context 'in the "issues" tab', :js do
- let(:issuable_type) { :issue }
-
- it 'is "created date"' do
- visit_issues project
-
- expect(find('.issues-other-filters')).to have_content('Created date')
- expect(first_issue).to include(last_created_issuable.title)
- expect(last_issue).to include(first_created_issuable.title)
- end
- end
-
- context 'in the "issues / open" tab', :js do
- let(:issuable_type) { :issue }
-
- it 'is "created date"' do
- visit_issues_with_state(project, 'open')
-
- expect(find('.issues-other-filters')).to have_content('Created date')
- expect(first_issue).to include(last_created_issuable.title)
- expect(last_issue).to include(first_created_issuable.title)
- end
- end
-
- context 'in the "issues / closed" tab', :js do
- let(:issuable_type) { :closed_issue }
-
- it 'is "last updated"' do
- visit_issues_with_state(project, 'closed')
-
- expect(find('.issues-other-filters')).to have_content('Last updated')
- expect(first_issue).to include(last_updated_issuable.title)
- expect(last_issue).to include(first_updated_issuable.title)
- end
- end
-
- context 'in the "issues / all" tab', :js do
- let(:issuable_type) { :issue }
-
- it 'is "created date"' do
- visit_issues_with_state(project, 'all')
-
- expect(find('.issues-other-filters')).to have_content('Created date')
- expect(first_issue).to include(last_created_issuable.title)
- expect(last_issue).to include(first_created_issuable.title)
- end
- end
-
- context 'when the sort in the URL is id_desc' do
- let(:issuable_type) { :issue }
-
- before do
- visit_issues(project, sort: 'id_desc')
- end
-
- it 'shows the sort order as created date' do
- expect(find('.issues-other-filters')).to have_content('Created date')
- expect(first_issue).to include(last_created_issuable.title)
- expect(last_issue).to include(first_created_issuable.title)
- end
- end
- end
-
- def selected_sort_order
- find('.filter-dropdown-container .dropdown button').text.downcase
- end
-
- def visit_merge_requests_with_state(project, state)
- visit_merge_requests project, state: state
- end
-
- def visit_issues_with_state(project, state)
- visit_issues project, state: state
- end
-end
diff --git a/spec/features/issuables/sorting_list_spec.rb b/spec/features/issuables/sorting_list_spec.rb
new file mode 100644
index 00000000000..0601dd47c03
--- /dev/null
+++ b/spec/features/issuables/sorting_list_spec.rb
@@ -0,0 +1,226 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'Sort Issuable List' do
+ let(:project) { create(:project, :public) }
+
+ let(:first_created_issuable) { issuables.order_created_asc.first }
+ let(:last_created_issuable) { issuables.order_created_desc.first }
+
+ let(:first_updated_issuable) { issuables.order_updated_asc.first }
+ let(:last_updated_issuable) { issuables.order_updated_desc.first }
+
+ context 'for merge requests' do
+ include MergeRequestHelpers
+
+ let!(:issuables) do
+ timestamps = [{ created_at: 3.minutes.ago, updated_at: 20.seconds.ago },
+ { created_at: 2.minutes.ago, updated_at: 30.seconds.ago },
+ { created_at: 4.minutes.ago, updated_at: 10.seconds.ago }]
+
+ timestamps.each_with_index do |ts, i|
+ create issuable_type, { title: "#{issuable_type}_#{i}",
+ source_branch: "#{issuable_type}_#{i}",
+ source_project: project }.merge(ts)
+ end
+
+ MergeRequest.all
+ end
+
+ context 'default sort order' do
+ context 'in the "merge requests" tab', :js do
+ let(:issuable_type) { :merge_request }
+
+ it 'is "last created"' do
+ visit_merge_requests project
+
+ expect(first_merge_request).to include(last_created_issuable.title)
+ expect(last_merge_request).to include(first_created_issuable.title)
+ end
+ end
+
+ context 'in the "merge requests / open" tab', :js do
+ let(:issuable_type) { :merge_request }
+
+ it 'is "created date"' do
+ visit_merge_requests_with_state(project, 'open')
+
+ expect(selected_sort_order).to eq('created date')
+ expect(first_merge_request).to include(last_created_issuable.title)
+ expect(last_merge_request).to include(first_created_issuable.title)
+ end
+ end
+
+ context 'in the "merge requests / merged" tab', :js do
+ let(:issuable_type) { :merged_merge_request }
+
+ it 'is "last updated"' do
+ visit_merge_requests_with_state(project, 'merged')
+
+ expect(find('.issues-other-filters')).to have_content('Last updated')
+ expect(first_merge_request).to include(last_updated_issuable.title)
+ expect(last_merge_request).to include(first_updated_issuable.title)
+ end
+ end
+
+ context 'in the "merge requests / closed" tab', :js do
+ let(:issuable_type) { :closed_merge_request }
+
+ it 'is "last updated"' do
+ visit_merge_requests_with_state(project, 'closed')
+
+ expect(find('.issues-other-filters')).to have_content('Last updated')
+ expect(first_merge_request).to include(last_updated_issuable.title)
+ expect(last_merge_request).to include(first_updated_issuable.title)
+ end
+ end
+
+ context 'in the "merge requests / all" tab', :js do
+ let(:issuable_type) { :merge_request }
+
+ it 'is "created date"' do
+ visit_merge_requests_with_state(project, 'all')
+
+ expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(first_merge_request).to include(last_created_issuable.title)
+ expect(last_merge_request).to include(first_created_issuable.title)
+ end
+ end
+
+ context 'custom sorting' do
+ let(:issuable_type) { :merge_request }
+
+ it 'supports sorting in asc and desc order' do
+ visit_merge_requests_with_state(project, 'open')
+
+ page.within('.issues-other-filters') do
+ click_button('Created date')
+ click_link('Last updated')
+ end
+
+ expect(first_merge_request).to include(last_updated_issuable.title)
+ expect(last_merge_request).to include(first_updated_issuable.title)
+
+ find('.issues-other-filters .filter-dropdown-container .qa-reverse-sort').click
+
+ expect(first_merge_request).to include(first_updated_issuable.title)
+ expect(last_merge_request).to include(last_updated_issuable.title)
+ end
+ end
+ end
+ end
+
+ context 'for issues' do
+ include IssueHelpers
+
+ let!(:issuables) do
+ timestamps = [{ created_at: 3.minutes.ago, updated_at: 20.seconds.ago },
+ { created_at: 2.minutes.ago, updated_at: 30.seconds.ago },
+ { created_at: 4.minutes.ago, updated_at: 10.seconds.ago }]
+
+ timestamps.each_with_index do |ts, i|
+ create issuable_type, { title: "#{issuable_type}_#{i}",
+ project: project }.merge(ts)
+ end
+
+ Issue.all
+ end
+
+ context 'default sort order' do
+ context 'in the "issues" tab', :js do
+ let(:issuable_type) { :issue }
+
+ it 'is "created date"' do
+ visit_issues project
+
+ expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(first_issue).to include(last_created_issuable.title)
+ expect(last_issue).to include(first_created_issuable.title)
+ end
+ end
+
+ context 'in the "issues / open" tab', :js do
+ let(:issuable_type) { :issue }
+
+ it 'is "created date"' do
+ visit_issues_with_state(project, 'open')
+
+ expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(first_issue).to include(last_created_issuable.title)
+ expect(last_issue).to include(first_created_issuable.title)
+ end
+ end
+
+ context 'in the "issues / closed" tab', :js do
+ let(:issuable_type) { :closed_issue }
+
+ it 'is "last updated"' do
+ visit_issues_with_state(project, 'closed')
+
+ expect(find('.issues-other-filters')).to have_content('Last updated')
+ expect(first_issue).to include(last_updated_issuable.title)
+ expect(last_issue).to include(first_updated_issuable.title)
+ end
+ end
+
+ context 'in the "issues / all" tab', :js do
+ let(:issuable_type) { :issue }
+
+ it 'is "created date"' do
+ visit_issues_with_state(project, 'all')
+
+ expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(first_issue).to include(last_created_issuable.title)
+ expect(last_issue).to include(first_created_issuable.title)
+ end
+ end
+
+ context 'when the sort in the URL is id_desc' do
+ let(:issuable_type) { :issue }
+
+ before do
+ visit_issues(project, sort: 'id_desc')
+ end
+
+ it 'shows the sort order as created date' do
+ expect(find('.issues-other-filters')).to have_content('Created date')
+ expect(first_issue).to include(last_created_issuable.title)
+ expect(last_issue).to include(first_created_issuable.title)
+ end
+ end
+ end
+
+ context 'custom sorting' do
+ let(:issuable_type) { :issue }
+
+ it 'supports sorting in asc and desc order' do
+ visit_issues_with_state(project, 'open')
+
+ page.within('.issues-other-filters') do
+ click_button('Created date')
+ click_link('Last updated')
+ end
+
+ expect(first_issue).to include(last_updated_issuable.title)
+ expect(last_issue).to include(first_updated_issuable.title)
+
+ find('.issues-other-filters .filter-dropdown-container .qa-reverse-sort').click
+
+ expect(first_issue).to include(first_updated_issuable.title)
+ expect(last_issue).to include(last_updated_issuable.title)
+ end
+ end
+ end
+
+ def selected_sort_order
+ find('.filter-dropdown-container .dropdown button').text.downcase
+ end
+
+ def visit_merge_requests_with_state(project, state)
+ visit_merge_requests project, state: state
+ end
+
+ def visit_issues_with_state(project, state)
+ visit_issues project, state: state
+ end
+end
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 4d9b8262f21..a29380a180e 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -430,7 +430,7 @@ describe 'Filter issues', :js do
expect_issues_list_count(2)
- sort_toggle = find('.filter-dropdown-container .dropdown-menu-toggle')
+ sort_toggle = find('.filter-dropdown-container .dropdown')
sort_toggle.click
find('.filter-dropdown-container .dropdown-menu li a', text: 'Created date').click
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index 3bc93933183..eebd2d57cca 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -20,9 +20,9 @@ describe "User sorts issues" do
end
it 'keeps the sort option' do
- find('.filter-dropdown-container button.dropdown-menu-toggle').click
+ find('.filter-dropdown-container .dropdown').click
- page.within('.content ul.dropdown-menu.dropdown-menu-right li') do
+ page.within('ul.dropdown-menu.dropdown-menu-right li') do
click_link('Milestone')
end
@@ -40,9 +40,9 @@ describe "User sorts issues" do
end
it "sorts by popularity" do
- find(".filter-dropdown-container button.dropdown-menu-toggle").click
+ find('.filter-dropdown-container .dropdown').click
- page.within(".content ul.dropdown-menu.dropdown-menu-right li") do
+ page.within('ul.dropdown-menu.dropdown-menu-right li') do
click_link("Popularity")
end
diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb
new file mode 100644
index 00000000000..3560b8d90bb
--- /dev/null
+++ b/spec/features/merge_request/user_expands_diff_spec.rb
@@ -0,0 +1,26 @@
+require 'spec_helper'
+
+describe 'User expands diff', :js do
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) }
+
+ before do
+ allow(Gitlab::Git::Diff).to receive(:size_limit).and_return(100.kilobytes)
+ allow(Gitlab::Git::Diff).to receive(:collapse_limit).and_return(10.kilobytes)
+
+ visit(diffs_project_merge_request_path(project, merge_request))
+
+ wait_for_requests
+ end
+
+ it 'allows user to expand diff' do
+ page.within find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd"]') do
+ click_link 'Click to expand it.'
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Click to expand it.')
+ expect(page).to have_selector('.code')
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
new file mode 100644
index 00000000000..7b473faa884
--- /dev/null
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -0,0 +1,365 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'Merge request > User sees merge request pipelines', :js do
+ include ProjectForksHelper
+ include TestReportsHelper
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { project.creator }
+
+ let(:config) do
+ {
+ build: {
+ script: 'build'
+ },
+ test: {
+ script: 'test',
+ only: ['merge_requests']
+ },
+ deploy: {
+ script: 'deploy',
+ except: ['merge_requests']
+ }
+ }
+ end
+
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ stub_feature_flags(ci_merge_request_pipeline: true)
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context 'when a user created a merge request in the parent project' do
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ target_project: project,
+ source_branch: 'feature',
+ target_branch: 'master')
+ end
+
+ let!(:push_pipeline) do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:push)
+ end
+
+ let!(:merge_request_pipeline) do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:merge_request, merge_request: merge_request)
+ end
+
+ before do
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
+ end
+
+ it 'sees branch pipelines and merge request pipelines in correct order' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('.ci-pending', count: 2)
+ expect(first('.js-pipeline-url-link')).to have_content("##{merge_request_pipeline.id}")
+ end
+ end
+
+ it 'sees the latest merge request pipeline as the head pipeline' do
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{merge_request_pipeline.id}")
+ end
+ end
+
+ context 'when a user updated a merge request in the parent project' do
+ let!(:push_pipeline_2) do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:push)
+ end
+
+ let!(:merge_request_pipeline_2) do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:merge_request, merge_request: merge_request)
+ end
+
+ before do
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
+ end
+
+ it 'sees branch pipelines and merge request pipelines in correct order' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('.ci-pending', count: 4)
+
+ expect(all('.js-pipeline-url-link')[0])
+ .to have_content("##{merge_request_pipeline_2.id}")
+
+ expect(all('.js-pipeline-url-link')[1])
+ .to have_content("##{merge_request_pipeline.id}")
+
+ expect(all('.js-pipeline-url-link')[2])
+ .to have_content("##{push_pipeline_2.id}")
+
+ expect(all('.js-pipeline-url-link')[3])
+ .to have_content("##{push_pipeline.id}")
+ end
+ end
+
+ it 'sees merge request tag for merge request pipelines' do
+ page.within('.ci-table') do
+ expect(all('.pipeline-tags')[0])
+ .to have_content("merge request")
+
+ expect(all('.pipeline-tags')[1])
+ .to have_content("merge request")
+
+ expect(all('.pipeline-tags')[2])
+ .not_to have_content("merge request")
+
+ expect(all('.pipeline-tags')[3])
+ .not_to have_content("merge request")
+ end
+ end
+
+ it 'sees the latest merge request pipeline as the head pipeline' do
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{merge_request_pipeline_2.id}")
+ end
+ end
+ end
+
+ context 'when a user merges a merge request in the parent project' do
+ before do
+ click_button 'Merge when pipeline succeeds'
+
+ wait_for_requests
+ end
+
+ context 'when merge request pipeline is pending' do
+ it 'waits for the head pipeline' do
+ expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_link('Cancel automatic merge')
+ end
+ end
+
+ context 'when merge request pipeline succeeds' do
+ before do
+ merge_request_pipeline.succeed!
+
+ wait_for_requests
+ end
+
+ it 'merges the merge request' do
+ expect(page).to have_content('Merged by')
+ expect(page).to have_link('Revert')
+ end
+ end
+
+ context 'when branch pipeline succeeds' do
+ before do
+ push_pipeline.succeed!
+
+ wait_for_requests
+ end
+
+ it 'waits for the head pipeline' do
+ expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_link('Cancel automatic merge')
+ end
+ end
+ end
+
+ context 'when there is no `merge_requests` keyword in .gitlab-ci.yml' do
+ let(:config) do
+ {
+ build: {
+ script: 'build'
+ },
+ test: {
+ script: 'test'
+ },
+ deploy: {
+ script: 'deploy'
+ }
+ }
+ end
+
+ it 'sees a branch pipeline in pipeline tab' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('.ci-pending', count: 1)
+ expect(first('.js-pipeline-url-link')).to have_content("##{push_pipeline.id}")
+ end
+ end
+
+ it 'sees the latest branch pipeline as the head pipeline' do
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{push_pipeline.id}")
+ end
+ end
+ end
+ end
+
+ context 'when a user created a merge request from a forked project to the parent project' do
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: forked_project,
+ target_project: project,
+ source_branch: 'feature',
+ target_branch: 'master')
+ end
+
+ let!(:push_pipeline) do
+ Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
+ .execute(:push)
+ end
+
+ let!(:merge_request_pipeline) do
+ Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
+ .execute(:merge_request, merge_request: merge_request)
+ end
+
+ let(:forked_project) { fork_project(project, user2, repository: true) }
+ let(:user2) { create(:user) }
+
+ before do
+ forked_project.add_maintainer(user2)
+
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
+ end
+
+ it 'sees branch pipelines and merge request pipelines in correct order' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('.ci-pending', count: 2)
+ expect(first('.js-pipeline-url-link')).to have_content("##{merge_request_pipeline.id}")
+ end
+ end
+
+ it 'sees the latest merge request pipeline as the head pipeline' do
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{merge_request_pipeline.id}")
+ end
+ end
+
+ it 'sees pipeline list in forked project' do
+ visit project_pipelines_path(forked_project)
+
+ expect(page).to have_selector('.ci-pending', count: 2)
+ end
+
+ context 'when a user updated a merge request from a forked project to the parent project' do
+ let!(:push_pipeline_2) do
+ Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
+ .execute(:push)
+ end
+
+ let!(:merge_request_pipeline_2) do
+ Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
+ .execute(:merge_request, merge_request: merge_request)
+ end
+
+ before do
+ visit project_merge_request_path(project, merge_request)
+
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
+ end
+
+ it 'sees branch pipelines and merge request pipelines in correct order' do
+ page.within('.ci-table') do
+ expect(page).to have_selector('.ci-pending', count: 4)
+
+ expect(all('.js-pipeline-url-link')[0])
+ .to have_content("##{merge_request_pipeline_2.id}")
+
+ expect(all('.js-pipeline-url-link')[1])
+ .to have_content("##{merge_request_pipeline.id}")
+
+ expect(all('.js-pipeline-url-link')[2])
+ .to have_content("##{push_pipeline_2.id}")
+
+ expect(all('.js-pipeline-url-link')[3])
+ .to have_content("##{push_pipeline.id}")
+ end
+ end
+
+ it 'sees merge request tag for merge request pipelines' do
+ page.within('.ci-table') do
+ expect(all('.pipeline-tags')[0])
+ .to have_content("merge request")
+
+ expect(all('.pipeline-tags')[1])
+ .to have_content("merge request")
+
+ expect(all('.pipeline-tags')[2])
+ .not_to have_content("merge request")
+
+ expect(all('.pipeline-tags')[3])
+ .not_to have_content("merge request")
+ end
+ end
+
+ it 'sees the latest merge request pipeline as the head pipeline' do
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{merge_request_pipeline_2.id}")
+ end
+ end
+
+ it 'sees pipeline list in forked project' do
+ visit project_pipelines_path(forked_project)
+
+ expect(page).to have_selector('.ci-pending', count: 4)
+ end
+ end
+
+ context 'when a user merges a merge request from a forked project to the parent project' do
+ before do
+ click_button 'Merge when pipeline succeeds'
+
+ wait_for_requests
+ end
+
+ context 'when merge request pipeline is pending' do
+ it 'waits for the head pipeline' do
+ expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_link('Cancel automatic merge')
+ end
+ end
+
+ context 'when merge request pipeline succeeds' do
+ before do
+ merge_request_pipeline.succeed!
+
+ wait_for_requests
+ end
+
+ it 'merges the merge request' do
+ expect(page).to have_content('Merged by')
+ expect(page).to have_link('Revert')
+ end
+ end
+
+ context 'when branch pipeline succeeds' do
+ before do
+ push_pipeline.succeed!
+
+ wait_for_requests
+ end
+
+ it 'waits for the head pipeline' do
+ expect(page).to have_content('to be merged automatically when the pipeline succeeds')
+ expect(page).to have_link('Cancel automatic merge')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
index 61e8f1c4662..fa887110c13 100644
--- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
@@ -19,9 +19,9 @@ describe 'User sorts merge requests' do
end
it 'keeps the sort option' do
- find('.filter-dropdown-container button.dropdown-menu-toggle').click
+ find('.filter-dropdown-container .dropdown').click
- page.within('.content ul.dropdown-menu.dropdown-menu-right li') do
+ page.within('ul.dropdown-menu.dropdown-menu-right li') do
click_link('Milestone')
end
@@ -49,9 +49,9 @@ describe 'User sorts merge requests' do
it 'separates remembered sorting from issues' do
create(:issue, project: project)
- find('.filter-dropdown-container button.dropdown-menu-toggle').click
+ find('.filter-dropdown-container .dropdown').click
- page.within('.content ul.dropdown-menu.dropdown-menu-right li') do
+ page.within('ul.dropdown-menu.dropdown-menu-right li') do
click_link('Milestone')
end
@@ -70,9 +70,9 @@ describe 'User sorts merge requests' do
end
it 'sorts by popularity' do
- find('.filter-dropdown-container button.dropdown-menu-toggle').click
+ find('.filter-dropdown-container .dropdown').click
- page.within('.content ul.dropdown-menu.dropdown-menu-right li') do
+ page.within('ul.dropdown-menu.dropdown-menu-right li') do
click_link('Popularity')
end
diff --git a/spec/features/projects/commit/builds_spec.rb b/spec/features/projects/commit/builds_spec.rb
index bd254caddfb..caf69796d52 100644
--- a/spec/features/projects/commit/builds_spec.rb
+++ b/spec/features/projects/commit/builds_spec.rb
@@ -20,7 +20,7 @@ describe 'project commit pipelines', :js do
visit pipelines_project_commit_path(project, project.commit.sha)
page.within('.table-holder') do
- expect(page).to have_content project.pipelines[0].id # pipeline ids
+ expect(page).to have_content project.ci_pipelines[0].id # pipeline ids
end
end
end
diff --git a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
index b778c72bc76..25417cf4955 100644
--- a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
+++ b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
@@ -32,7 +32,7 @@ describe 'Issue prioritization' do
visit project_issues_path(project, sort: 'label_priority')
# Ensure we are indicating that issues are sorted by priority
- expect(page).to have_selector('.dropdown-menu-toggle', text: 'Label priority')
+ expect(page).to have_selector('.dropdown', text: 'Label priority')
page.within('.issues-holder') do
issue_titles = all('.issues-list .issue-title-text').map(&:text)
@@ -70,7 +70,7 @@ describe 'Issue prioritization' do
sign_in user
visit project_issues_path(project, sort: 'label_priority')
- expect(page).to have_selector('.dropdown-menu-toggle', text: 'Label priority')
+ expect(page).to have_selector('.dropdown', text: 'Label priority')
page.within('.issues-holder') do
issue_titles = all('.issues-list .issue-title-text').map(&:text)
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index c2e980e75b8..cf309492808 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe 'Project members list' do
include Select2Helper
+ include Spec::Support::Helpers::Features::ListRowsHelpers
let(:user1) { create(:user, name: 'John Doe') }
let(:user2) { create(:user, name: 'Mary Jane') }
@@ -83,14 +84,6 @@ describe 'Project members list' do
end
end
- def first_row
- page.all('ul.content-list > li')[0]
- end
-
- def second_row
- page.all('ul.content-list > li')[1]
- end
-
def add_user(id, role)
page.within ".users-project-form" do
select2(id, from: "#user_ids", multiple: true)
diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb
index 831f22a0e69..435fb229b69 100644
--- a/spec/features/projects/pages_spec.rb
+++ b/spec/features/projects/pages_spec.rb
@@ -300,7 +300,7 @@ describe 'Pages' do
let(:pipeline) do
commit_sha = project.commit('HEAD').sha
- project.pipelines.create(
+ project.ci_pipelines.create(
ref: 'HEAD',
sha: commit_sha,
source: :push,
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 049bbca958f..a37ad9c3f43 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -499,4 +499,154 @@ describe 'Pipeline', :js do
end
end
end
+
+ context 'when user sees pipeline flags in a pipeline detail page' do
+ let(:project) { create(:project, :repository) }
+
+ context 'when pipeline is latest' do
+ include_context 'pipeline builds'
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ ref: 'master',
+ sha: project.commit.id,
+ user: user)
+ end
+
+ before do
+ visit project_pipeline_path(project, pipeline)
+ end
+
+ it 'contains badge that indicates it is the latest build' do
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_content 'latest'
+ end
+ end
+ end
+
+ context 'when pipeline has configuration errors' do
+ include_context 'pipeline builds'
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :invalid,
+ project: project,
+ ref: 'master',
+ sha: project.commit.id,
+ user: user)
+ end
+
+ before do
+ visit project_pipeline_path(project, pipeline)
+ end
+
+ it 'contains badge that indicates errors' do
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_content 'yaml invalid'
+ end
+ end
+
+ it 'contains badge with tooltip which contains error' do
+ expect(pipeline).to have_yaml_errors
+
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_selector(
+ %Q{span[title="#{pipeline.yaml_errors}"]})
+ end
+ end
+
+ it 'contains badge that indicates failure reason' do
+ expect(page).to have_content 'error'
+ end
+
+ it 'contains badge with tooltip which contains failure reason' do
+ expect(pipeline.failure_reason?).to eq true
+
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_selector(
+ %Q{span[title="#{pipeline.present.failure_reason}"]})
+ end
+ end
+ end
+
+ context 'when pipeline is stuck' do
+ include_context 'pipeline builds'
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ ref: 'master',
+ sha: project.commit.id,
+ user: user)
+ end
+
+ before do
+ create(:ci_build, :pending, pipeline: pipeline)
+ visit project_pipeline_path(project, pipeline)
+ end
+
+ it 'contains badge that indicates being stuck' do
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_content 'stuck'
+ end
+ end
+ end
+
+ context 'when pipeline uses auto devops' do
+ include_context 'pipeline builds'
+
+ let(:project) { create(:project, :repository, auto_devops_attributes: { enabled: true }) }
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :auto_devops_source,
+ project: project,
+ ref: 'master',
+ sha: project.commit.id,
+ user: user)
+ end
+
+ before do
+ visit project_pipeline_path(project, pipeline)
+ end
+
+ it 'contains badge that indicates using auto devops' do
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_content 'Auto DevOps'
+ end
+ end
+ end
+
+ context 'when pipeline runs in a merge request context' do
+ include_context 'pipeline builds'
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ project: merge_request.source_project,
+ ref: 'feature',
+ sha: merge_request.diff_head_sha,
+ user: user,
+ merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'feature',
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ before do
+ visit project_pipeline_path(project, pipeline)
+ end
+
+ it 'contains badge that indicates merge request pipeline' do
+ page.within(all('.well-segment')[1]) do
+ expect(page).to have_content 'merge request'
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/projects/serverless/functions_spec.rb b/spec/features/projects/serverless/functions_spec.rb
new file mode 100644
index 00000000000..766c63725b3
--- /dev/null
+++ b/spec/features/projects/serverless/functions_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe 'Functions', :js do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ gitlab_sign_in(user)
+ end
+
+ context 'when user does not have a cluster and visits the serverless page' do
+ before do
+ visit project_serverless_functions_path(project)
+ end
+
+ it 'sees an empty state' do
+ expect(page).to have_link('Install Knative')
+ expect(page).to have_selector('.empty-state')
+ end
+ end
+
+ context 'when the user does have a cluster and visits the serverless page' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+
+ before do
+ visit project_serverless_functions_path(project)
+ end
+
+ it 'sees an empty state' do
+ expect(page).to have_link('Install Knative')
+ expect(page).to have_selector('.empty-state')
+ end
+ end
+
+ context 'when the user has a cluster and knative installed and visits the serverless page' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
+ let(:project) { knative.cluster.project }
+
+ before do
+ visit project_serverless_functions_path(project)
+ end
+
+ it 'sees an empty listing of serverless functions' do
+ expect(page).to have_selector('.gl-responsive-table-row')
+ end
+ end
+end
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index b7a22316d26..418e22f8c35 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -196,5 +196,40 @@ describe 'Projects > Settings > Repository settings' do
end
end
end
+
+ context 'repository cleanup settings' do
+ let(:object_map_file) { Rails.root.join('spec', 'fixtures', 'bfg_object_map.txt') }
+
+ context 'feature enabled' do
+ it 'uploads an object map file', :js do
+ stub_feature_flags(project_cleanup: true)
+
+ visit project_settings_repository_path(project)
+
+ expect(page).to have_content('Repository cleanup')
+
+ page.within('#cleanup') do
+ attach_file('project[bfg_object_map]', object_map_file, visible: false)
+
+ Sidekiq::Testing.fake! do
+ click_button 'Start cleanup'
+ end
+ end
+
+ expect(page).to have_content('Repository cleanup has started')
+ expect(RepositoryCleanupWorker.jobs.count).to eq(1)
+ end
+ end
+
+ context 'feature disabled' do
+ it 'does not show the settings' do
+ stub_feature_flags(project_cleanup: false)
+
+ visit project_settings_repository_path(project)
+
+ expect(page).not_to have_content('Repository cleanup')
+ end
+ end
+ end
end
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 0add129dde2..b56bb272b46 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -277,7 +277,7 @@ describe 'Project' do
end
end
- context 'for subgroups', :js do
+ context 'for subgroups', :js, :nested_groups do
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
let(:project) { create(:project, :repository, group: subgroup) }
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index f545da3aee4..8975ea0f063 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -19,7 +19,7 @@ describe GroupMembersFinder, '#execute' do
end
it 'returns members for nested group', :nested_groups do
- group.add_maintainer(user2)
+ group.add_developer(user2)
nested_group.request_access(user4)
member1 = group.add_maintainer(user1)
member3 = nested_group.add_maintainer(user2)
diff --git a/spec/finders/projects/serverless/functions_finder_spec.rb b/spec/finders/projects/serverless/functions_finder_spec.rb
new file mode 100644
index 00000000000..60d02b12054
--- /dev/null
+++ b/spec/finders/projects/serverless/functions_finder_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Serverless::FunctionsFinder do
+ include KubernetesHelpers
+ include ReactiveCachingHelpers
+
+ let(:user) { create(:user) }
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:service) { cluster.platform_kubernetes }
+ let(:project) { cluster.project }
+
+ let(:namespace) do
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe 'retrieve data from knative' do
+ it 'does not have knative installed' do
+ expect(described_class.new(project.clusters).execute).to be_empty
+ end
+
+ context 'has knative installed' do
+ let!(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
+
+ it 'returns no functions' do
+ expect(described_class.new(project.clusters).execute).to be_empty
+ end
+
+ it 'returns functions', :use_clean_rails_memory_store_caching do
+ stub_reactive_cache(knative, services: kube_knative_services_body(namespace: namespace.namespace, name: cluster.project.name)["items"])
+
+ expect(described_class.new(project.clusters).execute).not_to be_empty
+ end
+ end
+ end
+
+ describe 'verify if knative is installed' do
+ context 'knative is not installed' do
+ it 'does not have knative installed' do
+ expect(described_class.new(project.clusters).installed?).to be false
+ end
+ end
+
+ context 'knative is installed' do
+ let!(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
+
+ it 'does have knative installed' do
+ expect(described_class.new(project.clusters).installed?).to be true
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/entities/issue_board.json b/spec/fixtures/api/schemas/entities/issue_board.json
index 3e252ddd13c..f7b270ffa8d 100644
--- a/spec/fixtures/api/schemas/entities/issue_board.json
+++ b/spec/fixtures/api/schemas/entities/issue_board.json
@@ -9,7 +9,7 @@
"project_id": { "type": "integer" },
"relative_position": { "type": ["integer", "null"] },
"time_estimate": { "type": "integer" },
- "weight": { "type": "integer" },
+ "weight": { "type": ["integer", "null"] },
"project": {
"type": "object",
"properties": {
diff --git a/spec/fixtures/api/schemas/entities/issue_boards.json b/spec/fixtures/api/schemas/entities/issue_boards.json
new file mode 100644
index 00000000000..0ac1d9468c8
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/issue_boards.json
@@ -0,0 +1,15 @@
+{
+ "type": "object",
+ "required" : [
+ "issues",
+ "size"
+ ],
+ "properties" : {
+ "issues": {
+ "type": "array",
+ "items": { "$ref": "issue_board.json" }
+ },
+ "size": { "type": "integer" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/bfg_object_map.txt b/spec/fixtures/bfg_object_map.txt
new file mode 100644
index 00000000000..c60171d8770
--- /dev/null
+++ b/spec/fixtures/bfg_object_map.txt
@@ -0,0 +1 @@
+f1d2d2f924e986ac86fdf7b36c94bcdf32beec15 e242ed3bffccdf271b7fbaf34ed72d089537b42f
diff --git a/spec/fixtures/security-reports/feature-branch/gl-dependency-scanning-report.json b/spec/fixtures/security-reports/feature-branch/gl-dependency-scanning-report.json
index 314f04107eb..ce66f562175 100644
--- a/spec/fixtures/security-reports/feature-branch/gl-dependency-scanning-report.json
+++ b/spec/fixtures/security-reports/feature-branch/gl-dependency-scanning-report.json
@@ -11,7 +11,13 @@
"name": "Gemnasium"
},
"location": {
- "file": "app/pom.xml"
+ "file": "app/pom.xml",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
},
"identifiers": [
{
@@ -55,7 +61,13 @@
"name": "Gemnasium"
},
"location": {
- "file": "app/requirements.txt"
+ "file": "app/requirements.txt",
+ "dependency": {
+ "package": {
+ "name": "Django"
+ },
+ "version": "1.11.3"
+ }
},
"identifiers": [
{
@@ -93,7 +105,13 @@
"name": "Gemnasium"
},
"location": {
- "file": "rails/Gemfile.lock"
+ "file": "rails/Gemfile.lock",
+ "dependency": {
+ "package": {
+ "name": "nokogiri"
+ },
+ "version": "1.8.0"
+ }
},
"identifiers": [
{
@@ -131,7 +149,13 @@
"name": "bundler-audit"
},
"location": {
- "file": "sast-sample-rails/Gemfile.lock"
+ "file": "sast-sample-rails/Gemfile.lock",
+ "dependency": {
+ "package": {
+ "name": "ffi"
+ },
+ "version": "1.9.18"
+ }
},
"identifiers": [
{
diff --git a/spec/fixtures/security-reports/master/gl-dependency-scanning-report.json b/spec/fixtures/security-reports/master/gl-dependency-scanning-report.json
index 314f04107eb..ce66f562175 100644
--- a/spec/fixtures/security-reports/master/gl-dependency-scanning-report.json
+++ b/spec/fixtures/security-reports/master/gl-dependency-scanning-report.json
@@ -11,7 +11,13 @@
"name": "Gemnasium"
},
"location": {
- "file": "app/pom.xml"
+ "file": "app/pom.xml",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
},
"identifiers": [
{
@@ -55,7 +61,13 @@
"name": "Gemnasium"
},
"location": {
- "file": "app/requirements.txt"
+ "file": "app/requirements.txt",
+ "dependency": {
+ "package": {
+ "name": "Django"
+ },
+ "version": "1.11.3"
+ }
},
"identifiers": [
{
@@ -93,7 +105,13 @@
"name": "Gemnasium"
},
"location": {
- "file": "rails/Gemfile.lock"
+ "file": "rails/Gemfile.lock",
+ "dependency": {
+ "package": {
+ "name": "nokogiri"
+ },
+ "version": "1.8.0"
+ }
},
"identifiers": [
{
@@ -131,7 +149,13 @@
"name": "bundler-audit"
},
"location": {
- "file": "sast-sample-rails/Gemfile.lock"
+ "file": "sast-sample-rails/Gemfile.lock",
+ "dependency": {
+ "package": {
+ "name": "ffi"
+ },
+ "version": "1.9.18"
+ }
},
"identifiers": [
{
diff --git a/spec/frontend/.eslintrc.yml b/spec/frontend/.eslintrc.yml
new file mode 100644
index 00000000000..6d73977a891
--- /dev/null
+++ b/spec/frontend/.eslintrc.yml
@@ -0,0 +1,9 @@
+---
+env:
+ jest/globals: true
+plugins:
+- jest
+settings:
+ import/resolver:
+ jest:
+ jestConfigFile: "config/jest.config.js"
diff --git a/spec/frontend/dummy_spec.js b/spec/frontend/dummy_spec.js
new file mode 100644
index 00000000000..2bfef25e9c6
--- /dev/null
+++ b/spec/frontend/dummy_spec.js
@@ -0,0 +1 @@
+it('does nothing', () => {});
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 976b6c312b4..a857b7646b2 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -471,6 +471,31 @@ describe ProjectsHelper do
end
end
+ describe 'link_to_bfg' do
+ subject { helper.link_to_bfg }
+
+ it 'generates a hardcoded link to the BFG Repo-Cleaner' do
+ result = helper.link_to_bfg
+ doc = Nokogiri::HTML.fragment(result)
+
+ expect(doc.children.size).to eq(1)
+
+ link = doc.children.first
+
+ aggregate_failures do
+ expect(result).to be_html_safe
+
+ expect(link.name).to eq('a')
+ expect(link[:target]).to eq('_blank')
+ expect(link[:rel]).to eq('noopener noreferrer')
+ expect(link[:href]).to eq('https://rtyley.github.io/bfg-repo-cleaner/')
+ expect(link.inner_html).to eq('BFG')
+
+ expect(result).to be_html_safe
+ end
+ end
+ end
+
describe '#legacy_render_context' do
it 'returns the redcarpet engine' do
params = { legacy_render: '1' }
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
new file mode 100644
index 00000000000..cba0d93e144
--- /dev/null
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe SortingHelper do
+ include ApplicationHelper
+ include IconsHelper
+
+ describe '#issuable_sort_option_title' do
+ it 'returns correct title for issuable_sort_option_overrides key' do
+ expect(issuable_sort_option_title('created_asc')).to eq('Created date')
+ end
+
+ it 'returns correct title for a valid sort value' do
+ expect(issuable_sort_option_title('priority')).to eq('Priority')
+ end
+
+ it 'returns nil for invalid sort value' do
+ expect(issuable_sort_option_title('invalid_key')).to eq(nil)
+ end
+ end
+
+ describe '#issuable_sort_direction_button' do
+ before do
+ allow(self).to receive(:request).and_return(double(path: 'http://test.com', query_parameters: {}))
+ end
+
+ it 'returns icon with sort-highest when sort is created_date' do
+ expect(issuable_sort_direction_button('created_date')).to include('sort-highest')
+ end
+
+ it 'returns icon with sort-lowest when sort is asc' do
+ expect(issuable_sort_direction_button('created_asc')).to include('sort-lowest')
+ end
+
+ it 'returns icon with sort-lowest when sorting by milestone' do
+ expect(issuable_sort_direction_button('milestone')).to include('sort-lowest')
+ end
+
+ it 'returns icon with sort-lowest when sorting by due_date' do
+ expect(issuable_sort_direction_button('due_date')).to include('sort-lowest')
+ end
+ end
+end
diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js
index 7de38913bae..46f72214831 100644
--- a/spec/javascripts/api_spec.js
+++ b/spec/javascripts/api_spec.js
@@ -180,6 +180,23 @@ describe('Api', () => {
});
});
+ describe('projectRunners', () => {
+ it('fetches the runners of a project', done => {
+ const projectPath = 7;
+ const params = { scope: 'active' };
+ const mockData = [{ id: 4 }];
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/runners`;
+ mock.onGet(expectedUrl, { params }).reply(200, mockData);
+
+ Api.projectRunners(projectPath, { params })
+ .then(({ data }) => {
+ expect(data).toEqual(mockData);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('newLabel', () => {
it('creates a new label', done => {
const namespace = 'some namespace';
diff --git a/spec/javascripts/clusters/components/applications_spec.js b/spec/javascripts/clusters/components/applications_spec.js
index 928bf70f3a2..e46edec9abb 100644
--- a/spec/javascripts/clusters/components/applications_spec.js
+++ b/spec/javascripts/clusters/components/applications_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import applications from '~/clusters/components/applications.vue';
+import { CLUSTER_TYPE } from '~/clusters/constants';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
describe('Applications', () => {
@@ -14,9 +15,10 @@ describe('Applications', () => {
vm.$destroy();
});
- describe('', () => {
+ describe('Project cluster applications', () => {
beforeEach(() => {
vm = mountComponent(Applications, {
+ type: CLUSTER_TYPE.PROJECT,
applications: {
helm: { title: 'Helm Tiller' },
ingress: { title: 'Ingress' },
@@ -30,31 +32,76 @@ describe('Applications', () => {
});
it('renders a row for Helm Tiller', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-helm')).toBeDefined();
+ expect(vm.$el.querySelector('.js-cluster-application-row-helm')).not.toBeNull();
});
it('renders a row for Ingress', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-ingress')).toBeDefined();
+ expect(vm.$el.querySelector('.js-cluster-application-row-ingress')).not.toBeNull();
});
it('renders a row for Cert-Manager', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-cert_manager')).toBeDefined();
+ expect(vm.$el.querySelector('.js-cluster-application-row-cert_manager')).not.toBeNull();
});
it('renders a row for Prometheus', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).toBeDefined();
+ expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).not.toBeNull();
});
it('renders a row for GitLab Runner', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-runner')).toBeDefined();
+ expect(vm.$el.querySelector('.js-cluster-application-row-runner')).not.toBeNull();
});
it('renders a row for Jupyter', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-jupyter')).not.toBe(null);
+ expect(vm.$el.querySelector('.js-cluster-application-row-jupyter')).not.toBeNull();
});
it('renders a row for Knative', () => {
- expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBe(null);
+ expect(vm.$el.querySelector('.js-cluster-application-row-knative')).not.toBeNull();
+ });
+ });
+
+ describe('Group cluster applications', () => {
+ beforeEach(() => {
+ vm = mountComponent(Applications, {
+ type: CLUSTER_TYPE.GROUP,
+ applications: {
+ helm: { title: 'Helm Tiller' },
+ ingress: { title: 'Ingress' },
+ cert_manager: { title: 'Cert-Manager' },
+ runner: { title: 'GitLab Runner' },
+ prometheus: { title: 'Prometheus' },
+ jupyter: { title: 'JupyterHub' },
+ knative: { title: 'Knative' },
+ },
+ });
+ });
+
+ it('renders a row for Helm Tiller', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-helm')).not.toBeNull();
+ });
+
+ it('renders a row for Ingress', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-ingress')).not.toBeNull();
+ });
+
+ it('renders a row for Cert-Manager', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-cert_manager')).not.toBeNull();
+ });
+
+ it('renders a row for Prometheus', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-prometheus')).toBeNull();
+ });
+
+ it('does not render a row for GitLab Runner', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-runner')).toBeNull();
+ });
+
+ it('does not render a row for Jupyter', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-jupyter')).toBeNull();
+ });
+
+ it('does not render a row for Knative', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-knative')).toBeNull();
});
});
diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js
index 51bb4807960..1af49282c36 100644
--- a/spec/javascripts/diffs/components/diff_file_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_spec.js
@@ -74,6 +74,32 @@ describe('DiffFile', () => {
});
});
+ it('should not be collapsed for renamed files', done => {
+ vm.file.renderIt = true;
+ vm.file.collapsed = false;
+ vm.file.highlighted_diff_lines = null;
+ vm.file.renamed_file = true;
+
+ vm.$nextTick(() => {
+ expect(vm.$el.innerText).not.toContain('This diff is collapsed');
+
+ done();
+ });
+ });
+
+ it('should not be collapsed for mode changed files', done => {
+ vm.file.renderIt = true;
+ vm.file.collapsed = false;
+ vm.file.highlighted_diff_lines = null;
+ vm.file.mode_changed = true;
+
+ vm.$nextTick(() => {
+ expect(vm.$el.innerText).not.toContain('This diff is collapsed');
+
+ done();
+ });
+ });
+
it('should have loading icon while loading a collapsed diffs', done => {
vm.file.collapsed = true;
vm.isLoadingCollapsedDiff = true;
diff --git a/spec/javascripts/diffs/components/diff_gutter_avatars_spec.js b/spec/javascripts/diffs/components/diff_gutter_avatars_spec.js
index ad2605a5c5c..cdd30919b09 100644
--- a/spec/javascripts/diffs/components/diff_gutter_avatars_spec.js
+++ b/spec/javascripts/diffs/components/diff_gutter_avatars_spec.js
@@ -89,6 +89,35 @@ describe('DiffGutterAvatars', () => {
expect(component.discussions[0].expanded).toEqual(false);
component.$store.dispatch('setInitialNotes', []);
});
+
+ it('forces expansion of all discussions', () => {
+ spyOn(component.$store, 'dispatch');
+
+ component.discussions[0].expanded = true;
+ component.discussions.push({
+ ...component.discussions[0],
+ id: '123test',
+ expanded: false,
+ });
+
+ component.toggleDiscussions();
+
+ expect(component.$store.dispatch.calls.argsFor(0)).toEqual([
+ 'toggleDiscussion',
+ {
+ discussionId: component.discussions[0].id,
+ forceExpanded: true,
+ },
+ ]);
+
+ expect(component.$store.dispatch.calls.argsFor(1)).toEqual([
+ 'toggleDiscussion',
+ {
+ discussionId: component.discussions[1].id,
+ forceExpanded: true,
+ },
+ ]);
+ });
});
});
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index 205138bd845..4b339a0553f 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -379,27 +379,50 @@ describe('DiffsStoreActions', () => {
describe('loadCollapsedDiff', () => {
it('should fetch data and call mutation with response and the given parameter', done => {
- const file = { hash: 123, loadCollapsedDiffUrl: '/load/collapsed/diff/url' };
+ const file = { hash: 123, load_collapsed_diff_url: '/load/collapsed/diff/url' };
const data = { hash: 123, parallelDiffLines: [{ lineCode: 1 }] };
const mock = new MockAdapter(axios);
+ const commit = jasmine.createSpy('commit');
mock.onGet(file.loadCollapsedDiffUrl).reply(200, data);
- testAction(
- loadCollapsedDiff,
- file,
- {},
- [
- {
- type: types.ADD_COLLAPSED_DIFFS,
- payload: { file, data },
- },
- ],
- [],
- () => {
+ loadCollapsedDiff({ commit, getters: { commitId: null } }, file)
+ .then(() => {
+ expect(commit).toHaveBeenCalledWith(types.ADD_COLLAPSED_DIFFS, { file, data });
+
mock.restore();
done();
- },
- );
+ })
+ .catch(done.fail);
+ });
+
+ it('should fetch data without commit ID', () => {
+ const file = { load_collapsed_diff_url: '/load/collapsed/diff/url' };
+ const getters = {
+ commitId: null,
+ };
+
+ spyOn(axios, 'get').and.returnValue(Promise.resolve({ data: {} }));
+
+ loadCollapsedDiff({ commit() {}, getters }, file);
+
+ expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
+ params: { commit_id: null },
+ });
+ });
+
+ it('should fetch data with commit ID', () => {
+ const file = { load_collapsed_diff_url: '/load/collapsed/diff/url' };
+ const getters = {
+ commitId: '123',
+ };
+
+ spyOn(axios, 'get').and.returnValue(Promise.resolve({ data: {} }));
+
+ loadCollapsedDiff({ commit() {}, getters }, file);
+
+ expect(axios.get).toHaveBeenCalledWith(file.load_collapsed_diff_url, {
+ params: { commit_id: '123' },
+ });
});
});
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 7a06c178f0b..23e8761bc55 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -199,6 +199,84 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
});
+ it('should not duplicate discussions on line', () => {
+ const diffPosition = {
+ base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ head_sha: 'b921914f9a834ac47e6fd9420f78db0f83559130',
+ new_line: null,
+ new_path: '500-lines-4.txt',
+ old_line: 5,
+ old_path: '500-lines-4.txt',
+ start_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ };
+
+ const state = {
+ latestDiff: true,
+ diffFiles: [
+ {
+ file_hash: 'ABC',
+ parallel_diff_lines: [
+ {
+ left: {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ right: {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ },
+ ],
+ highlighted_diff_lines: [
+ {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ ],
+ },
+ ],
+ };
+ const discussion = {
+ id: 1,
+ line_code: 'ABC_1',
+ diff_discussion: true,
+ resolvable: true,
+ original_position: diffPosition,
+ position: diffPosition,
+ diff_file: {
+ file_hash: state.diffFiles[0].file_hash,
+ },
+ };
+
+ const diffPositionByLineCode = {
+ ABC_1: diffPosition,
+ };
+
+ mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
+ discussion,
+ diffPositionByLineCode,
+ });
+
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
+ expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]);
+
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
+
+ mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
+ discussion,
+ diffPositionByLineCode,
+ });
+
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions.length).toEqual(1);
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toEqual(1);
+ expect(state.diffFiles[0].parallel_diff_lines[0].right.discussions).toEqual([]);
+
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
+ });
+
it('should add legacy discussions to the given line', () => {
const diffPosition = {
base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js
index d4ef17c5ef8..717f983da65 100644
--- a/spec/javascripts/diffs/store/utils_spec.js
+++ b/spec/javascripts/diffs/store/utils_spec.js
@@ -559,4 +559,26 @@ describe('DiffsStoreUtils', () => {
]);
});
});
+
+ describe('getDiffMode', () => {
+ it('returns mode when matched in file', () => {
+ expect(
+ utils.getDiffMode({
+ renamed_file: true,
+ }),
+ ).toBe('renamed');
+ });
+
+ it('returns mode_changed if key has no match', () => {
+ expect(
+ utils.getDiffMode({
+ mode_changed: true,
+ }),
+ ).toBe('mode_changed');
+ });
+
+ it('defaults to replaced', () => {
+ expect(utils.getDiffMode({})).toBe('replaced');
+ });
+ });
});
diff --git a/spec/javascripts/lib/utils/file_upload_spec.js b/spec/javascripts/lib/utils/file_upload_spec.js
new file mode 100644
index 00000000000..92c9cc70aaf
--- /dev/null
+++ b/spec/javascripts/lib/utils/file_upload_spec.js
@@ -0,0 +1,36 @@
+import fileUpload from '~/lib/utils/file_upload';
+
+describe('File upload', () => {
+ beforeEach(() => {
+ setFixtures(`
+ <form>
+ <button class="js-button" type="button">Click me!</button>
+ <input type="text" class="js-input" />
+ <span class="js-filename"></span>
+ </form>
+ `);
+
+ fileUpload('.js-button', '.js-input');
+ });
+
+ it('clicks file input after clicking button', () => {
+ const btn = document.querySelector('.js-button');
+ const input = document.querySelector('.js-input');
+
+ spyOn(input, 'click');
+
+ btn.click();
+
+ expect(input.click).toHaveBeenCalled();
+ });
+
+ it('updates file name text', () => {
+ const input = document.querySelector('.js-input');
+
+ input.value = 'path/to/file/index.js';
+
+ input.dispatchEvent(new CustomEvent('change'));
+
+ expect(document.querySelector('.js-filename').textContent).toEqual('index.js');
+ });
+});
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index 76e9cd03d2d..ab9c52346d6 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -6,7 +6,6 @@ import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
import mockDiffFile from '../../diffs/mock_data/diff_file';
const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
-const diffDiscussionFixture = 'merge_requests/diff_discussion.json';
describe('noteable_discussion component', () => {
const Component = Vue.extend(noteableDiscussion);
@@ -79,51 +78,6 @@ describe('noteable_discussion component', () => {
});
});
- describe('computed', () => {
- describe('isRepliesCollapsed', () => {
- it('should return false for diff discussions', done => {
- const diffDiscussion = getJSONFixture(diffDiscussionFixture)[0];
- vm.$store.dispatch('setInitialNotes', [diffDiscussion]);
-
- Vue.nextTick()
- .then(() => {
- expect(vm.isRepliesCollapsed).toEqual(false);
- expect(vm.$el.querySelector('.js-toggle-replies')).not.toBeNull();
- expect(vm.$el.querySelector('.discussion-reply-holder')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should return false if discussion does not have a reply', () => {
- const discussion = { ...discussionMock, resolved: true };
- discussion.notes = discussion.notes.slice(0, 1);
- const noRepliesVm = new Component({
- store,
- propsData: { discussion },
- }).$mount();
-
- expect(noRepliesVm.isRepliesCollapsed).toEqual(false);
- expect(noRepliesVm.$el.querySelector('.js-toggle-replies')).toBeNull();
- expect(vm.$el.querySelector('.discussion-reply-holder')).not.toBeNull();
- noRepliesVm.$destroy();
- });
-
- it('should return true for resolved non-diff discussion which has replies', () => {
- const discussion = { ...discussionMock, resolved: true };
- const resolvedDiscussionVm = new Component({
- store,
- propsData: { discussion },
- }).$mount();
-
- expect(resolvedDiscussionVm.isRepliesCollapsed).toEqual(true);
- expect(resolvedDiscussionVm.$el.querySelector('.js-toggle-replies')).not.toBeNull();
- expect(vm.$el.querySelector('.discussion-reply-holder')).not.toBeNull();
- resolvedDiscussionVm.$destroy();
- });
- });
- });
-
describe('methods', () => {
describe('jumpToNextDiscussion', () => {
it('expands next unresolved discussion', done => {
diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/javascripts/notes/stores/mutation_spec.js
index 1c4449d1055..52cdc16353a 100644
--- a/spec/javascripts/notes/stores/mutation_spec.js
+++ b/spec/javascripts/notes/stores/mutation_spec.js
@@ -297,6 +297,16 @@ describe('Notes Store mutations', () => {
expect(state.discussions[0].expanded).toEqual(false);
});
+
+ it('forces a discussions expanded state', () => {
+ const state = {
+ discussions: [{ ...discussionMock, expanded: false }],
+ };
+
+ mutations.TOGGLE_DISCUSSION(state, { discussionId: discussionMock.id, forceExpanded: true });
+
+ expect(state.discussions[0].expanded).toEqual(true);
+ });
});
describe('UPDATE_NOTE', () => {
diff --git a/spec/javascripts/pipelines/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js
index d6c44f4c976..ea917b36526 100644
--- a/spec/javascripts/pipelines/pipeline_url_spec.js
+++ b/spec/javascripts/pipelines/pipeline_url_spec.js
@@ -90,7 +90,7 @@ describe('Pipeline Url Component', () => {
expect(component.$el.querySelector('.js-pipeline-url-api').textContent).toContain('API');
});
- it('should render latest, yaml invalid and stuck flags when provided', () => {
+ it('should render latest, yaml invalid, merge request, and stuck flags when provided', () => {
const component = new PipelineUrlComponent({
propsData: {
pipeline: {
@@ -100,6 +100,7 @@ describe('Pipeline Url Component', () => {
latest: true,
yaml_errors: true,
stuck: true,
+ merge_request: true,
},
},
autoDevopsHelpPath: 'foo',
@@ -111,6 +112,10 @@ describe('Pipeline Url Component', () => {
'yaml invalid',
);
+ expect(component.$el.querySelector('.js-pipeline-url-mergerequest').textContent).toContain(
+ 'merge request',
+ );
+
expect(component.$el.querySelector('.js-pipeline-url-stuck').textContent).toContain('stuck');
});
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
index 67a3a2e08bc..6add6cdac4d 100644
--- a/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
+++ b/spec/javascripts/vue_shared/components/diff_viewer/diff_viewer_spec.js
@@ -68,4 +68,30 @@ describe('DiffViewer', () => {
done();
});
});
+
+ it('renders renamed component', () => {
+ createComponent({
+ diffMode: 'renamed',
+ newPath: 'test.abc',
+ newSha: 'ABC',
+ oldPath: 'testold.abc',
+ oldSha: 'DEF',
+ });
+
+ expect(vm.$el.textContent).toContain('File moved');
+ });
+
+ it('renders mode changed component', () => {
+ createComponent({
+ diffMode: 'mode_changed',
+ newPath: 'test.abc',
+ newSha: 'ABC',
+ oldPath: 'testold.abc',
+ oldSha: 'DEF',
+ aMode: '123',
+ bMode: '321',
+ });
+
+ expect(vm.$el.textContent).toContain('File mode changed from 123 to 321');
+ });
});
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js b/spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js
new file mode 100644
index 00000000000..c4358f0d9cb
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js
@@ -0,0 +1,23 @@
+import { shallowMount } from '@vue/test-utils';
+import ModeChanged from '~/vue_shared/components/diff_viewer/viewers/mode_changed.vue';
+
+describe('Diff viewer mode changed component', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = shallowMount(ModeChanged, {
+ propsData: {
+ aMode: '123',
+ bMode: '321',
+ },
+ });
+ });
+
+ afterEach(() => {
+ vm.destroy();
+ });
+
+ it('renders aMode & bMode', () => {
+ expect(vm.text()).toContain('File mode changed from 123 to 321');
+ });
+});
diff --git a/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb b/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb
new file mode 100644
index 00000000000..9d4921968b3
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/encrypt_runners_tokens_spec.rb
@@ -0,0 +1,77 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::EncryptRunnersTokens, :migration, schema: 20181121111200 do
+ let(:settings) { table(:application_settings) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:runners) { table(:ci_runners) }
+
+ context 'when migrating application settings' do
+ before do
+ settings.create!(id: 1, runners_registration_token: 'plain-text-token1')
+ end
+
+ it 'migrates runners registration tokens' do
+ migrate!(:settings, 1, 1)
+
+ encrypted_token = settings.first.runners_registration_token_encrypted
+ decrypted_token = ::Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted_token)
+
+ expect(decrypted_token).to eq 'plain-text-token1'
+ expect(settings.first.runners_registration_token).to eq 'plain-text-token1'
+ end
+ end
+
+ context 'when migrating namespaces' do
+ before do
+ namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab-org', runners_token: 'my-token1')
+ namespaces.create!(id: 12, name: 'gitlab', path: 'gitlab-org', runners_token: 'my-token2')
+ namespaces.create!(id: 22, name: 'gitlab', path: 'gitlab-org', runners_token: 'my-token3')
+ end
+
+ it 'migrates runners registration tokens' do
+ migrate!(:namespace, 11, 22)
+
+ expect(namespaces.all.reload).to all(
+ have_attributes(runners_token: be_a(String), runners_token_encrypted: be_a(String))
+ )
+ end
+ end
+
+ context 'when migrating projects' do
+ before do
+ namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab-org')
+ projects.create!(id: 111, namespace_id: 11, name: 'gitlab', path: 'gitlab-ce', runners_token: 'my-token1')
+ projects.create!(id: 114, namespace_id: 11, name: 'gitlab', path: 'gitlab-ce', runners_token: 'my-token2')
+ projects.create!(id: 116, namespace_id: 11, name: 'gitlab', path: 'gitlab-ce', runners_token: 'my-token3')
+ end
+
+ it 'migrates runners registration tokens' do
+ migrate!(:project, 111, 116)
+
+ expect(projects.all.reload).to all(
+ have_attributes(runners_token: be_a(String), runners_token_encrypted: be_a(String))
+ )
+ end
+ end
+
+ context 'when migrating runners' do
+ before do
+ runners.create!(id: 201, runner_type: 1, token: 'plain-text-token1')
+ runners.create!(id: 202, runner_type: 1, token: 'plain-text-token2')
+ runners.create!(id: 203, runner_type: 1, token: 'plain-text-token3')
+ end
+
+ it 'migrates runners communication tokens' do
+ migrate!(:runner, 201, 203)
+
+ expect(runners.all.reload).to all(
+ have_attributes(token: be_a(String), token_encrypted: be_a(String))
+ )
+ end
+ end
+
+ def migrate!(model, from, to)
+ subject.perform(model, from, to)
+ end
+end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
new file mode 100644
index 00000000000..77366e91dca
--- /dev/null
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::BranchCheck do
+ include_context 'change access checks context'
+
+ describe '#validate!' do
+ it 'does not raise any error' do
+ expect { subject.validate! }.not_to raise_error
+ end
+
+ context 'trying to delete the default branch' do
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+ let(:ref) { 'refs/heads/master' }
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'The default branch of a project cannot be deleted.')
+ end
+ end
+
+ context 'protected branches check' do
+ before do
+ allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true)
+ allow(ProtectedBranch).to receive(:protected?).with(project, 'feature').and_return(true)
+ end
+
+ it 'raises an error if the user is not allowed to do forced pushes to protected branches' do
+ expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to force push code to a protected branch on this project.')
+ end
+
+ it 'raises an error if the user is not allowed to merge to protected branches' do
+ expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true)
+ expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
+ expect(user_access).to receive(:can_push_to_branch?).and_return(false)
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to merge code into protected branches on this project.')
+ end
+
+ it 'raises an error if the user is not allowed to push to protected branches' do
+ expect(user_access).to receive(:can_push_to_branch?).and_return(false)
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to protected branches on this project.')
+ end
+
+ context 'when project repository is empty' do
+ let(:project) { create(:project) }
+
+ it 'raises an error if the user is not allowed to push to protected branches' do
+ expect(user_access).to receive(:can_push_to_branch?).and_return(false)
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /Ask a project Owner or Maintainer to create a default branch/)
+ end
+ end
+
+ context 'branch deletion' do
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+ let(:ref) { 'refs/heads/feature' }
+
+ context 'if the user is not allowed to delete protected branches' do
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to delete protected branches from this project. Only a project maintainer or owner can delete a protected branch.')
+ end
+ end
+
+ context 'if the user is allowed to delete protected branches' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'through the web interface' do
+ let(:protocol) { 'web' }
+
+ it 'allows branch deletion' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'over SSH or HTTP' do
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only delete protected branches using the web interface.')
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb
index 81804ba5c76..45fb33e9e4a 100644
--- a/spec/lib/gitlab/checks/change_access_spec.rb
+++ b/spec/lib/gitlab/checks/change_access_spec.rb
@@ -2,245 +2,56 @@ require 'spec_helper'
describe Gitlab::Checks::ChangeAccess do
describe '#exec' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:user_access) { Gitlab::UserAccess.new(user, project: project) }
- let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
- let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
- let(:ref) { 'refs/heads/master' }
- let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
- let(:protocol) { 'ssh' }
- let(:timeout) { Gitlab::GitAccess::INTERNAL_TIMEOUT }
- let(:logger) { Gitlab::Checks::TimedLogger.new(timeout: timeout) }
+ include_context 'change access checks context'
- subject(:change_access) do
- described_class.new(
- changes,
- project: project,
- user_access: user_access,
- protocol: protocol,
- logger: logger
- )
- end
-
- before do
- project.add_developer(user)
- end
+ subject { change_access }
context 'without failed checks' do
it "doesn't raise an error" do
expect { subject.exec }.not_to raise_error
end
- end
- context 'when time limit was reached' do
- it 'raises a TimeoutError' do
- logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout)
- access = described_class.new(changes,
- project: project,
- user_access: user_access,
- protocol: protocol,
- logger: logger)
+ it 'calls push checks' do
+ expect_any_instance_of(Gitlab::Checks::PushCheck).to receive(:validate!)
- expect { access.exec }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
+ subject.exec
end
- end
- context 'when the user is not allowed to push to the repo' do
- it 'raises an error' do
- expect(user_access).to receive(:can_do_action?).with(:push_code).and_return(false)
- expect(user_access).to receive(:can_push_to_branch?).with('master').and_return(false)
+ it 'calls branch checks' do
+ expect_any_instance_of(Gitlab::Checks::BranchCheck).to receive(:validate!)
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to this project.')
+ subject.exec
end
- end
- context 'tags check' do
- let(:ref) { 'refs/tags/v1.0.0' }
+ it 'calls tags checks' do
+ expect_any_instance_of(Gitlab::Checks::TagCheck).to receive(:validate!)
- it 'raises an error if the user is not allowed to update tags' do
- allow(user_access).to receive(:can_do_action?).with(:push_code).and_return(true)
- expect(user_access).to receive(:can_do_action?).with(:admin_project).and_return(false)
-
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to change existing tags on this project.')
+ subject.exec
end
- context 'with protected tag' do
- let!(:protected_tag) { create(:protected_tag, project: project, name: 'v*') }
-
- context 'as maintainer' do
- before do
- project.add_maintainer(user)
- end
+ it 'calls lfs checks' do
+ expect_any_instance_of(Gitlab::Checks::LfsCheck).to receive(:validate!)
- context 'deletion' do
- let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
- let(:newrev) { '0000000000000000000000000000000000000000' }
-
- it 'is prevented' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be deleted/)
- end
- end
-
- context 'update' do
- let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
- let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
-
- it 'is prevented' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be updated/)
- end
- end
- end
-
- context 'creation' do
- let(:oldrev) { '0000000000000000000000000000000000000000' }
- let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
- let(:ref) { 'refs/tags/v9.1.0' }
-
- it 'prevents creation below access level' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /allowed to create this tag as it is protected/)
- end
-
- context 'when user has access' do
- let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') }
-
- it 'allows tag creation' do
- expect { subject.exec }.not_to raise_error
- end
- end
- end
+ subject.exec
end
- end
- context 'branches check' do
- context 'trying to delete the default branch' do
- let(:newrev) { '0000000000000000000000000000000000000000' }
- let(:ref) { 'refs/heads/master' }
+ it 'calls diff checks' do
+ expect_any_instance_of(Gitlab::Checks::DiffCheck).to receive(:validate!)
- it 'raises an error' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'The default branch of a project cannot be deleted.')
- end
- end
-
- context 'protected branches check' do
- before do
- allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true)
- allow(ProtectedBranch).to receive(:protected?).with(project, 'feature').and_return(true)
- end
-
- it 'raises an error if the user is not allowed to do forced pushes to protected branches' do
- expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
-
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to force push code to a protected branch on this project.')
- end
-
- it 'raises an error if the user is not allowed to merge to protected branches' do
- expect_any_instance_of(Gitlab::Checks::MatchingMergeRequest).to receive(:match?).and_return(true)
- expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
- expect(user_access).to receive(:can_push_to_branch?).and_return(false)
-
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to merge code into protected branches on this project.')
- end
-
- it 'raises an error if the user is not allowed to push to protected branches' do
- expect(user_access).to receive(:can_push_to_branch?).and_return(false)
-
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to protected branches on this project.')
- end
-
- context 'when project repository is empty' do
- let(:project) { create(:project) }
-
- it 'raises an error if the user is not allowed to push to protected branches' do
- expect(user_access).to receive(:can_push_to_branch?).and_return(false)
-
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /Ask a project Owner or Maintainer to create a default branch/)
- end
- end
-
- context 'branch deletion' do
- let(:newrev) { '0000000000000000000000000000000000000000' }
- let(:ref) { 'refs/heads/feature' }
-
- context 'if the user is not allowed to delete protected branches' do
- it 'raises an error' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to delete protected branches from this project. Only a project maintainer or owner can delete a protected branch.')
- end
- end
-
- context 'if the user is allowed to delete protected branches' do
- before do
- project.add_maintainer(user)
- end
-
- context 'through the web interface' do
- let(:protocol) { 'web' }
-
- it 'allows branch deletion' do
- expect { subject.exec }.not_to raise_error
- end
- end
-
- context 'over SSH or HTTP' do
- it 'raises an error' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only delete protected branches using the web interface.')
- end
- end
- end
- end
+ subject.exec
end
end
- context 'LFS integrity check' do
- it 'fails if any LFS blobs are missing' do
- allow_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).and_return(true)
-
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /LFS objects are missing/)
- end
-
- it 'succeeds if LFS objects have already been uploaded' do
- allow_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).and_return(false)
-
- expect { subject.exec }.not_to raise_error
- end
- end
-
- context 'LFS file lock check' do
- let(:owner) { create(:user) }
- let!(:lock) { create(:lfs_file_lock, user: owner, project: project, path: 'README') }
-
- before do
- allow(project.repository).to receive(:new_commits).and_return(
- project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
- )
- end
-
- context 'with LFS not enabled' do
- it 'skips the validation' do
- expect_any_instance_of(Gitlab::Checks::CommitCheck).not_to receive(:validate)
-
- subject.exec
- end
- end
-
- context 'with LFS enabled' do
- before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
- end
-
- context 'when change is sent by a different user' do
- it 'raises an error if the user is not allowed to update the file' do
- expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
- end
- end
-
- context 'when change is sent by the author of the lock' do
- let(:user) { owner }
+ context 'when time limit was reached' do
+ it 'raises a TimeoutError' do
+ logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout)
+ access = described_class.new(changes,
+ project: project,
+ user_access: user_access,
+ protocol: protocol,
+ logger: logger)
- it "doesn't raise any error" do
- expect { subject.exec }.not_to raise_error
- end
- end
+ expect { access.exec }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
end
end
end
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
new file mode 100644
index 00000000000..eeec1e83179
--- /dev/null
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::DiffCheck do
+ include_context 'change access checks context'
+
+ describe '#validate!' do
+ let(:owner) { create(:user) }
+ let!(:lock) { create(:lfs_file_lock, user: owner, project: project, path: 'README') }
+
+ before do
+ allow(project.repository).to receive(:new_commits).and_return(
+ project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
+ )
+ end
+
+ context 'with LFS not enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ end
+
+ it 'skips the validation' do
+ expect(subject).not_to receive(:validate_diff)
+ expect(subject).not_to receive(:validate_file_paths)
+
+ subject.validate!
+ end
+ end
+
+ context 'with LFS enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'when change is sent by a different user' do
+ it 'raises an error if the user is not allowed to update the file' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked in Git LFS by #{lock.user.name}")
+ end
+ end
+
+ context 'when change is sent by the author of the lock' do
+ let(:user) { owner }
+
+ it "doesn't raise any error" do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
new file mode 100644
index 00000000000..35f8069c8a4
--- /dev/null
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::LfsCheck do
+ include_context 'change access checks context'
+
+ let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') }
+
+ before do
+ allow_any_instance_of(Gitlab::Git::LfsChanges).to receive(:new_pointers) do
+ [blob_object]
+ end
+ end
+
+ describe '#validate!' do
+ context 'with LFS not enabled' do
+ it 'skips integrity check' do
+ expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers)
+
+ subject.validate!
+ end
+ end
+
+ context 'with LFS enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'deletion' do
+ let(:changes) { { oldrev: oldrev, ref: ref } }
+
+ it 'skips integrity check' do
+ expect(project.repository).not_to receive(:new_objects)
+
+ subject.validate!
+ end
+ end
+
+ it 'fails if any LFS blobs are missing' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /LFS objects are missing/)
+ end
+
+ it 'succeeds if LFS objects have already been uploaded' do
+ lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
+ create(:lfs_objects_project, project: project, lfs_object: lfs_object)
+
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/push_check_spec.rb b/spec/lib/gitlab/checks/push_check_spec.rb
new file mode 100644
index 00000000000..25f0d428cb9
--- /dev/null
+++ b/spec/lib/gitlab/checks/push_check_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::PushCheck do
+ include_context 'change access checks context'
+
+ describe '#validate!' do
+ it 'does not raise any error' do
+ expect { subject.validate! }.not_to raise_error
+ end
+
+ context 'when the user is not allowed to push to the repo' do
+ it 'raises an error' do
+ expect(user_access).to receive(:can_do_action?).with(:push_code).and_return(false)
+ expect(user_access).to receive(:can_push_to_branch?).with('master').and_return(false)
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to this project.')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
new file mode 100644
index 00000000000..b1258270611
--- /dev/null
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Checks::TagCheck do
+ include_context 'change access checks context'
+
+ describe '#validate!' do
+ let(:ref) { 'refs/tags/v1.0.0' }
+
+ it 'raises an error if the user is not allowed to update tags' do
+ allow(user_access).to receive(:can_do_action?).with(:push_code).and_return(true)
+ expect(user_access).to receive(:can_do_action?).with(:admin_project).and_return(false)
+
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to change existing tags on this project.')
+ end
+
+ context 'with protected tag' do
+ let!(:protected_tag) { create(:protected_tag, project: project, name: 'v*') }
+
+ context 'as maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'deletion' do
+ let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+
+ it 'is prevented' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be deleted/)
+ end
+ end
+
+ context 'update' do
+ let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
+ let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+
+ it 'is prevented' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be updated/)
+ end
+ end
+ end
+
+ context 'creation' do
+ let(:oldrev) { '0000000000000000000000000000000000000000' }
+ let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+ let(:ref) { 'refs/tags/v9.1.0' }
+
+ it 'prevents creation below access level' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /allowed to create this tag as it is protected/)
+ end
+
+ context 'when user has access' do
+ let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') }
+
+ it 'allows tag creation' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/except_policy_spec.rb b/spec/lib/gitlab/ci/config/entry/except_policy_spec.rb
new file mode 100644
index 00000000000..d036bf2f4d1
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/except_policy_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::ExceptPolicy do
+ let(:entry) { described_class.new(config) }
+
+ it_behaves_like 'correct only except policy'
+
+ describe '.default' do
+ it 'does not have a default value' do
+ expect(described_class.default).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/global_spec.rb b/spec/lib/gitlab/ci/config/entry/global_spec.rb
index 7c18514934e..12f4b9dc624 100644
--- a/spec/lib/gitlab/ci/config/entry/global_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/global_spec.rb
@@ -160,7 +160,8 @@ describe Gitlab::Ci::Config::Entry::Global do
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
variables: { 'VAR' => 'value' },
ignore: false,
- after_script: ['make clean'] },
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] } },
spinach: { name: :spinach,
before_script: [],
script: %w[spinach],
@@ -171,7 +172,8 @@ describe Gitlab::Ci::Config::Entry::Global do
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
variables: {},
ignore: false,
- after_script: ['make clean'] }
+ after_script: ['make clean'],
+ only: { refs: %w[branches tags] } }
)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 57d4577a90c..c1f4a060063 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -258,7 +258,8 @@ describe Gitlab::Ci::Config::Entry::Job do
commands: "ls\npwd\nrspec",
stage: 'test',
ignore: false,
- after_script: %w[cleanup])
+ after_script: %w[cleanup],
+ only: { refs: %w[branches tags] })
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
index c0a2b6517e3..2a753408f54 100644
--- a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
@@ -67,12 +67,14 @@ describe Gitlab::Ci::Config::Entry::Jobs do
script: %w[rspec],
commands: 'rspec',
ignore: false,
- stage: 'test' },
+ stage: 'test',
+ only: { refs: %w[branches tags] } },
spinach: { name: :spinach,
script: %w[spinach],
commands: 'spinach',
ignore: false,
- stage: 'test' })
+ stage: 'test',
+ only: { refs: %w[branches tags] } })
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/only_policy_spec.rb b/spec/lib/gitlab/ci/config/entry/only_policy_spec.rb
new file mode 100644
index 00000000000..5518b68e51a
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/only_policy_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::OnlyPolicy do
+ let(:entry) { described_class.new(config) }
+
+ it_behaves_like 'correct only except policy'
+
+ describe '.default' do
+ it 'has a default value' do
+ expect(described_class.default).to eq( { refs: %w[branches tags] } )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/policy_spec.rb b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
index 83001b7fdd8..cf40a22af2e 100644
--- a/spec/lib/gitlab/ci/config/entry/policy_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/policy_spec.rb
@@ -1,173 +1,8 @@
-require 'fast_spec_helper'
-require_dependency 'active_model'
+require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Policy do
let(:entry) { described_class.new(config) }
- context 'when using simplified policy' do
- describe 'validations' do
- context 'when entry config value is valid' do
- context 'when config is a branch or tag name' do
- let(:config) { %w[master feature/branch] }
-
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
-
- describe '#value' do
- it 'returns refs hash' do
- expect(entry.value).to eq(refs: config)
- end
- end
- end
-
- context 'when config is a regexp' do
- let(:config) { ['/^issue-.*$/'] }
-
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
- end
-
- context 'when config is a special keyword' do
- let(:config) { %w[tags triggers branches] }
-
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
- end
- end
-
- context 'when entry value is not valid' do
- let(:config) { [1] }
-
- describe '#errors' do
- it 'saves errors' do
- expect(entry.errors)
- .to include /policy config should be an array of strings or regexps/
- end
- end
- end
- end
- end
-
- context 'when using complex policy' do
- context 'when specifying refs policy' do
- let(:config) { { refs: ['master'] } }
-
- it 'is a correct configuraton' do
- expect(entry).to be_valid
- expect(entry.value).to eq(refs: %w[master])
- end
- end
-
- context 'when specifying kubernetes policy' do
- let(:config) { { kubernetes: 'active' } }
-
- it 'is a correct configuraton' do
- expect(entry).to be_valid
- expect(entry.value).to eq(kubernetes: 'active')
- end
- end
-
- context 'when specifying invalid kubernetes policy' do
- let(:config) { { kubernetes: 'something' } }
-
- it 'reports an error about invalid policy' do
- expect(entry.errors).to include /unknown value: something/
- end
- end
-
- context 'when specifying valid variables expressions policy' do
- let(:config) { { variables: ['$VAR == null'] } }
-
- it 'is a correct configuraton' do
- expect(entry).to be_valid
- expect(entry.value).to eq(config)
- end
- end
-
- context 'when specifying variables expressions in invalid format' do
- let(:config) { { variables: '$MY_VAR' } }
-
- it 'reports an error about invalid format' do
- expect(entry.errors).to include /should be an array of strings/
- end
- end
-
- context 'when specifying invalid variables expressions statement' do
- let(:config) { { variables: ['$MY_VAR =='] } }
-
- it 'reports an error about invalid statement' do
- expect(entry.errors).to include /invalid expression syntax/
- end
- end
-
- context 'when specifying invalid variables expressions token' do
- let(:config) { { variables: ['$MY_VAR == 123'] } }
-
- it 'reports an error about invalid expression' do
- expect(entry.errors).to include /invalid expression syntax/
- end
- end
-
- context 'when using invalid variables expressions regexp' do
- let(:config) { { variables: ['$MY_VAR =~ /some ( thing/'] } }
-
- it 'reports an error about invalid expression' do
- expect(entry.errors).to include /invalid expression syntax/
- end
- end
-
- context 'when specifying a valid changes policy' do
- let(:config) { { changes: %w[some/* paths/**/*.rb] } }
-
- it 'is a correct configuraton' do
- expect(entry).to be_valid
- expect(entry.value).to eq(config)
- end
- end
-
- context 'when changes policy is invalid' do
- let(:config) { { changes: [1, 2] } }
-
- it 'returns errors' do
- expect(entry.errors).to include /changes should be an array of strings/
- end
- end
-
- context 'when specifying unknown policy' do
- let(:config) { { refs: ['master'], invalid: :something } }
-
- it 'returns error about invalid key' do
- expect(entry.errors).to include /unknown keys: invalid/
- end
- end
-
- context 'when policy is empty' do
- let(:config) { {} }
-
- it 'is not a valid configuration' do
- expect(entry.errors).to include /can't be blank/
- end
- end
- end
-
- context 'when policy strategy does not match' do
- let(:config) { 'string strategy' }
-
- it 'returns information about errors' do
- expect(entry.errors)
- .to include /has to be either an array of conditions or a hash/
- end
- end
-
describe '.default' do
it 'does not have a default value' do
expect(described_class.default).to be_nil
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index 85d73e5c382..fab071405df 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -18,6 +18,7 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
before_sha: nil,
trigger_request: nil,
schedule: nil,
+ merge_request: nil,
project: project,
current_user: user,
variables_attributes: variables_attributes)
@@ -76,6 +77,7 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
before_sha: nil,
trigger_request: nil,
schedule: nil,
+ merge_request: nil,
project: project,
current_user: user)
end
@@ -90,4 +92,31 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
expect(pipeline).to be_tag
end
end
+
+ context 'when pipeline is running for a merge request' do
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ source: :merge_request,
+ origin_ref: 'feature',
+ checkout_sha: project.commit.id,
+ after_sha: nil,
+ before_sha: nil,
+ trigger_request: nil,
+ schedule: nil,
+ merge_request: merge_request,
+ project: project,
+ current_user: user)
+ end
+
+ let(:merge_request) { build(:merge_request, target_project: project) }
+
+ before do
+ step.perform!
+ end
+
+ it 'correctly indicates that this is a merge request pipeline' do
+ expect(pipeline).to be_merge_request
+ expect(pipeline.merge_request).to eq(merge_request)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
index a8dc5356413..053bc421649 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
@@ -106,4 +106,34 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
expect(step.break?).to be false
end
end
+
+ context 'when pipeline source is merge request' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
+
+ let(:merge_request_pipeline) do
+ build(:ci_pipeline, source: :merge_request, project: project)
+ end
+
+ let(:chain) { described_class.new(merge_request_pipeline, command).tap(&:perform!) }
+
+ context "when config contains 'merge_requests' keyword" do
+ let(:config) { { rspec: { script: 'echo', only: ['merge_requests'] } } }
+
+ it 'does not break the chain' do
+ expect(chain).not_to be_break
+ end
+ end
+
+ context "when config contains 'merge_request' keyword" do
+ let(:config) { { rspec: { script: 'echo', only: ['merge_request'] } } }
+
+ it 'does not break the chain' do
+ expect(chain).not_to be_break
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/crypto_helper_spec.rb b/spec/lib/gitlab/crypto_helper_spec.rb
new file mode 100644
index 00000000000..05cc6cf15de
--- /dev/null
+++ b/spec/lib/gitlab/crypto_helper_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::CryptoHelper do
+ describe '.sha256' do
+ it 'generates a Base64 encoded SHA256 digest' do
+ digest = described_class.sha256('some-value')
+
+ expect(digest).to match %r{\A[A-Za-z0-9+/=]+\z}
+ expect(digest).to eq digest.strip
+ end
+ end
+
+ describe '.aes256_gcm_encrypt' do
+ it 'returns a Base64 encoded string without a new line character' do
+ encrypted = described_class.aes256_gcm_encrypt('some-value')
+
+ expect(encrypted).to match %r{\A[A-Za-z0-9+/=]+\z}
+ expect(encrypted).not_to include "\n"
+ end
+ end
+
+ describe '.aes256_gcm_decrypt' do
+ let(:encrypted) { described_class.aes256_gcm_encrypt('some-value') }
+
+ it 'correctly decrypts encrypted string' do
+ decrypted = described_class.aes256_gcm_decrypt(encrypted)
+
+ expect(decrypted).to eq 'some-value'
+ end
+
+ it 'decrypts a value when it ends with a new line character' do
+ decrypted = described_class.aes256_gcm_decrypt(encrypted + "\n")
+
+ expect(decrypted).to eq 'some-value'
+ end
+ end
+end
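
The "without a new line character" expectation above comes down to which Base64 helper is used. As an illustrative aside (not part of the patch, and not a claim about how CryptoHelper is implemented internally), Ruby's standard library makes the difference easy to see: Base64.encode64 inserts a line feed after every 60 encoded characters, while Base64.strict_encode64 does not.

    require 'base64'

    long_value = 'x' * 100

    # encode64 wraps its output, so the result contains "\n" characters.
    Base64.encode64(long_value).include?("\n")        # => true

    # strict_encode64 produces a single unwrapped line, matching the spec's
    # expectation that the encrypted value contains no new line character.
    Base64.strict_encode64(long_value).include?("\n") # => false
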
diff --git a/spec/lib/gitlab/database/count/exact_count_strategy_spec.rb b/spec/lib/gitlab/database/count/exact_count_strategy_spec.rb
new file mode 100644
index 00000000000..3991c737a26
--- /dev/null
+++ b/spec/lib/gitlab/database/count/exact_count_strategy_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::Database::Count::ExactCountStrategy do
+ before do
+ create_list(:project, 3)
+ create(:identity)
+ end
+
+ let(:models) { [Project, Identity] }
+
+ subject { described_class.new(models).count }
+
+ describe '#count' do
+ it 'counts all models' do
+ expect(models).to all(receive(:count).and_call_original)
+
+ expect(subject).to eq({ Project => 3, Identity => 1 })
+ end
+
+ it 'returns default value if count times out' do
+ allow(models.first).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect(subject).to eq({})
+ end
+ end
+
+ describe '.enabled?' do
+ it 'is enabled for PostgreSQL' do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
+
+ expect(described_class.enabled?).to be_truthy
+ end
+
+ it 'is enabled for MySQL' do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.enabled?).to be_truthy
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
new file mode 100644
index 00000000000..b44e8c5a110
--- /dev/null
+++ b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
@@ -0,0 +1,48 @@
+require 'spec_helper'
+
+describe Gitlab::Database::Count::ReltuplesCountStrategy do
+ before do
+ create_list(:project, 3)
+ create(:identity)
+ end
+
+ let(:models) { [Project, Identity] }
+ subject { described_class.new(models).count }
+
+ describe '#count', :postgresql do
+ context 'when reltuples is up to date' do
+ before do
+ ActiveRecord::Base.connection.execute('ANALYZE projects')
+ ActiveRecord::Base.connection.execute('ANALYZE identities')
+ end
+
+ it 'uses statistics to do the count' do
+ models.each { |model| expect(model).not_to receive(:count) }
+
+ expect(subject).to eq({ Project => 3, Identity => 1 })
+ end
+ end
+
+ context 'insufficient permissions' do
+ it 'returns an empty hash' do
+ allow(ActiveRecord::Base).to receive(:transaction).and_raise(PG::InsufficientPrivilege)
+
+ expect(subject).to eq({})
+ end
+ end
+ end
+
+ describe '.enabled?' do
+ it 'is enabled for PostgreSQL' do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
+
+ expect(described_class.enabled?).to be_truthy
+ end
+
+ it 'is disabled for MySQL' do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.enabled?).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
new file mode 100644
index 00000000000..203f9344a41
--- /dev/null
+++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
@@ -0,0 +1,65 @@
+require 'spec_helper'
+
+describe Gitlab::Database::Count::TablesampleCountStrategy do
+ before do
+ create_list(:project, 3)
+ create(:identity)
+ end
+
+ let(:models) { [Project, Identity] }
+ let(:strategy) { described_class.new(models) }
+
+ subject { strategy.count }
+
+ describe '#count', :postgresql do
+ let(:estimates) { { Project => threshold + 1, Identity => threshold - 1 } }
+ let(:threshold) { Gitlab::Database::Count::TablesampleCountStrategy::EXACT_COUNT_THRESHOLD }
+
+ before do
+ allow(strategy).to receive(:size_estimates).with(check_statistics: false).and_return(estimates)
+ end
+
+ context 'for tables with an estimated small size' do
+ it 'performs an exact count' do
+ expect(Identity).to receive(:count).and_call_original
+
+ expect(subject).to include({ Identity => 1 })
+ end
+ end
+
+ context 'for tables with an estimated large size' do
+ it 'performs a tablesample count' do
+ expect(Project).not_to receive(:count)
+
+ result = subject
+ expect(result[Project]).to eq(3)
+ end
+ end
+
+ context 'insufficient permissions' do
+ it 'returns an empty hash' do
+ allow(strategy).to receive(:size_estimates).and_raise(PG::InsufficientPrivilege)
+
+ expect(subject).to eq({})
+ end
+ end
+ end
+
+ describe '.enabled?' do
+ before do
+ stub_feature_flags(tablesample_counts: true)
+ end
+
+ it 'is enabled for PostgreSQL' do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
+
+ expect(described_class.enabled?).to be_truthy
+ end
+
+ it 'is disabled for MySQL' do
+ allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.enabled?).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/count_spec.rb b/spec/lib/gitlab/database/count_spec.rb
index 407d9470785..1d096b8fa7c 100644
--- a/spec/lib/gitlab/database/count_spec.rb
+++ b/spec/lib/gitlab/database/count_spec.rb
@@ -8,63 +8,51 @@ describe Gitlab::Database::Count do
let(:models) { [Project, Identity] }
- describe '.approximate_counts' do
- context 'with MySQL' do
- context 'when reltuples have not been updated' do
- it 'counts all models the normal way' do
- expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
+ context '.approximate_counts' do
+ context 'selecting strategies' do
+ let(:strategies) { [double('s1', enabled?: true), double('s2', enabled?: false)] }
- expect(Project).to receive(:count).and_call_original
- expect(Identity).to receive(:count).and_call_original
+ it 'uses only enabled strategies' do
+ expect(strategies[0]).to receive(:new).and_return(double('strategy1', count: {}))
+ expect(strategies[1]).not_to receive(:new)
- expect(described_class.approximate_counts(models)).to eq({ Project => 3, Identity => 1 })
- end
+ described_class.approximate_counts(models, strategies: strategies)
end
end
- context 'with PostgreSQL', :postgresql do
- describe 'when reltuples have not been updated' do
- it 'counts all models the normal way' do
- expect(described_class).to receive(:reltuples_from_recently_updated).with(%w(projects identities)).and_return({})
+ context 'fallbacks' do
+ subject { described_class.approximate_counts(models, strategies: strategies) }
- expect(Project).to receive(:count).and_call_original
- expect(Identity).to receive(:count).and_call_original
- expect(described_class.approximate_counts(models)).to eq({ Project => 3, Identity => 1 })
- end
+ let(:strategies) do
+ [
+ double('s1', enabled?: true, new: first_strategy),
+ double('s2', enabled?: true, new: second_strategy)
+ ]
end
- describe 'no permission' do
- it 'falls back to standard query' do
- allow(described_class).to receive(:postgresql_estimate_query).and_raise(PG::InsufficientPrivilege)
+ let(:first_strategy) { double('first strategy', count: {}) }
+ let(:second_strategy) { double('second strategy', count: {}) }
- expect(Project).to receive(:count).and_call_original
- expect(Identity).to receive(:count).and_call_original
- expect(described_class.approximate_counts(models)).to eq({ Project => 3, Identity => 1 })
- end
+ it 'gets results from first strategy' do
+ expect(strategies[0]).to receive(:new).with(models).and_return(first_strategy)
+ expect(first_strategy).to receive(:count)
+
+ subject
end
- describe 'when some reltuples have been updated' do
- it 'counts projects in the fast way' do
- expect(described_class).to receive(:reltuples_from_recently_updated).with(%w(projects identities)).and_return({ 'projects' => 3 })
+ it 'gets more results from second strategy if some counts are missing' do
+ expect(first_strategy).to receive(:count).and_return({ Project => 3 })
+ expect(strategies[1]).to receive(:new).with([Identity]).and_return(second_strategy)
+ expect(second_strategy).to receive(:count).and_return({ Identity => 1 })
- expect(Project).not_to receive(:count).and_call_original
- expect(Identity).to receive(:count).and_call_original
- expect(described_class.approximate_counts(models)).to eq({ Project => 3, Identity => 1 })
- end
+ expect(subject).to eq({ Project => 3, Identity => 1 })
end
- describe 'when all reltuples have been updated' do
- before do
- ActiveRecord::Base.connection.execute('ANALYZE projects')
- ActiveRecord::Base.connection.execute('ANALYZE identities')
- end
-
- it 'counts models with the standard way' do
- expect(Project).not_to receive(:count)
- expect(Identity).not_to receive(:count)
+ it 'does not consult further strategies once all counts are present' do
+ expect(first_strategy).to receive(:count).and_return({ Project => 3, Identity => 1 })
+ expect(strategies[1]).not_to receive(:new)
- expect(described_class.approximate_counts(models)).to eq({ Project => 3, Identity => 1 })
- end
+ subject
end
end
end
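
The fallback behaviour exercised above - ask the first enabled strategy for counts, then hand only the still-missing models to the next strategy - can be summarised in a short sketch. This is illustrative only: the class and method names ApproximateCounter and counts_for are invented for the example, and it merely assumes strategies respond to .enabled?, .new(models) and #count, exactly as the doubles in the spec do.

    # Returns a Hash of model => count, trying strategies in order and only
    # asking later strategies about models that still have no count.
    class ApproximateCounter
      def initialize(strategies)
        @strategies = strategies
      end

      def counts_for(models)
        @strategies.select(&:enabled?).reduce({}) do |counts, strategy|
          missing = models - counts.keys
          break counts if missing.empty?

          counts.merge(strategy.new(missing).count)
        end
      end
    end
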
diff --git a/spec/lib/gitlab/diff/file_collection/commit_spec.rb b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
index 6d1b66deb6a..34ed22b8941 100644
--- a/spec/lib/gitlab/diff/file_collection/commit_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
@@ -12,4 +12,8 @@ describe Gitlab::Diff::FileCollection::Commit do
let(:diffable) { project.commit }
let(:stub_path) { 'bar/branch-test.txt' }
end
+
+ it_behaves_like 'unfoldable diff' do
+ let(:diffable) { project.commit }
+ end
end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
index fbcf515281e..256166dbad3 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
@@ -2,22 +2,29 @@ require 'spec_helper'
describe Gitlab::Diff::FileCollection::MergeRequestDiff do
let(:merge_request) { create(:merge_request) }
- let(:diff_files) { described_class.new(merge_request.merge_request_diff, diff_options: nil).diff_files }
+ let(:subject) { described_class.new(merge_request.merge_request_diff, diff_options: nil) }
+ let(:diff_files) { subject.diff_files }
- it 'does not highlight binary files' do
- allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(false)
+ describe '#diff_files' do
+ it 'does not highlight binary files' do
+ allow_any_instance_of(Gitlab::Diff::File).to receive(:text?).and_return(false)
- expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
+ expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
- diff_files
- end
+ diff_files
+ end
+
+ it 'does not highlight files marked as undiffable in .gitattributes' do
+ allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false)
- it 'does not highlight files marked as undiffable in .gitattributes' do
- allow_any_instance_of(Gitlab::Diff::File).to receive(:diffable?).and_return(false)
+ expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
- expect_any_instance_of(Gitlab::Diff::File).not_to receive(:highlighted_diff_lines)
+ diff_files
+ end
+ end
- diff_files
+ it_behaves_like 'unfoldable diff' do
+ let(:diffable) { merge_request.merge_request_diff }
end
it 'it uses a different cache key if diff line keys change' do
diff --git a/spec/lib/gitlab/git/repository_cleaner_spec.rb b/spec/lib/gitlab/git/repository_cleaner_spec.rb
new file mode 100644
index 00000000000..a9d9e67ef94
--- /dev/null
+++ b/spec/lib/gitlab/git/repository_cleaner_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::Git::RepositoryCleaner do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:head_sha) { repository.head_commit.id }
+
+ let(:object_map) { StringIO.new("#{head_sha} #{'0' * 40}") }
+
+ subject(:cleaner) { described_class.new(repository.raw) }
+
+ describe '#apply_bfg_object_map' do
+ it 'removes internal references pointing at SHAs in the object map' do
+ # Create some refs we expect to be removed
+ repository.keep_around(head_sha)
+ repository.create_ref(head_sha, 'refs/environments/1')
+ repository.create_ref(head_sha, 'refs/merge-requests/1')
+ repository.create_ref(head_sha, 'refs/heads/_keep')
+ repository.create_ref(head_sha, 'refs/tags/_keep')
+
+ cleaner.apply_bfg_object_map(object_map)
+
+ aggregate_failures do
+ expect(repository.kept_around?(head_sha)).to be_falsy
+ expect(repository.ref_exists?('refs/environments/1')).to be_falsy
+ expect(repository.ref_exists?('refs/merge-requests/1')).to be_falsy
+ expect(repository.ref_exists?('refs/heads/_keep')).to be_truthy
+ expect(repository.ref_exists?('refs/tags/_keep')).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb b/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
new file mode 100644
index 00000000000..369deff732a
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/cleanup_service_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::CleanupService do
+ let(:project) { create(:project) }
+ let(:storage_name) { project.repository_storage }
+ let(:relative_path) { project.disk_path + '.git' }
+ let(:client) { described_class.new(project.repository) }
+
+ describe '#apply_bfg_object_map' do
+ it 'sends an apply_bfg_object_map message' do
+ expect_any_instance_of(Gitaly::CleanupService::Stub)
+ .to receive(:apply_bfg_object_map)
+ .with(kind_of(Enumerator), kind_of(Hash))
+ .and_return(double)
+
+ client.apply_bfg_object_map(StringIO.new)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb
index 8c6d673391b..8229f0eb794 100644
--- a/spec/lib/gitlab/gpg/commit_spec.rb
+++ b/spec/lib/gitlab/gpg/commit_spec.rb
@@ -26,6 +26,28 @@ describe Gitlab::Gpg::Commit do
end
end
+ context 'invalid signature' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User1.emails.first }
+
+ let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit_sha)
+ .and_return(
+ [
+ # Corrupt the key
+ GpgHelpers::User1.signed_commit_signature.tr('=', 'a'),
+ GpgHelpers::User1.signed_commit_base_data
+ ]
+ )
+ end
+
+ it 'returns nil' do
+ expect(described_class.new(commit).signature).to be_nil
+ end
+ end
+
context 'known key' do
context 'user matches the key uid' do
context 'user email matches the email committer' do
diff --git a/spec/lib/gitlab/group_hierarchy_spec.rb b/spec/lib/gitlab/group_hierarchy_spec.rb
index 30686634af4..f3de7adcec7 100644
--- a/spec/lib/gitlab/group_hierarchy_spec.rb
+++ b/spec/lib/gitlab/group_hierarchy_spec.rb
@@ -34,6 +34,28 @@ describe Gitlab::GroupHierarchy, :postgresql do
expect { relation.update_all(share_with_group_lock: false) }
.to raise_error(ActiveRecord::ReadOnlyRecord)
end
+
+ describe 'hierarchy_order option' do
+ let(:relation) do
+ described_class.new(Group.where(id: child2.id)).base_and_ancestors(hierarchy_order: hierarchy_order)
+ end
+
+ context ':asc' do
+ let(:hierarchy_order) { :asc }
+
+ it 'orders by child to parent' do
+ expect(relation).to eq([child2, child1, parent])
+ end
+ end
+
+ context ':desc' do
+ let(:hierarchy_order) { :desc }
+
+ it 'orders by parent to child' do
+ expect(relation).to eq([parent, child1, child2])
+ end
+ end
+ end
end
describe '#base_and_descendants' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 31ab11bbf8d..7df129da95a 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -94,6 +94,7 @@ merge_requests:
- timelogs
- head_pipeline
- latest_merge_request_diff
+- merge_request_pipelines
merge_request_diff:
- merge_request
- merge_request_diff_commits
@@ -102,7 +103,7 @@ merge_request_diff_commits:
- merge_request_diff
merge_request_diff_files:
- merge_request_diff
-pipelines:
+ci_pipelines:
- project
- user
- stages
@@ -121,6 +122,7 @@ pipelines:
- artifacts
- pipeline_schedule
- merge_requests
+- merge_request
- deployments
- environments
pipeline_variables:
@@ -263,7 +265,8 @@ project:
- notification_settings
- import_data
- commit_statuses
-- pipelines
+- ci_pipelines
+- all_pipelines
- stages
- builds
- runner_projects
@@ -304,6 +307,7 @@ project:
- import_export_upload
- repository_languages
- pool_repository
+- kubernetes_namespaces
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index 7171e12a849..242c16c4bdc 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -197,9 +197,9 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'has the correct number of pipelines and statuses' do
- expect(@project.pipelines.size).to eq(5)
+ expect(@project.ci_pipelines.size).to eq(5)
- @project.pipelines.zip([2, 2, 2, 2, 2])
+ @project.ci_pipelines.zip([2, 2, 2, 2, 2])
.each do |(pipeline, expected_status_size)|
expect(pipeline.statuses.size).to eq(expected_status_size)
end
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index 5dc372263ad..46fdfba953b 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -119,16 +119,16 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
end
it 'has pipeline stages' do
- expect(saved_project_json.dig('pipelines', 0, 'stages')).not_to be_empty
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
end
it 'has pipeline statuses' do
- expect(saved_project_json.dig('pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
end
it 'has pipeline builds' do
builds_count = saved_project_json
- .dig('pipelines', 0, 'stages', 0, 'statuses')
+ .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
.count { |hash| hash['type'] == 'Ci::Build' }
expect(builds_count).to eq(1)
@@ -142,11 +142,11 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
end
it 'has pipeline commits' do
- expect(saved_project_json['pipelines']).not_to be_empty
+ expect(saved_project_json['ci_pipelines']).not_to be_empty
end
it 'has ci pipeline notes' do
- expect(saved_project_json['pipelines'].first['notes']).not_to be_empty
+ expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
end
it 'has labels with no associations' do
diff --git a/spec/lib/gitlab/import_export/relation_rename_service_spec.rb b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
new file mode 100644
index 00000000000..a20a844a492
--- /dev/null
+++ b/spec/lib/gitlab/import_export/relation_rename_service_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::RelationRenameService do
+ let(:renames) do
+ {
+ 'example_relation1' => 'new_example_relation1',
+ 'example_relation2' => 'new_example_relation2'
+ }
+ end
+
+ let(:user) { create(:admin) }
+ let(:group) { create(:group, :nested) }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:shared) { project.import_export_shared }
+
+ before do
+ stub_const("#{described_class}::RENAMES", renames)
+ end
+
+ context 'when importing' do
+ let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) }
+ let(:import_path) { 'spec/lib/gitlab/import_export' }
+ let(:file_content) { IO.read("#{import_path}/project.json") }
+ let!(:json_file) { ActiveSupport::JSON.decode(file_content) }
+ let(:tree_hash) { project_tree_restorer.instance_variable_get(:@tree_hash) }
+
+ before do
+ allow(shared).to receive(:export_path).and_return(import_path)
+ allow(ActiveSupport::JSON).to receive(:decode).with(file_content).and_return(json_file)
+ end
+
+ context 'when the file has only old relationship names' do
+ # Configuring the json as an old version exported file, with only
+ # the previous association with the old name
+ before do
+ renames.each do |old_name, _|
+ json_file[old_name.to_s] = []
+ end
+ end
+
+ it 'renames old relationships to the new name' do
+ expect(json_file.keys).to include(*renames.keys)
+
+ project_tree_restorer.restore
+
+ expect(json_file.keys).to include(*renames.values)
+ expect(json_file.keys).not_to include(*renames.keys)
+ end
+ end
+
+ context 'when the file has both the old and new relationships' do
+ # Configuring the json as the new version exported file, with both
+ # the old association name and the new one
+ before do
+ renames.each do |old_name, new_name|
+ json_file[old_name.to_s] = [1]
+ json_file[new_name.to_s] = [2]
+ end
+ end
+
+ it 'uses the new relationships and removes the old ones from the hash' do
+ expect(json_file.keys).to include(*renames.keys)
+
+ project_tree_restorer.restore
+
+ expect(json_file.keys).to include(*renames.values)
+ expect(json_file.values_at(*renames.values).flatten.uniq.first).to eq 2
+ expect(json_file.keys).not_to include(*renames.keys)
+ end
+ end
+
+ context 'when the file has only new relationship names' do
+ # Configuring the json as the future version exported file, with only
+ # the new association name
+ before do
+ renames.each do |_, new_name|
+ json_file[new_name.to_s] = []
+ end
+ end
+
+ it 'uses the new relationships' do
+ expect(json_file.keys).not_to include(*renames.keys)
+
+ project_tree_restorer.restore
+
+ expect(json_file.keys).to include(*renames.values)
+ end
+ end
+ end
+
+ context 'when exporting' do
+ let(:project_tree_saver) { Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: user, shared: shared) }
+ let(:project_tree) { project_tree_saver.send(:project_json) }
+
+ it 'adds old relationships to the exported file' do
+ project_tree.merge!(renames.values.map { |new_name| [new_name, []] }.to_h)
+
+ allow(project_tree_saver).to receive(:save) do |arg|
+ project_tree_saver.send(:project_json_tree)
+ end
+
+ result = project_tree_saver.save
+
+ saved_data = ActiveSupport::JSON.decode(result)
+
+ expect(saved_data.keys).to include(*(renames.keys + renames.values))
+ end
+ end
+end
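
On import, the behaviour described above amounts to renaming legacy relation keys in the decoded project tree, preferring the new key when both are present. Below is a minimal sketch of that idea; the method name rename_relations! is invented here, and the sample 'pipelines' => 'ci_pipelines' mapping is only an assumption based on the other spec changes in this diff.

    RENAMES = { 'pipelines' => 'ci_pipelines' }.freeze

    def rename_relations!(tree_hash, renames = RENAMES)
      renames.each do |old_name, new_name|
        next unless tree_hash.key?(old_name)

        if tree_hash.key?(new_name)
          # Both keys exist: keep the new relation and drop the legacy one.
          tree_hash.delete(old_name)
        else
          # Only the legacy key exists: move its value under the new name.
          tree_hash[new_name] = tree_hash.delete(old_name)
        end
      end

      tree_hash
    end
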
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index f7935149b23..d3bfde181bc 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -243,6 +243,7 @@ Ci::Pipeline:
- failure_reason
- protected
- iid
+- merge_request_id
Ci::Stage:
- id
- name
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 3979a43216c..8fc85301304 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -99,6 +99,7 @@ describe Gitlab::Kubernetes::KubeClient do
:create_secret,
:create_service_account,
:update_config_map,
+ :update_secret,
:update_service_account
].each do |method|
describe "##{method}" do
@@ -174,6 +175,84 @@ describe Gitlab::Kubernetes::KubeClient do
end
end
+ shared_examples 'create_or_update method' do
+ let(:get_method) { "get_#{resource_type}" }
+ let(:update_method) { "update_#{resource_type}" }
+ let(:create_method) { "create_#{resource_type}" }
+
+ context 'resource exists' do
+ before do
+ expect(client).to receive(get_method).and_return(resource)
+ end
+
+ it 'calls the update method' do
+ expect(client).to receive(update_method).with(resource)
+
+ subject
+ end
+ end
+
+ context 'resource does not exist' do
+ before do
+ expect(client).to receive(get_method).and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil))
+ end
+
+ it 'calls the create method' do
+ expect(client).to receive(create_method).with(resource)
+
+ subject
+ end
+ end
+ end
+
+ describe '#create_or_update_cluster_role_binding' do
+ let(:resource_type) { 'cluster_role_binding' }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(metadata: { name: 'name', namespace: 'namespace' })
+ end
+
+ subject { client.create_or_update_cluster_role_binding(resource) }
+
+ it_behaves_like 'create_or_update method'
+ end
+
+ describe '#create_or_update_role_binding' do
+ let(:resource_type) { 'role_binding' }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(metadata: { name: 'name', namespace: 'namespace' })
+ end
+
+ subject { client.create_or_update_role_binding(resource) }
+
+ it_behaves_like 'create_or_update method'
+ end
+
+ describe '#create_or_update_service_account' do
+ let(:resource_type) { 'service_account' }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(metadata: { name: 'name', namespace: 'namespace' })
+ end
+
+ subject { client.create_or_update_service_account(resource) }
+
+ it_behaves_like 'create_or_update method'
+ end
+
+ describe '#create_or_update_secret' do
+ let(:resource_type) { 'secret' }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(metadata: { name: 'name', namespace: 'namespace' })
+ end
+
+ subject { client.create_or_update_secret(resource) }
+
+ it_behaves_like 'create_or_update method'
+ end
+
describe 'methods that do not exist on any client' do
it 'throws an error' do
expect { client.non_existent_method }.to raise_error(NoMethodError)
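
The 'create_or_update method' shared examples above all exercise the same flow: fetch the resource, update it when the fetch succeeds, and create it when the lookup raises a not-found error. A rough sketch of that flow follows; it is not the actual GitLab wrapper, the helper name create_or_update is invented for the example, and the lookup arguments are an assumption (the spec's doubles do not constrain them).

    require 'kubeclient'

    def create_or_update(client, resource_type, resource)
      # Look the resource up first; Kubeclient raises ResourceNotFoundError
      # when it does not exist yet.
      client.public_send("get_#{resource_type}", resource.metadata.name, resource.metadata.namespace)
      client.public_send("update_#{resource_type}", resource)
    rescue Kubeclient::ResourceNotFoundError
      client.public_send("create_#{resource_type}", resource)
    end
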
diff --git a/spec/lib/gitlab/kubernetes_spec.rb b/spec/lib/gitlab/kubernetes_spec.rb
index 5c03a2ce7d3..f326d57e9c6 100644
--- a/spec/lib/gitlab/kubernetes_spec.rb
+++ b/spec/lib/gitlab/kubernetes_spec.rb
@@ -48,26 +48,30 @@ describe Gitlab::Kubernetes do
end
describe '#to_kubeconfig' do
+ let(:token) { 'TOKEN' }
+ let(:ca_pem) { 'PEM' }
+
subject do
to_kubeconfig(
url: 'https://kube.domain.com',
namespace: 'NAMESPACE',
- token: 'TOKEN',
- ca_pem: ca_pem)
+ token: token,
+ ca_pem: ca_pem
+ )
end
- context 'when CA PEM is provided' do
- let(:ca_pem) { 'PEM' }
- let(:path) { expand_fixture_path('config/kubeconfig.yml') }
-
- it { is_expected.to eq(YAML.load_file(path)) }
- end
+ it { expect(YAML.safe_load(subject)).to eq(YAML.load_file(expand_fixture_path('config/kubeconfig.yml'))) }
context 'when CA PEM is not provided' do
let(:ca_pem) { nil }
- let(:path) { expand_fixture_path('config/kubeconfig-without-ca.yml') }
- it { is_expected.to eq(YAML.load_file(path)) }
+ it { expect(YAML.safe_load(subject)).to eq(YAML.load_file(expand_fixture_path('config/kubeconfig-without-ca.yml'))) }
+ end
+
+ context 'when token is not provided' do
+ let(:token) { nil }
+
+ it { is_expected.to be_nil }
end
end
diff --git a/spec/lib/gitlab/lfs_token_spec.rb b/spec/lib/gitlab/lfs_token_spec.rb
index 3a20dad16d0..77ee30264bf 100644
--- a/spec/lib/gitlab/lfs_token_spec.rb
+++ b/spec/lib/gitlab/lfs_token_spec.rb
@@ -48,4 +48,59 @@ describe Gitlab::LfsToken do
end
end
end
+
+ describe '#deploy_key_pushable?' do
+ let(:lfs_token) { described_class.new(actor) }
+
+ context 'when actor is not a DeployKey' do
+ let(:actor) { create(:user) }
+ let(:project) { create(:project) }
+
+ it 'returns false' do
+ expect(lfs_token.deploy_key_pushable?(project)).to be_falsey
+ end
+ end
+
+ context 'when actor is a DeployKey' do
+ let(:deploy_keys_project) { create(:deploy_keys_project, can_push: can_push) }
+ let(:project) { deploy_keys_project.project }
+ let(:actor) { deploy_keys_project.deploy_key }
+
+ context 'but the DeployKey cannot push to the project' do
+ let(:can_push) { false }
+
+ it 'returns false' do
+ expect(lfs_token.deploy_key_pushable?(project)).to be_falsey
+ end
+ end
+
+ context 'and the DeployKey can push to the project' do
+ let(:can_push) { true }
+
+ it 'returns true' do
+ expect(lfs_token.deploy_key_pushable?(project)).to be_truthy
+ end
+ end
+ end
+ end
+
+ describe '#type' do
+ let(:lfs_token) { described_class.new(actor) }
+
+ context 'when actor is not a User' do
+ let(:actor) { create(:deploy_key) }
+
+ it 'returns :lfs_deploy_token' do
+ expect(lfs_token.type).to eq(:lfs_deploy_token)
+ end
+ end
+
+ context 'when actor is a User' do
+ let(:actor) { create(:user) }
+
+ it 'returns :lfs_token' do
+ expect(lfs_token.type).to eq(:lfs_token)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 4a0dc3686ec..6831274d37c 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -54,11 +54,18 @@ describe Gitlab::ProjectSearchResults do
end
it 'finds by name' do
- expect(results.map(&:first)).to include(expected_file_by_name)
+ expect(results.map(&:filename)).to include(expected_file_by_name)
+ end
+
+ it "loads all blobs for filename matches in single batch" do
+ expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original
+
+ expected = project.repository.search_files_by_name(query, 'master')
+ expect(results.map(&:filename)).to include(*expected)
end
it 'finds by content' do
- blob = results.select { |result| result.first == expected_file_by_content }.flatten.last
+ blob = results.select { |result| result.filename == expected_file_by_content }.flatten.last
expect(blob.filename).to eq(expected_file_by_content)
end
@@ -122,126 +129,6 @@ describe Gitlab::ProjectSearchResults do
let(:blob_type) { 'blobs' }
let(:entity) { project }
end
-
- describe 'parsing results' do
- let(:results) { project.repository.search_files_by_content('feature', 'master') }
- let(:search_result) { results.first }
-
- subject { described_class.parse_search_result(search_result) }
-
- it "returns a valid FoundBlob" do
- is_expected.to be_an Gitlab::SearchResults::FoundBlob
- expect(subject.id).to be_nil
- expect(subject.path).to eq('CHANGELOG')
- expect(subject.filename).to eq('CHANGELOG')
- expect(subject.basename).to eq('CHANGELOG')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(188)
- expect(subject.data.lines[2]).to eq(" - Feature: Replace teams with group membership\n")
- end
-
- context 'when the matching filename contains a colon' do
- let(:search_result) { "master:testdata/project::function1.yaml\x001\x00---\n" }
-
- it 'returns a valid FoundBlob' do
- expect(subject.filename).to eq('testdata/project::function1.yaml')
- expect(subject.basename).to eq('testdata/project::function1')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(1)
- expect(subject.data).to eq("---\n")
- end
- end
-
- context 'when the matching content contains a number surrounded by colons' do
- let(:search_result) { "master:testdata/foo.txt\x001\x00blah:9:blah" }
-
- it 'returns a valid FoundBlob' do
- expect(subject.filename).to eq('testdata/foo.txt')
- expect(subject.basename).to eq('testdata/foo')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(1)
- expect(subject.data).to eq('blah:9:blah')
- end
- end
-
- context 'when the matching content contains multiple null bytes' do
- let(:search_result) { "master:testdata/foo.txt\x001\x00blah\x001\x00foo" }
-
- it 'returns a valid FoundBlob' do
- expect(subject.filename).to eq('testdata/foo.txt')
- expect(subject.basename).to eq('testdata/foo')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(1)
- expect(subject.data).to eq("blah\x001\x00foo")
- end
- end
-
- context 'when the search result ends with an empty line' do
- let(:results) { project.repository.search_files_by_content('Role models', 'master') }
-
- it 'returns a valid FoundBlob that ends with an empty line' do
- expect(subject.filename).to eq('files/markdown/ruby-style-guide.md')
- expect(subject.basename).to eq('files/markdown/ruby-style-guide')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(1)
- expect(subject.data).to eq("# Prelude\n\n> Role models are important. <br/>\n> -- Officer Alex J. Murphy / RoboCop\n\n")
- end
- end
-
- context 'when the search returns non-ASCII data' do
- context 'with UTF-8' do
- let(:results) { project.repository.search_files_by_content('файл', 'master') }
-
- it 'returns results as UTF-8' do
- expect(subject.filename).to eq('encoding/russian.rb')
- expect(subject.basename).to eq('encoding/russian')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(1)
- expect(subject.data).to eq("Хороший файл\n")
- end
- end
-
- context 'with UTF-8 in the filename' do
- let(:results) { project.repository.search_files_by_content('webhook', 'master') }
-
- it 'returns results as UTF-8' do
- expect(subject.filename).to eq('encoding/テスト.txt')
- expect(subject.basename).to eq('encoding/テスト')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(3)
- expect(subject.data).to include('WebHookの確認')
- end
- end
-
- context 'with ISO-8859-1' do
- let(:search_result) { "master:encoding/iso8859.txt\x001\x00\xC4\xFC\nmaster:encoding/iso8859.txt\x002\x00\nmaster:encoding/iso8859.txt\x003\x00foo\n".force_encoding(Encoding::ASCII_8BIT) }
-
- it 'returns results as UTF-8' do
- expect(subject.filename).to eq('encoding/iso8859.txt')
- expect(subject.basename).to eq('encoding/iso8859')
- expect(subject.ref).to eq('master')
- expect(subject.startline).to eq(1)
- expect(subject.data).to eq("Äü\n\nfoo\n")
- end
- end
- end
-
- context "when filename has extension" do
- let(:search_result) { "master:CONTRIBUTE.md\x005\x00- [Contribute to GitLab](#contribute-to-gitlab)\n" }
-
- it { expect(subject.path).to eq('CONTRIBUTE.md') }
- it { expect(subject.filename).to eq('CONTRIBUTE.md') }
- it { expect(subject.basename).to eq('CONTRIBUTE') }
- end
-
- context "when file under directory" do
- let(:search_result) { "master:a/b/c.md\x005\x00a b c\n" }
-
- it { expect(subject.path).to eq('a/b/c.md') }
- it { expect(subject.filename).to eq('a/b/c.md') }
- it { expect(subject.basename).to eq('a/b/c') }
- end
- end
end
describe 'wiki search' do
diff --git a/spec/lib/gitlab/search/found_blob_spec.rb b/spec/lib/gitlab/search/found_blob_spec.rb
new file mode 100644
index 00000000000..74157e5c67c
--- /dev/null
+++ b/spec/lib/gitlab/search/found_blob_spec.rb
@@ -0,0 +1,138 @@
+# coding: utf-8
+
+require 'spec_helper'
+
+describe Gitlab::Search::FoundBlob do
+ describe 'parsing results' do
+ let(:project) { create(:project, :public, :repository) }
+ let(:results) { project.repository.search_files_by_content('feature', 'master') }
+ let(:search_result) { results.first }
+
+ subject { described_class.new(content_match: search_result, project: project) }
+
+ it "returns a valid FoundBlob" do
+ is_expected.to be_an described_class
+ expect(subject.id).to be_nil
+ expect(subject.path).to eq('CHANGELOG')
+ expect(subject.filename).to eq('CHANGELOG')
+ expect(subject.basename).to eq('CHANGELOG')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(188)
+ expect(subject.data.lines[2]).to eq(" - Feature: Replace teams with group membership\n")
+ end
+
+ it "doesn't parses content if not needed" do
+ expect(subject).not_to receive(:parse_search_result)
+ expect(subject.project_id).to eq(project.id)
+ expect(subject.binary_filename).to eq('CHANGELOG')
+ end
+
+ it "parses content only once when needed" do
+ expect(subject).to receive(:parse_search_result).once.and_call_original
+ expect(subject.filename).to eq('CHANGELOG')
+ expect(subject.startline).to eq(188)
+ end
+
+ context 'when the matching filename contains a colon' do
+ let(:search_result) { "master:testdata/project::function1.yaml\x001\x00---\n" }
+
+ it 'returns a valid FoundBlob' do
+ expect(subject.filename).to eq('testdata/project::function1.yaml')
+ expect(subject.basename).to eq('testdata/project::function1')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq("---\n")
+ end
+ end
+
+ context 'when the matching content contains a number surrounded by colons' do
+ let(:search_result) { "master:testdata/foo.txt\x001\x00blah:9:blah" }
+
+ it 'returns a valid FoundBlob' do
+ expect(subject.filename).to eq('testdata/foo.txt')
+ expect(subject.basename).to eq('testdata/foo')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq('blah:9:blah')
+ end
+ end
+
+ context 'when the matching content contains multiple null bytes' do
+ let(:search_result) { "master:testdata/foo.txt\x001\x00blah\x001\x00foo" }
+
+ it 'returns a valid FoundBlob' do
+ expect(subject.filename).to eq('testdata/foo.txt')
+ expect(subject.basename).to eq('testdata/foo')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq("blah\x001\x00foo")
+ end
+ end
+
+ context 'when the search result ends with an empty line' do
+ let(:results) { project.repository.search_files_by_content('Role models', 'master') }
+
+ it 'returns a valid FoundBlob that ends with an empty line' do
+ expect(subject.filename).to eq('files/markdown/ruby-style-guide.md')
+ expect(subject.basename).to eq('files/markdown/ruby-style-guide')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq("# Prelude\n\n> Role models are important. <br/>\n> -- Officer Alex J. Murphy / RoboCop\n\n")
+ end
+ end
+
+ context 'when the search returns non-ASCII data' do
+ context 'with UTF-8' do
+ let(:results) { project.repository.search_files_by_content('файл', 'master') }
+
+ it 'returns results as UTF-8' do
+ expect(subject.filename).to eq('encoding/russian.rb')
+ expect(subject.basename).to eq('encoding/russian')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq("Хороший файл\n")
+ end
+ end
+
+ context 'with UTF-8 in the filename' do
+ let(:results) { project.repository.search_files_by_content('webhook', 'master') }
+
+ it 'returns results as UTF-8' do
+ expect(subject.filename).to eq('encoding/テスト.txt')
+ expect(subject.basename).to eq('encoding/テスト')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(3)
+ expect(subject.data).to include('WebHookの確認')
+ end
+ end
+
+ context 'with ISO-8859-1' do
+ let(:search_result) { "master:encoding/iso8859.txt\x001\x00\xC4\xFC\nmaster:encoding/iso8859.txt\x002\x00\nmaster:encoding/iso8859.txt\x003\x00foo\n".force_encoding(Encoding::ASCII_8BIT) }
+
+ it 'returns results as UTF-8' do
+ expect(subject.filename).to eq('encoding/iso8859.txt')
+ expect(subject.basename).to eq('encoding/iso8859')
+ expect(subject.ref).to eq('master')
+ expect(subject.startline).to eq(1)
+ expect(subject.data).to eq("Äü\n\nfoo\n")
+ end
+ end
+ end
+
+ context "when filename has extension" do
+ let(:search_result) { "master:CONTRIBUTE.md\x005\x00- [Contribute to GitLab](#contribute-to-gitlab)\n" }
+
+ it { expect(subject.path).to eq('CONTRIBUTE.md') }
+ it { expect(subject.filename).to eq('CONTRIBUTE.md') }
+ it { expect(subject.basename).to eq('CONTRIBUTE') }
+ end
+
+ context "when file under directory" do
+ let(:search_result) { "master:a/b/c.md\x005\x00a b c\n" }
+
+ it { expect(subject.path).to eq('a/b/c.md') }
+ it { expect(subject.filename).to eq('a/b/c.md') }
+ it { expect(subject.basename).to eq('a/b/c') }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 5390f237073..deb19fe1a4b 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -17,6 +17,9 @@ describe Gitlab::UsageData do
gcp_cluster = create(:cluster, :provided_by_gcp)
create(:cluster, :provided_by_user)
create(:cluster, :provided_by_user, :disabled)
+ create(:cluster, :group)
+ create(:cluster, :group, :disabled)
+ create(:cluster, :group, :disabled)
create(:clusters_applications_helm, :installed, cluster: gcp_cluster)
create(:clusters_applications_ingress, :installed, cluster: gcp_cluster)
create(:clusters_applications_cert_managers, :installed, cluster: gcp_cluster)
@@ -77,7 +80,11 @@ describe Gitlab::UsageData do
environments
clusters
clusters_enabled
+ project_clusters_enabled
+ group_clusters_enabled
clusters_disabled
+ project_clusters_disabled
+ group_clusters_disabled
clusters_platforms_gke
clusters_platforms_user
clusters_applications_helm
@@ -127,8 +134,13 @@ describe Gitlab::UsageData do
expect(count_data[:projects_slack_notifications_active]).to eq(2)
expect(count_data[:projects_slack_slash_active]).to eq(1)
- expect(count_data[:clusters_enabled]).to eq(6)
- expect(count_data[:clusters_disabled]).to eq(1)
+ expect(count_data[:clusters_enabled]).to eq(7)
+ expect(count_data[:project_clusters_enabled]).to eq(6)
+ expect(count_data[:group_clusters_enabled]).to eq(1)
+ expect(count_data[:clusters_disabled]).to eq(3)
+ expect(count_data[:project_clusters_disabled]).to eq(1)
+ expect(count_data[:group_clusters_disabled]).to eq(2)
expect(count_data[:clusters_platforms_gke]).to eq(1)
expect(count_data[:clusters_platforms_user]).to eq(1)
expect(count_data[:clusters_applications_helm]).to eq(1)
@@ -201,4 +213,29 @@ describe Gitlab::UsageData do
expect(described_class.count(relation, fallback: 15)).to eq(15)
end
end
+
+ describe '#approximate_counts' do
+ it 'gets approximate counts for selected models' do
+ create(:label)
+
+ expect(Gitlab::Database::Count).to receive(:approximate_counts)
+ .with(described_class::APPROXIMATE_COUNT_MODELS).once.and_call_original
+
+ counts = described_class.approximate_counts.values
+
+ expect(counts.count).to eq(described_class::APPROXIMATE_COUNT_MODELS.count)
+ expect(counts.any? { |count| count < 0 }).to be_falsey
+ end
+
+ it 'returns default values if counts cannot be retrieved' do
+ described_class::APPROXIMATE_COUNT_MODELS.map do |model|
+ model.name.underscore.pluralize.to_sym
+ end
+
+ expect(Gitlab::Database::Count).to receive(:approximate_counts)
+ .and_return({})
+
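+ # Each model is expected to fall back to a count of -1 when no approximate count is available.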
+ expect(described_class.approximate_counts.values.uniq).to eq([-1])
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index fae32cff781..47a5fd0bdb4 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -153,4 +153,42 @@ describe Gitlab::Utils do
end
end
end
+
+ describe '.ensure_utf8_size' do
+ context 'string has fewer bytes than expected' do
+ it 'backfills the string with "0" characters' do
+ transformed = described_class.ensure_utf8_size('a' * 10, bytes: 32)
+
+ expect(transformed.bytesize).to eq 32
+ expect(transformed).to eq(('a' * 10) + ('0' * 22))
+ end
+ end
+
+ context 'string size exactly matches the expected size' do
+ it 'returns original value' do
+ transformed = described_class.ensure_utf8_size('a' * 32, bytes: 32)
+
+ expect(transformed).to eq 'a' * 32
+ expect(transformed.bytesize).to eq 32
+ end
+ end
+
+ context 'when string contains a few multi-byte UTF characters' do
+ it 'backfills the string with "0" characters' do
+ transformed = described_class.ensure_utf8_size('❤' * 6, bytes: 32)
+
+ expect(transformed).to eq '❤❤❤❤❤❤' + ('0' * 14)
+ expect(transformed.bytesize).to eq 32
+ end
+ end
+
+ context 'when string has multiple multi-byte UTF chars exceeding 32 bytes' do
+ it 'truncates the string to fit 32 bytes and backfills it if needed' do
+ transformed = described_class.ensure_utf8_size('❤' * 18, bytes: 32)
+
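+ # '❤' is 3 bytes in UTF-8, so 18 characters (54 bytes) exceed the limit; only 10 characters (30 bytes) fit, leaving 2 bytes of padding.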
+ expect(transformed).to eq(('❤' * 10) + ('0' * 2))
+ expect(transformed.bytesize).to eq 32
+ end
+ end
+ end
end
diff --git a/spec/lib/omni_auth/strategies/jwt_spec.rb b/spec/lib/omni_auth/strategies/jwt_spec.rb
index 88d6d0b559a..c2e2db27362 100644
--- a/spec/lib/omni_auth/strategies/jwt_spec.rb
+++ b/spec/lib/omni_auth/strategies/jwt_spec.rb
@@ -4,12 +4,10 @@ describe OmniAuth::Strategies::Jwt do
include Rack::Test::Methods
include DeviseHelpers
- context '.decoded' do
- let(:strategy) { described_class.new({}) }
+ context '#decoded' do
+ subject { described_class.new({}) }
let(:timestamp) { Time.now.to_i }
let(:jwt_config) { Devise.omniauth_configs[:jwt] }
- let(:key) { JWT.encode(claims, jwt_config.strategy.secret) }
-
let(:claims) do
{
id: 123,
@@ -18,19 +16,55 @@ describe OmniAuth::Strategies::Jwt do
iat: timestamp
}
end
+ let(:algorithm) { 'HS256' }
+ let(:secret) { jwt_config.strategy.secret }
+ let(:private_key) { secret }
+ let(:payload) { JWT.encode(claims, private_key, algorithm) }
before do
- allow_any_instance_of(OmniAuth::Strategy).to receive(:options).and_return(jwt_config.strategy)
- allow_any_instance_of(Rack::Request).to receive(:params).and_return({ 'jwt' => key })
+ subject.options[:secret] = secret
+ subject.options[:algorithm] = algorithm
+
+ expect_next_instance_of(Rack::Request) do |rack_request|
+ expect(rack_request).to receive(:params).and_return('jwt' => payload)
+ end
end
- it 'decodes the user information' do
- result = strategy.decoded
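+ # ES256/ES384/ES512 are ECDSA algorithms; each requires a key on a specific OpenSSL named curve (P-256, P-384, P-521).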
+ ECDSA_NAMED_CURVES = {
+ 'ES256' => 'prime256v1',
+ 'ES384' => 'secp384r1',
+ 'ES512' => 'secp521r1'
+ }.freeze
- expect(result["id"]).to eq(123)
- expect(result["name"]).to eq("user_example")
- expect(result["email"]).to eq("user@example.com")
- expect(result["iat"]).to eq(timestamp)
+ {
+ OpenSSL::PKey::RSA => %w[RS256 RS384 RS512],
+ OpenSSL::PKey::EC => %w[ES256 ES384 ES512],
+ String => %w[HS256 HS384 HS512]
+ }.each do |private_key_class, algorithms|
+ algorithms.each do |algorithm|
+ context "when the #{algorithm} algorithm is used" do
+ let(:algorithm) { algorithm }
+ let(:secret) do
+ if private_key_class == OpenSSL::PKey::RSA
+ private_key_class.generate(2048)
+ .to_pem
+ elsif private_key_class == OpenSSL::PKey::EC
+ private_key_class.new(ECDSA_NAMED_CURVES[algorithm])
+ .tap { |key| key.generate_key! }
+ .to_pem
+ else
+ private_key_class.new(jwt_config.strategy.secret)
+ end
+ end
+ let(:private_key) { private_key_class ? private_key_class.new(secret) : secret }
+
+ it 'decodes the user information' do
+ result = subject.decoded
+
+ expect(result).to eq(claims.stringify_keys)
+ end
+ end
+ end
end
context 'required claims is missing' do
@@ -43,7 +77,7 @@ describe OmniAuth::Strategies::Jwt do
end
it 'raises error' do
- expect { strategy.decoded }.to raise_error(OmniAuth::Strategies::Jwt::ClaimInvalid)
+ expect { subject.decoded }.to raise_error(OmniAuth::Strategies::Jwt::ClaimInvalid)
end
end
@@ -57,11 +91,12 @@ describe OmniAuth::Strategies::Jwt do
end
before do
- jwt_config.strategy.valid_within = Time.now.to_i
+ # Omniauth config values are always strings!
+ subject.options[:valid_within] = 2.days.to_s
end
it 'raises error' do
- expect { strategy.decoded }.to raise_error(OmniAuth::Strategies::Jwt::ClaimInvalid)
+ expect { subject.decoded }.to raise_error(OmniAuth::Strategies::Jwt::ClaimInvalid)
end
end
@@ -76,11 +111,12 @@ describe OmniAuth::Strategies::Jwt do
end
before do
- jwt_config.strategy.valid_within = 2.seconds
+ # Omniauth config values are always strings!
+ subject.options[:valid_within] = 2.seconds.to_s
end
it 'raises error' do
- expect { strategy.decoded }.to raise_error(OmniAuth::Strategies::Jwt::ClaimInvalid)
+ expect { subject.decoded }.to raise_error(OmniAuth::Strategies::Jwt::ClaimInvalid)
end
end
end
diff --git a/spec/migrations/schedule_runners_token_encryption_spec.rb b/spec/migrations/schedule_runners_token_encryption_spec.rb
new file mode 100644
index 00000000000..376d2795277
--- /dev/null
+++ b/spec/migrations/schedule_runners_token_encryption_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20181121111200_schedule_runners_token_encryption')
+
+describe ScheduleRunnersTokenEncryption, :migration do
+ let(:settings) { table(:application_settings) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:runners) { table(:ci_runners) }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
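+ # With BATCH_SIZE stubbed to 1 every record forms its own batch; the expectations below assume batches within each table are scheduled 4 minutes apart.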
+
+ settings.create!(id: 1, runners_registration_token: 'plain-text-token1')
+ namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab-org', runners_token: 'my-token1')
+ namespaces.create!(id: 12, name: 'gitlab', path: 'gitlab-org', runners_token: 'my-token2')
+ projects.create!(id: 111, namespace_id: 11, name: 'gitlab', path: 'gitlab-ce', runners_token: 'my-token1')
+ projects.create!(id: 114, namespace_id: 11, name: 'gitlab', path: 'gitlab-ce', runners_token: 'my-token2')
+ runners.create!(id: 201, runner_type: 1, token: 'plain-text-token1')
+ runners.create!(id: 202, runner_type: 1, token: 'plain-text-token2')
+ end
+
+ it 'schedules runners token encryption migration for multiple resources' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 'settings', 1, 1)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 'namespace', 11, 11)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, 'namespace', 12, 12)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 'project', 111, 111)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, 'project', 114, 114)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 'runner', 201, 201)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, 'runner', 202, 202)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 7
+ end
+ end
+ end
+end
diff --git a/spec/models/blob_spec.rb b/spec/models/blob_spec.rb
index ed93f94d893..e8c03b587e2 100644
--- a/spec/models/blob_spec.rb
+++ b/spec/models/blob_spec.rb
@@ -18,6 +18,7 @@ describe Blob do
describe '.lazy' do
let(:project) { create(:project, :repository) }
+ let(:same_project) { Project.find(project.id) }
let(:other_project) { create(:project, :repository) }
let(:commit_id) { 'e63f41fe459e62e1228fcef60d7189127aeba95a' }
@@ -32,7 +33,7 @@ describe Blob do
expect(other_project.repository).not_to receive(:blobs_at)
changelog = described_class.lazy(project, commit_id, 'CHANGELOG')
- contributing = described_class.lazy(project, commit_id, 'CONTRIBUTING.md')
+ contributing = described_class.lazy(same_project, commit_id, 'CONTRIBUTING.md')
described_class.lazy(other_project, commit_id, 'CHANGELOG')
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index 6dba132184c..519968b9e48 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -15,6 +15,8 @@ describe Ci::BuildMetadata do
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:build_metadata) { build.metadata }
+ it_behaves_like 'having unique enum values'
+
describe '#update_timeout_state' do
subject { build_metadata }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index d02c3a5765f..89f78f629d4 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -769,33 +769,15 @@ describe Ci::Build do
let(:subject) { build.hide_secrets(data) }
context 'hide runners token' do
- let(:data) { 'new token data'}
+ let(:data) { "new #{project.runners_token} data"}
- before do
- build.project.update(runners_token: 'token')
- end
-
- it { is_expected.to eq('new xxxxx data') }
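+ # The masked value is a run of 'x' characters, presumably as long as the token, so a pattern match is used instead of a fixed string.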
+ it { is_expected.to match(/^new x+ data$/) }
end
context 'hide build token' do
- let(:data) { 'new token data'}
-
- before do
- build.update(token: 'token')
- end
-
- it { is_expected.to eq('new xxxxx data') }
- end
-
- context 'hide build token' do
- let(:data) { 'new token data'}
-
- before do
- build.update(token: 'token')
- end
+ let(:data) { "new #{build.token} data"}
- it { is_expected.to eq('new xxxxx data') }
+ it { is_expected.to match(/^new x+ data$/) }
end
end
@@ -1943,7 +1925,7 @@ describe Ci::Build do
context 'when token is empty' do
before do
- build.token = nil
+ build.update_columns(token: nil, token_encrypted: nil)
end
it { is_expected.to be_nil}
@@ -2159,7 +2141,7 @@ describe Ci::Build do
end
before do
- build.token = 'my-token'
+ build.set_token('my-token')
build.yaml_variables = []
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 859287bb0c8..d214fdf369a 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -12,6 +12,8 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
end
+ it_behaves_like 'having unique enum values'
+
before do
stub_feature_flags(ci_enable_live_trace: true)
stub_artifacts_object_storage
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index fb5bec4108a..c68ba02b8de 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -15,6 +15,8 @@ describe Ci::JobArtifact do
it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) }
+ it_behaves_like 'having unique enum values'
+
describe '.test_reports' do
subject { described_class.test_reports }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 9e6146b8a44..b67c6a4cffa 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -8,10 +8,13 @@ describe Ci::Pipeline, :mailer do
create(:ci_empty_pipeline, status: :created, project: project)
end
+ it_behaves_like 'having unique enum values'
+
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:auto_canceled_by) }
it { is_expected.to belong_to(:pipeline_schedule) }
+ it { is_expected.to belong_to(:merge_request) }
it { is_expected.to have_many(:statuses) }
it { is_expected.to have_many(:trigger_requests) }
@@ -30,8 +33,131 @@ describe Ci::Pipeline, :mailer do
describe 'associations' do
it 'has a bidirectional relationship with projects' do
- expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:pipelines)
- expect(Project.reflect_on_association(:pipelines).has_inverse?).to eq(:project)
+ expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:all_pipelines)
+ expect(Project.reflect_on_association(:all_pipelines).has_inverse?).to eq(:project)
+ expect(Project.reflect_on_association(:ci_pipelines).has_inverse?).to eq(:project)
+ end
+ end
+
+ describe '.sort_by_merge_request_pipelines' do
+ subject { described_class.sort_by_merge_request_pipelines }
+
+ context 'when branch pipelines exist' do
+ let!(:branch_pipeline_1) { create(:ci_pipeline, source: :push) }
+ let!(:branch_pipeline_2) { create(:ci_pipeline, source: :push) }
+
+ it 'returns pipelines in descending order of id' do
+ expect(subject).to eq([branch_pipeline_2,
+ branch_pipeline_1])
+ end
+ end
+
+ context 'when merge request pipelines exist' do
+ let!(:merge_request_pipeline_1) do
+ create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ end
+
+ let!(:merge_request_pipeline_2) do
+ create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'feature',
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'returns pipelines in descending order of id' do
+ expect(subject).to eq([merge_request_pipeline_2,
+ merge_request_pipeline_1])
+ end
+ end
+
+ context 'when both branch pipeline and merge request pipeline exist' do
+ let!(:branch_pipeline_1) { create(:ci_pipeline, source: :push) }
+ let!(:branch_pipeline_2) { create(:ci_pipeline, source: :push) }
+
+ let!(:merge_request_pipeline_1) do
+ create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ end
+
+ let!(:merge_request_pipeline_2) do
+ create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'feature',
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'returns merge request pipelines first' do
+ expect(subject).to eq([merge_request_pipeline_2,
+ merge_request_pipeline_1,
+ branch_pipeline_2,
+ branch_pipeline_1])
+ end
+ end
+ end
+
+ describe '.merge_request' do
+ subject { described_class.merge_request }
+
+ context 'when there is a merge request pipeline' do
+ let!(:pipeline) { create(:ci_pipeline, source: :merge_request, merge_request: merge_request) }
+ let(:merge_request) { create(:merge_request) }
+
+ it 'returns the merge request pipeline' do
+ expect(subject).to eq([pipeline])
+ end
+ end
+
+ context 'when there are no merge request pipelines' do
+ let!(:pipeline) { create(:ci_pipeline, source: :push) }
+
+ it 'returns empty array' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ describe 'Validations for merge request pipelines' do
+ let(:pipeline) { build(:ci_pipeline, source: source, merge_request: merge_request) }
+
+ context 'when source is merge request' do
+ let(:source) { :merge_request }
+
+ context 'when merge request is specified' do
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_project: project, target_branch: 'master') }
+
+ it { expect(pipeline).to be_valid }
+ end
+
+ context 'when merge request is empty' do
+ let(:merge_request) { nil }
+
+ it { expect(pipeline).not_to be_valid }
+ end
+ end
+
+ context 'when source is web' do
+ let(:source) { :web }
+
+ context 'when merge request is specified' do
+ let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_project: project, target_branch: 'master') }
+
+ it { expect(pipeline).not_to be_valid }
+ end
+
+ context 'when merge request is empty' do
+ let(:merge_request) { nil }
+
+ it { expect(pipeline).to be_valid }
+ end
end
end
@@ -224,6 +350,50 @@ describe Ci::Pipeline, :mailer do
CI_COMMIT_TITLE
CI_COMMIT_DESCRIPTION]
end
+
+ context 'when source is merge request' do
+ let(:pipeline) do
+ create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'feature',
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'exposes merge request pipeline variables' do
+ expect(subject.to_hash)
+ .to include(
+ 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s,
+ 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s,
+ 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s,
+ 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path,
+ 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url,
+ 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
+ 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s)
+ end
+
+ context 'when source project does not exist' do
+ before do
+ merge_request.update_column(:source_project_id, nil)
+ end
+
+ it 'does not expose source project related variables' do
+ expect(subject.to_hash.keys).not_to include(
+ %w[CI_MERGE_REQUEST_SOURCE_PROJECT_ID
+ CI_MERGE_REQUEST_SOURCE_PROJECT_PATH
+ CI_MERGE_REQUEST_SOURCE_PROJECT_URL
+ CI_MERGE_REQUEST_SOURCE_BRANCH_NAME])
+ end
+ end
+ end
end
describe '#protected_ref?' do
@@ -758,27 +928,85 @@ describe Ci::Pipeline, :mailer do
describe '#branch?' do
subject { pipeline.branch? }
- context 'is not a tag' do
+ context 'when ref is not a tag' do
before do
pipeline.tag = false
end
- it 'return true when tag is set to false' do
+ it 'returns true' do
is_expected.to be_truthy
end
+
+ context 'when source is merge request' do
+ let(:pipeline) do
+ create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'feature',
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
+ end
end
- context 'is not a tag' do
+ context 'when ref is a tag' do
before do
pipeline.tag = true
end
- it 'return false when tag is set to true' do
+ it 'returns false' do
is_expected.to be_falsey
end
end
end
+ describe '#git_ref' do
+ subject { pipeline.send(:git_ref) }
+
+ context 'when ref is branch' do
+ let(:pipeline) { create(:ci_pipeline, tag: false) }
+
+ it 'returns branch ref' do
+ is_expected.to eq(Gitlab::Git::BRANCH_REF_PREFIX + pipeline.ref.to_s)
+ end
+ end
+
+ context 'when ref is tag' do
+ let(:pipeline) { create(:ci_pipeline, tag: true) }
+
+ it 'returns tag ref' do
+ is_expected.to eq(Gitlab::Git::TAG_REF_PREFIX + pipeline.ref.to_s)
+ end
+ end
+
+ context 'when ref is merge request' do
+ let(:pipeline) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: 'feature',
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'returns branch ref' do
+ is_expected.to eq(Gitlab::Git::BRANCH_REF_PREFIX + pipeline.ref.to_s)
+ end
+ end
+ end
+
describe 'ref_exists?' do
context 'when repository exists' do
using RSpec::Parameterized::TableSyntax
@@ -1003,7 +1231,7 @@ describe Ci::Pipeline, :mailer do
create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline, name: 'rubocop')
create(:ci_build, :allowed_to_fail, :failed, pipeline: pipeline2, name: 'rubocop')
- pipelines = project.pipelines.to_a
+ pipelines = project.ci_pipelines.to_a
pipelines.each(&:number_of_warnings)
@@ -1247,22 +1475,40 @@ describe Ci::Pipeline, :mailer do
describe '#ci_yaml_file_path' do
subject { pipeline.ci_yaml_file_path }
- it 'returns the path from project' do
- allow(pipeline.project).to receive(:ci_config_path) { 'custom/path' }
+ %i[unknown_source repository_source].each do |source|
+ context source.to_s do
+ before do
+ pipeline.config_source = described_class.config_sources.fetch(source)
+ end
- is_expected.to eq('custom/path')
- end
+ it 'returns the path from project' do
+ allow(pipeline.project).to receive(:ci_config_path) { 'custom/path' }
+
+ is_expected.to eq('custom/path')
+ end
+
+ it 'returns default when custom path is nil' do
+ allow(pipeline.project).to receive(:ci_config_path) { nil }
+
+ is_expected.to eq('.gitlab-ci.yml')
+ end
- it 'returns default when custom path is nil' do
- allow(pipeline.project).to receive(:ci_config_path) { nil }
+ it 'returns default when custom path is empty' do
+ allow(pipeline.project).to receive(:ci_config_path) { '' }
- is_expected.to eq('.gitlab-ci.yml')
+ is_expected.to eq('.gitlab-ci.yml')
+ end
+ end
end
- it 'returns default when custom path is empty' do
- allow(pipeline.project).to receive(:ci_config_path) { '' }
+ context 'when pipeline is for auto-devops' do
+ before do
+ pipeline.config_source = 'auto_devops_source'
+ end
- is_expected.to eq('.gitlab-ci.yml')
+ it 'does not return a config file' do
+ is_expected.to be_nil
+ end
end
end
@@ -1835,6 +2081,55 @@ describe Ci::Pipeline, :mailer do
expect(pipeline.all_merge_requests).to be_empty
end
+
+ context 'when there is a merge request pipeline' do
+ let(:source_branch) { 'feature' }
+ let(:target_branch) { 'master' }
+
+ let!(:pipeline) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ project: project,
+ ref: source_branch,
+ merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: source_branch,
+ target_project: project,
+ target_branch: target_branch)
+ end
+
+ it 'returns an associated merge request' do
+ expect(pipeline.all_merge_requests).to eq([merge_request])
+ end
+
+ context 'when there is another merge request pipeline that targets a different branch' do
+ let(:target_branch_2) { 'merge-test' }
+
+ let!(:pipeline_2) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ project: project,
+ ref: source_branch,
+ merge_request: merge_request_2)
+ end
+
+ let(:merge_request_2) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: source_branch,
+ target_project: project,
+ target_branch: target_branch_2)
+ end
+
+ it 'does not return an associated merge request' do
+ expect(pipeline.all_merge_requests).not_to include(merge_request_2)
+ end
+ end
+ end
end
describe '#stuck?' do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index b545e036aa1..ad79f8d4ce0 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Ci::Runner do
+ it_behaves_like 'having unique enum values'
+
describe 'validation' do
it { is_expected.to validate_presence_of(:access_level) }
it { is_expected.to validate_presence_of(:runner_type) }
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index 5076f7faeac..3228c400155 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -3,6 +3,8 @@ require 'spec_helper'
describe Ci::Stage, :models do
let(:stage) { create(:ci_stage_entity) }
+ it_behaves_like 'having unique enum values'
+
describe 'associations' do
before do
create(:ci_build, stage_id: stage.id)
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index cfe0e216c78..cd28f1fe9c6 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -3,6 +3,8 @@ require 'rails_helper'
describe Clusters::Applications::Ingress do
let(:ingress) { create(:clusters_applications_ingress) }
+ it_behaves_like 'having unique enum values'
+
include_examples 'cluster application core specs', :clusters_applications_ingress
include_examples 'cluster application status specs', :clusters_applications_ingress
include_examples 'cluster application helm specs', :clusters_applications_ingress
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index d43d88c2924..a1579b90436 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -1,6 +1,9 @@
require 'rails_helper'
describe Clusters::Applications::Knative do
+ include KubernetesHelpers
+ include ReactiveCachingHelpers
+
let(:knative) { create(:clusters_applications_knative) }
include_examples 'cluster application core specs', :clusters_applications_knative
@@ -121,4 +124,43 @@ describe Clusters::Applications::Knative do
describe 'validations' do
it { is_expected.to validate_presence_of(:hostname) }
end
+
+ describe '#services' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:service) { cluster.platform_kubernetes }
+ let(:knative) { create(:clusters_applications_knative, cluster: cluster) }
+
+ let(:namespace) do
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
+
+ subject { knative.services }
+
+ before do
+ stub_kubeclient_discover(service.api_url)
+ stub_kubeclient_knative_services
+ end
+
+ it 'should have an uninitialized cache' do
+ is_expected.to be_nil
+ end
+
+ context 'when using synchronous reactive cache' do
+ before do
+ stub_reactive_cache(knative, services: kube_response(kube_knative_services_body))
+ synchronous_reactive_cache(knative)
+ end
+
+ it 'should have cached services' do
+ is_expected.not_to be_nil
+ end
+
+ it 'should match our namespace' do
+ expect(knative.services_for(ns: namespace)).not_to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index eb68ebccdcb..840f74c9890 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe Clusters::Cluster do
+ it_behaves_like 'having unique enum values'
+
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:cluster_projects) }
it { is_expected.to have_many(:projects) }
@@ -90,6 +92,26 @@ describe Clusters::Cluster do
it { is_expected.to contain_exactly(cluster) }
end
+ describe '.missing_kubernetes_namespace' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, :project) }
+ let(:project) { cluster.project }
+ let(:kubernetes_namespaces) { project.kubernetes_namespaces }
+
+ subject do
+ described_class.joins(:projects).where(projects: { id: project.id }).missing_kubernetes_namespace(kubernetes_namespaces)
+ end
+
+ it { is_expected.to contain_exactly(cluster) }
+
+ context 'kubernetes namespace exists' do
+ before do
+ create(:cluster_kubernetes_namespace, project: project, cluster: cluster)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe 'validation' do
subject { cluster.valid? }
@@ -231,6 +253,81 @@ describe Clusters::Cluster do
end
end
+ describe '.ancestor_clusters_for_clusterable' do
+ let(:group_cluster) { create(:cluster, :provided_by_gcp, :group) }
+ let(:group) { group_cluster.group }
+ let(:hierarchy_order) { :desc }
+ let(:clusterable) { project }
+
+ subject do
+ described_class.ancestor_clusters_for_clusterable(clusterable, hierarchy_order: hierarchy_order)
+ end
+
+ context 'when project does not belong to this group' do
+ let(:project) { create(:project, group: create(:group)) }
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when group has a configured kubernetes cluster' do
+ let(:project) { create(:project, group: group) }
+
+ it 'returns the group cluster' do
+ is_expected.to eq([group_cluster])
+ end
+ end
+
+ context 'when sub-group has configured kubernetes cluster', :nested_groups do
+ let(:sub_group_cluster) { create(:cluster, :provided_by_gcp, :group) }
+ let(:sub_group) { sub_group_cluster.group }
+ let(:project) { create(:project, group: sub_group) }
+
+ before do
+ sub_group.update!(parent: group)
+ end
+
+ it 'returns clusters in order, descending the hierarchy' do
+ is_expected.to eq([group_cluster, sub_group_cluster])
+ end
+
+ it 'avoids N+1 queries' do
+ another_project = create(:project)
+ control_count = ActiveRecord::QueryRecorder.new do
+ described_class.ancestor_clusters_for_clusterable(another_project, hierarchy_order: hierarchy_order)
+ end.count
+
+ cluster2 = create(:cluster, :provided_by_gcp, :group)
+ child2 = cluster2.group
+ child2.update!(parent: sub_group)
+ project = create(:project, group: child2)
+
+ expect do
+ described_class.ancestor_clusters_for_clusterable(project, hierarchy_order: hierarchy_order)
+ end.not_to exceed_query_limit(control_count)
+ end
+
+ context 'for a group' do
+ let(:clusterable) { sub_group }
+
+ it 'returns clusters in order for a group' do
+ is_expected.to eq([group_cluster])
+ end
+ end
+ end
+
+ context 'scope chaining' do
+ let(:project) { create(:project, group: group) }
+
+ subject { described_class.none.ancestor_clusters_for_clusterable(project) }
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '#provider' do
subject { cluster.provider }
@@ -263,6 +360,31 @@ describe Clusters::Cluster do
end
end
+ describe '#all_projects' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, projects: [project]) }
+
+ subject { cluster.all_projects }
+
+ context 'project cluster' do
+ it 'returns project' do
+ is_expected.to eq([project])
+ end
+ end
+
+ context 'group cluster' do
+ let(:cluster) { create(:cluster, :group) }
+ let(:group) { cluster.group }
+ let(:project) { create(:project, group: group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:subproject) { create(:project, group: subgroup) }
+
+ it 'returns all projects for group' do
+ is_expected.to contain_exactly(project, subproject)
+ end
+ end
+ end
+
describe '#first_project' do
subject { cluster.first_project }
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 99fd6ccc4d8..062d2fd0768 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -18,6 +18,8 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
it { is_expected.to delegate_method(:managed?).to(:cluster) }
it { is_expected.to delegate_method(:kubernetes_namespace).to(:cluster) }
+ it_behaves_like 'having unique enum values'
+
describe 'before_validation' do
context 'when namespace includes upper case' do
let(:kubernetes) { create(:cluster_platform_kubernetes, :configured, namespace: namespace) }
@@ -273,6 +275,36 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
)
end
end
+
+ context 'group level cluster' do
+ let!(:cluster) { create(:cluster, :group, platform_kubernetes: kubernetes) }
+
+ let(:project) { create(:project, group: cluster.group) }
+
+ subject { kubernetes.predefined_variables(project: project) }
+
+ context 'no kubernetes namespace for the project' do
+ it_behaves_like 'setting variables'
+
+ it 'does not return KUBE_TOKEN' do
+ expect(subject).not_to include(
+ { key: 'KUBE_TOKEN', value: kubernetes.token, public: false }
+ )
+ end
+ end
+
+ context 'kubernetes namespace exists for the project' do
+ let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, :with_token, cluster: cluster, project: project) }
+
+ it_behaves_like 'setting variables'
+
+ it 'sets KUBE_TOKEN' do
+ expect(subject).to include(
+ { key: 'KUBE_TOKEN', value: kubernetes_namespace.service_account_token, public: false }
+ )
+ end
+ end
+ end
end
describe '#terminals' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 917685399d4..8b7c88805c1 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -13,6 +13,8 @@ describe CommitStatus do
create(:commit_status, pipeline: pipeline, **opts)
end
+ it_behaves_like 'having unique enum values'
+
it { is_expected.to belong_to(:pipeline) }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb
index 8847623f705..b14b773b653 100644
--- a/spec/models/concerns/chronic_duration_attribute_spec.rb
+++ b/spec/models/concerns/chronic_duration_attribute_spec.rb
@@ -54,7 +54,8 @@ shared_examples 'ChronicDurationAttribute writer' do
subject.send("#{virtual_field}=", '-10m')
expect(subject.valid?).to be_falsey
- expect(subject.errors&.messages).to include(virtual_field => ['is not a correct duration'])
+ expect(subject.errors&.messages)
+ .to include(base: ['Maximum job timeout has a value which could not be accepted'])
end
end
diff --git a/spec/models/concerns/deployment_platform_spec.rb b/spec/models/concerns/deployment_platform_spec.rb
index 7bb89fe41dc..19ab4382b53 100644
--- a/spec/models/concerns/deployment_platform_spec.rb
+++ b/spec/models/concerns/deployment_platform_spec.rb
@@ -43,13 +43,86 @@ describe DeploymentPlatform do
it { is_expected.to be_nil }
end
- context 'when user configured kubernetes from CI/CD > Clusters' do
+ context 'when project has configured kubernetes from CI/CD > Clusters' do
let!(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let(:platform_kubernetes) { cluster.platform_kubernetes }
it 'returns the Kubernetes platform' do
expect(subject).to eq(platform_kubernetes)
end
+
+ context 'with a group level kubernetes cluster' do
+ let(:group_cluster) { create(:cluster, :provided_by_gcp, :group) }
+
+ before do
+ project.update!(group: group_cluster.group)
+ end
+
+ it 'returns the Kubernetes platform from the project cluster' do
+ expect(subject).to eq(platform_kubernetes)
+ end
+ end
+ end
+
+ context 'when group has configured kubernetes cluster' do
+ let!(:group_cluster) { create(:cluster, :provided_by_gcp, :group) }
+ let(:group) { group_cluster.group }
+
+ before do
+ project.update!(group: group)
+ end
+
+ it 'returns the Kubernetes platform' do
+ is_expected.to eq(group_cluster.platform_kubernetes)
+ end
+
+ context 'when child group has configured kubernetes cluster', :nested_groups do
+ let!(:child_group1_cluster) { create(:cluster, :provided_by_gcp, :group) }
+ let(:child_group1) { child_group1_cluster.group }
+
+ before do
+ project.update!(group: child_group1)
+ child_group1.update!(parent: group)
+ end
+
+ it 'returns the Kubernetes platform for the child group' do
+ is_expected.to eq(child_group1_cluster.platform_kubernetes)
+ end
+
+ context 'deeply nested group' do
+ let!(:child_group2_cluster) { create(:cluster, :provided_by_gcp, :group) }
+ let(:child_group2) { child_group2_cluster.group }
+
+ before do
+ child_group2.update!(parent: child_group1)
+ project.update!(group: child_group2)
+ end
+
+ it 'returns most nested group cluster Kubernetes platform' do
+ is_expected.to eq(child_group2_cluster.platform_kubernetes)
+ end
+
+ context 'cluster in the middle of hierarchy is disabled' do
+ before do
+ child_group2_cluster.update!(enabled: false)
+ end
+
+ it 'returns closest enabled Kubernetes platform' do
+ is_expected.to eq(child_group1_cluster.platform_kubernetes)
+ end
+ end
+ end
+ end
+
+ context 'feature flag disabled' do
+ before do
+ stub_feature_flags(group_clusters: false)
+ end
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
end
context 'when user configured kubernetes integration from project services' do
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 782687516ae..55d83bc3a6b 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -21,44 +21,59 @@ end
describe ApplicationSetting, 'TokenAuthenticatable' do
let(:token_field) { :runners_registration_token }
+ let(:settings) { described_class.new }
+
it_behaves_like 'TokenAuthenticatable'
describe 'generating new token' do
context 'token is not generated yet' do
describe 'token field accessor' do
- subject { described_class.new.send(token_field) }
+ subject { settings.send(token_field) }
+
it { is_expected.not_to be_blank }
end
- describe 'ensured token' do
- subject { described_class.new.send("ensure_#{token_field}") }
+ describe "ensure_runners_registration_token" do
+ subject { settings.send("ensure_#{token_field}") }
it { is_expected.to be_a String }
it { is_expected.not_to be_blank }
+
+ it 'does not persist token' do
+ expect(settings).not_to be_persisted
+ end
end
- describe 'ensured! token' do
- subject { described_class.new.send("ensure_#{token_field}!") }
+ describe 'ensure_runners_registration_token!' do
+ subject { settings.send("ensure_#{token_field}!") }
- it 'persists new token' do
- expect(subject).to eq described_class.current[token_field]
+ it 'persists new token as an encrypted string' do
+ expect(subject).to eq settings.reload.runners_registration_token
+ expect(settings.read_attribute('runners_registration_token_encrypted'))
+ .to eq Gitlab::CryptoHelper.aes256_gcm_encrypt(subject)
+ expect(settings).to be_persisted
+ end
+
+ it 'does not persist the token in clear text' do
+ expect(subject).not_to eq settings.reload
+ .read_attribute('runners_registration_token_encrypted')
end
end
end
context 'token is generated' do
before do
- subject.send("reset_#{token_field}!")
+ settings.send("reset_#{token_field}!")
end
it 'persists a new token' do
- expect(subject.send(:read_attribute, token_field)).to be_a String
+ expect(settings.runners_registration_token).to be_a String
end
end
end
describe 'setting new token' do
- subject { described_class.new.send("set_#{token_field}", '0123456789') }
+ subject { settings.send("set_#{token_field}", '0123456789') }
it { is_expected.to eq '0123456789' }
end
@@ -336,3 +351,89 @@ describe PersonalAccessToken, 'TokenAuthenticatable' do
end
end
end
+
+describe Ci::Build, 'TokenAuthenticatable' do
+ let(:token_field) { :token }
+ let(:build) { FactoryBot.build(:ci_build) }
+
+ it_behaves_like 'TokenAuthenticatable'
+
+ describe 'generating new token' do
+ context 'token is not generated yet' do
+ describe 'token field accessor' do
+ it 'makes it possible to access token' do
+ expect(build.token).to be_nil
+
+ build.save!
+
+ expect(build.token).to be_present
+ end
+ end
+
+ describe "ensure_token" do
+ subject { build.ensure_token }
+
+ it { is_expected.to be_a String }
+ it { is_expected.not_to be_blank }
+
+ it 'does not persist token' do
+ expect(build).not_to be_persisted
+ end
+ end
+
+ describe 'ensure_token!' do
+ it 'persists a new token' do
+ expect(build.ensure_token!).to eq build.reload.token
+ expect(build).to be_persisted
+ end
+
+ it 'persists new token as an encrypted string' do
+ build.ensure_token!
+
+ encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(build.token)
+
+ expect(build.read_attribute('token_encrypted')).to eq encrypted
+ end
+
+ it 'does not persist a token in clear text' do
+ build.ensure_token!
+
+ expect(build.read_attribute('token')).to be_nil
+ end
+ end
+ end
+
+ describe '#reset_token!' do
+ it 'persists a new token' do
+ build.save!
+
+ build.token.yield_self do |previous_token|
+ build.reset_token!
+
+ expect(build.token).not_to eq previous_token
+ expect(build.token).to be_a String
+ end
+ end
+ end
+ end
+
+ describe 'setting a new token' do
+ subject { build.set_token('0123456789') }
+
+ it 'returns the token' do
+ expect(subject).to eq '0123456789'
+ end
+
+ it 'writes a new encrypted token' do
+ expect(build.read_attribute('token_encrypted')).to be_nil
+ expect(subject).to eq '0123456789'
+ expect(build.read_attribute('token_encrypted')).to be_present
+ end
+
+ it 'does not write a new cleartext token' do
+ expect(build.read_attribute('token')).to be_nil
+ expect(subject).to eq '0123456789'
+ expect(build.read_attribute('token')).to be_nil
+ end
+ end
+end
diff --git a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
new file mode 100644
index 00000000000..6605f1f5a5f
--- /dev/null
+++ b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
@@ -0,0 +1,65 @@
+require 'spec_helper'
+
+describe TokenAuthenticatableStrategies::Base do
+ let(:instance) { double(:instance) }
+ let(:field) { double(:field) }
+
+ describe '.fabricate' do
+ context 'when digest strategy is specified' do
+ it 'fabricates digest strategy object' do
+ strategy = described_class.fabricate(instance, field, digest: true)
+
+ expect(strategy).to be_a TokenAuthenticatableStrategies::Digest
+ end
+ end
+
+ context 'when encrypted strategy is specified' do
+ it 'fabricates encrypted strategy object' do
+ strategy = described_class.fabricate(instance, field, encrypted: true)
+
+ expect(strategy).to be_a TokenAuthenticatableStrategies::Encrypted
+ end
+ end
+
+ context 'when no strategy is specified' do
+ it 'fabricates insecure strategy object' do
+ strategy = described_class.fabricate(instance, field, something: true)
+
+ expect(strategy).to be_a TokenAuthenticatableStrategies::Insecure
+ end
+ end
+
+ context 'when incompatible options are provided' do
+ it 'raises an error' do
+ expect { described_class.fabricate(instance, field, digest: true, encrypted: true) }
+ .to raise_error ArgumentError
+ end
+ end
+ end
+
+ describe '#fallback?' do
+ context 'when fallback is set' do
+ it 'recognizes fallback setting' do
+ strategy = described_class.new(instance, field, fallback: true)
+
+ expect(strategy.fallback?).to be true
+ end
+ end
+
+ context 'when fallback is not a valid value' do
+ it 'raises an error' do
+ strategy = described_class.new(instance, field, fallback: 'something')
+
+ expect { strategy.fallback? }.to raise_error ArgumentError
+ end
+ end
+
+ context 'when fallback is not set' do
+ it 'returns false' do
+ strategy = described_class.new(instance, field, {})
+
+ expect(strategy.fallback?).to eq false
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
new file mode 100644
index 00000000000..93cab80cb1f
--- /dev/null
+++ b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
@@ -0,0 +1,156 @@
+require 'spec_helper'
+
+describe TokenAuthenticatableStrategies::Encrypted do
+ let(:model) { double(:model) }
+ let(:instance) { double(:instance) }
+
+ let(:encrypted) do
+ Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value')
+ end
+
+ subject do
+ described_class.new(model, 'some_field', options)
+ end
+
+ describe '.new' do
+ context 'when fallback and migration strategies are set' do
+ let(:options) { { fallback: true, migrating: true } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error ArgumentError, /not compatible/
+ end
+ end
+ end
+
+ describe '#find_token_authenticatable' do
+ context 'when using fallback strategy' do
+ let(:options) { { fallback: true } }
+
+ it 'finds the encrypted resource by cleartext' do
+ allow(model).to receive(:find_by)
+ .with('some_field_encrypted' => encrypted)
+ .and_return('encrypted resource')
+
+ expect(subject.find_token_authenticatable('my-value'))
+ .to eq 'encrypted resource'
+ end
+
+ it 'uses insecure strategy when encrypted token cannot be found' do
+ allow(subject.send(:insecure_strategy))
+ .to receive(:find_token_authenticatable)
+ .and_return('plaintext resource')
+
+ allow(model).to receive(:find_by)
+ .with('some_field_encrypted' => encrypted)
+ .and_return(nil)
+
+ expect(subject.find_token_authenticatable('my-value'))
+ .to eq 'plaintext resource'
+ end
+ end
+
+ context 'when using migration strategy' do
+ let(:options) { { migrating: true } }
+
+ it 'finds the cleartext resource by cleartext' do
+ allow(model).to receive(:find_by)
+ .with('some_field' => 'my-value')
+ .and_return('cleartext resource')
+
+ expect(subject.find_token_authenticatable('my-value'))
+ .to eq 'cleartext resource'
+ end
+
+ it 'returns nil if resource cannot be found' do
+ allow(model).to receive(:find_by)
+ .with('some_field' => 'my-value')
+ .and_return(nil)
+
+ expect(subject.find_token_authenticatable('my-value'))
+ .to be_nil
+ end
+ end
+ end
+
+ describe '#get_token' do
+ context 'when using fallback strategy' do
+ let(:options) { { fallback: true } }
+
+ it 'returns decrypted token when an encrypted token is present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(encrypted)
+
+ expect(subject.get_token(instance)).to eq 'my-value'
+ end
+
+ it 'returns the plaintext token when encrypted token is not present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(nil)
+
+ allow(instance).to receive(:read_attribute)
+ .with('some_field')
+ .and_return('cleartext value')
+
+ expect(subject.get_token(instance)).to eq 'cleartext value'
+ end
+ end
+
+ context 'when using migration strategy' do
+ let(:options) { { migrating: true } }
+
+ it 'returns cleartext token when an encrypted token is present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(encrypted)
+
+ allow(instance).to receive(:read_attribute)
+ .with('some_field')
+ .and_return('my-cleartext-value')
+
+ expect(subject.get_token(instance)).to eq 'my-cleartext-value'
+ end
+
+ it 'returns the cleartext token when encrypted token is not present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(nil)
+
+ allow(instance).to receive(:read_attribute)
+ .with('some_field')
+ .and_return('cleartext value')
+
+ expect(subject.get_token(instance)).to eq 'cleartext value'
+ end
+ end
+ end
+
+ describe '#set_token' do
+ context 'when using fallback strategy' do
+ let(:options) { { fallback: true } }
+
+ it 'writes encrypted token and removes plaintext token and returns it' do
+ expect(instance).to receive(:[]=)
+ .with('some_field_encrypted', encrypted)
+ expect(instance).to receive(:[]=)
+ .with('some_field', nil)
+
+ expect(subject.set_token(instance, 'my-value')).to eq 'my-value'
+ end
+ end
+
+ context 'when using migration strategy' do
+ let(:options) { { migrating: true } }
+
+ it 'writes encrypted token and writes plaintext token' do
+ expect(instance).to receive(:[]=)
+ .with('some_field_encrypted', encrypted)
+ expect(instance).to receive(:[]=)
+ .with('some_field', 'my-value')
+
+ expect(subject.set_token(instance, 'my-value')).to eq 'my-value'
+ end
+ end
+ end
+end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 270b2767c68..a8d53cfcd7d 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -16,6 +16,8 @@ describe Deployment do
it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to validate_presence_of(:sha) }
+ it_behaves_like 'having unique enum values'
+
describe '#scheduled_actions' do
subject { deployment.scheduled_actions }
diff --git a/spec/models/gpg_signature_spec.rb b/spec/models/gpg_signature_spec.rb
index 0136bb61c07..cdd7dea2064 100644
--- a/spec/models/gpg_signature_spec.rb
+++ b/spec/models/gpg_signature_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe GpgSignature do
let(:gpg_key) { create(:gpg_key) }
let(:gpg_key_subkey) { create(:gpg_key_subkey) }
+ it_behaves_like 'having unique enum values'
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:gpg_key) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index ada00f03928..87aa5a46c21 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -76,7 +76,7 @@ describe Group do
before do
group.add_developer(user)
- sub_group.add_developer(user)
+ sub_group.add_maintainer(user)
end
it 'also gets notification settings from parent groups' do
@@ -498,7 +498,7 @@ describe Group do
it 'returns member users on every nest level without duplication' do
group.add_developer(user_a)
nested_group.add_developer(user_b)
- deep_nested_group.add_developer(user_a)
+ deep_nested_group.add_maintainer(user_a)
expect(group.users_with_descendants).to contain_exactly(user_a, user_b)
expect(nested_group.users_with_descendants).to contain_exactly(user_a, user_b)
@@ -745,4 +745,33 @@ describe Group do
let(:uploader_class) { AttachmentUploader }
end
end
+
+ describe '#group_clusters_enabled?' do
+ before do
+ # Override global stub in spec/spec_helper.rb
+ expect(Feature).to receive(:enabled?).and_call_original
+ end
+
+ subject { group.group_clusters_enabled? }
+
+ it { is_expected.to be_truthy }
+
+ context 'explicitly disabled for root ancestor' do
+ before do
+ feature = Feature.get(:group_clusters)
+ feature.disable(group.root_ancestor)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'explicitly enabled for root ancestor' do
+ before do
+ feature = Feature.get(:group_clusters)
+ feature.enable(group.root_ancestor)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
end
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 52c00a74b4b..4696341c05f 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -7,6 +7,8 @@ describe InternalId do
let(:scope) { { project: project } }
let(:init) { ->(s) { s.project.issues.size } }
+ it_behaves_like 'having unique enum values'
+
context 'validations' do
it { is_expected.to validate_presence_of(:usage) }
end
diff --git a/spec/models/list_spec.rb b/spec/models/list_spec.rb
index 17dc27bd132..a51580f8292 100644
--- a/spec/models/list_spec.rb
+++ b/spec/models/list_spec.rb
@@ -1,6 +1,8 @@
require 'rails_helper'
describe List do
+ it_behaves_like 'having unique enum values'
+
describe 'relationships' do
it { is_expected.to belong_to(:board) }
it { is_expected.to belong_to(:label) }
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index fca1b1f90d9..188beac1582 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -53,6 +53,29 @@ describe Member do
expect(member).to be_valid
end
end
+
+ context "when a child member inherits its access level" do
+ let(:user) { create(:user) }
+ let(:member) { create(:group_member, :developer, user: user) }
+ let(:child_group) { create(:group, parent: member.group) }
+ let(:child_member) { build(:group_member, group: child_group, user: user) }
+
+ it "requires a higher level" do
+ child_member.access_level = GroupMember::REPORTER
+
+ child_member.validate
+
+ expect(child_member).not_to be_valid
+ end
+
+ it "is valid with a higher level" do
+ child_member.access_level = GroupMember::MAINTAINER
+
+ child_member.validate
+
+ expect(child_member).to be_valid
+ end
+ end
end
describe 'Scopes & finders' do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 97959ed4304..a3451c67bd8 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -50,4 +50,26 @@ describe GroupMember do
group_member.destroy
end
end
+
+ context 'access levels', :nested_groups do
+ context 'with parent group' do
+ it_behaves_like 'inherited access level as a member of entity' do
+ let(:entity) { create(:group, parent: parent_entity) }
+ end
+ end
+
+ context 'with parent group and a sub subgroup' do
+ it_behaves_like 'inherited access level as a member of entity' do
+ let(:subgroup) { create(:group, parent: parent_entity) }
+ let(:entity) { create(:group, parent: subgroup) }
+ end
+
+ context 'when only the subgroup has the member' do
+ it_behaves_like 'inherited access level as a member of entity' do
+ let(:parent_entity) { create(:group, parent: create(:group)) }
+ let(:entity) { create(:group, parent: parent_entity) }
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 334d4f95f53..097b1bb30dc 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -124,4 +124,19 @@ describe ProjectMember do
end
it_behaves_like 'members notifications', :project
+
+ context 'access levels' do
+ context 'with parent group' do
+ it_behaves_like 'inherited access level as a member of entity' do
+ let(:entity) { create(:project, group: parent_entity) }
+ end
+ end
+
+ context 'with parent group and a subgroup', :nested_groups do
+ it_behaves_like 'inherited access level as a member of entity' do
+ let(:subgroup) { create(:group, parent: parent_entity) }
+ let(:entity) { create(:project, group: subgroup) }
+ end
+ end
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index ad55c280399..9b60054e14a 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1206,6 +1206,119 @@ describe MergeRequest do
expect(subject.all_pipelines).to contain_exactly(pipeline)
end
end
+
+ context 'when pipelines exist for the branch and merge request' do
+ let(:source_ref) { 'feature' }
+ let(:target_ref) { 'master' }
+
+ let!(:branch_pipeline) do
+ create(:ci_pipeline,
+ source: :push,
+ project: project,
+ ref: source_ref,
+ sha: shas.second)
+ end
+
+ let!(:merge_request_pipeline) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ project: project,
+ ref: source_ref,
+ sha: shas.second,
+ merge_request: merge_request)
+ end
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: source_ref,
+ target_project: project,
+ target_branch: target_ref)
+ end
+
+ let(:project) { create(:project, :repository) }
+ let(:shas) { project.repository.commits(source_ref, limit: 2).map(&:id) }
+
+ before do
+ allow(merge_request).to receive(:all_commit_shas) { shas }
+ end
+
+ it 'returns merge request pipeline first' do
+ expect(merge_request.all_pipelines)
+ .to eq([merge_request_pipeline,
+ branch_pipeline])
+ end
+
+ context 'when there are a branch pipeline and a merge request pipeline' do
+ let!(:branch_pipeline_2) do
+ create(:ci_pipeline,
+ source: :push,
+ project: project,
+ ref: source_ref,
+ sha: shas.first)
+ end
+
+ let!(:merge_request_pipeline_2) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ project: project,
+ ref: source_ref,
+ sha: shas.first,
+ merge_request: merge_request)
+ end
+
+ it 'returns merge request pipelines first' do
+ expect(merge_request.all_pipelines)
+ .to eq([merge_request_pipeline_2,
+ merge_request_pipeline,
+ branch_pipeline_2,
+ branch_pipeline])
+ end
+ end
+
+ context 'when there are multiple merge request pipelines from the same branch' do
+ let!(:branch_pipeline_2) do
+ create(:ci_pipeline,
+ source: :push,
+ project: project,
+ ref: source_ref,
+ sha: shas.first)
+ end
+
+ let!(:merge_request_pipeline_2) do
+ create(:ci_pipeline,
+ source: :merge_request,
+ project: project,
+ ref: source_ref,
+ sha: shas.first,
+ merge_request: merge_request_2)
+ end
+
+ let(:merge_request_2) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: source_ref,
+ target_project: project,
+ target_branch: 'stable')
+ end
+
+ before do
+ allow(merge_request_2).to receive(:all_commit_shas) { shas }
+ end
+
+ it 'returns only related merge request pipelines' do
+ expect(merge_request.all_pipelines)
+ .to eq([merge_request_pipeline,
+ branch_pipeline_2,
+ branch_pipeline])
+
+ expect(merge_request_2.all_pipelines)
+ .to eq([merge_request_pipeline_2,
+ branch_pipeline_2,
+ branch_pipeline])
+ end
+ end
+ end
end
describe '#has_test_reports?' do
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 2db42fe802a..96561dab1c9 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -538,7 +538,7 @@ describe Namespace do
it 'returns member users on every nest level without duplication' do
group.add_developer(user_a)
nested_group.add_developer(user_b)
- deep_nested_group.add_developer(user_a)
+ deep_nested_group.add_maintainer(user_a)
expect(group.users_with_descendants).to contain_exactly(user_a, user_b)
expect(nested_group.users_with_descendants).to contain_exactly(user_a, user_b)
@@ -560,6 +560,7 @@ describe Namespace do
let!(:project2) { create(:project_empty_repo, namespace: child) }
it { expect(group.all_projects.to_a).to match_array([project2, project1]) }
+ it { expect(child.all_projects.to_a).to match_array([project2]) }
end
describe '#all_pipelines' do
@@ -720,6 +721,7 @@ describe Namespace do
deep_nested_group = create(:group, parent: nested_group)
very_deep_nested_group = create(:group, parent: deep_nested_group)
+ expect(root_group.root_ancestor).to eq(root_group)
expect(nested_group.root_ancestor).to eq(root_group)
expect(deep_nested_group.root_ancestor).to eq(root_group)
expect(very_deep_nested_group.root_ancestor).to eq(root_group)
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index e545b674b4f..771d834c4bc 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -1,6 +1,8 @@
require 'rails_helper'
RSpec.describe NotificationSetting do
+ it_behaves_like 'having unique enum values'
+
describe "Associations" do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:source) }
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 342798f730b..7ff64c76e37 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -3,6 +3,8 @@ require 'spec_helper'
describe ProjectAutoDevops do
set(:project) { build(:project) }
+ it_behaves_like 'having unique enum values'
+
it { is_expected.to belong_to(:project) }
it { is_expected.to define_enum_for(:deploy_strategy) }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index af5b0939ca2..50920d9d1fc 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -4,6 +4,8 @@ describe Project do
include ProjectForksHelper
include GitHelpers
+ it_behaves_like 'having unique enum values'
+
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
@@ -61,7 +63,7 @@ describe Project do
it { is_expected.to have_one(:forked_from_project).through(:fork_network_member) }
it { is_expected.to have_one(:auto_devops).class_name('ProjectAutoDevops') }
it { is_expected.to have_many(:commit_statuses) }
- it { is_expected.to have_many(:pipelines) }
+ it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:build_trace_section_names)}
it { is_expected.to have_many(:runner_projects) }
@@ -85,6 +87,7 @@ describe Project do
it { is_expected.to have_many(:pipeline_schedules) }
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:clusters) }
+ it { is_expected.to have_many(:kubernetes_namespaces) }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
it { is_expected.to have_many(:project_badges).class_name('ProjectBadge') }
it { is_expected.to have_many(:lfs_file_locks) }
@@ -140,6 +143,29 @@ describe Project do
expect(subject.boards.size).to eq 1
end
end
+
+ describe 'ci_pipelines association' do
+ context 'when feature flag pipeline_ci_sources_only is enabled' do
+ it 'returns only pipelines from ci_sources' do
+ stub_feature_flags(pipeline_ci_sources_only: true)
+
+ expect(Ci::Pipeline).to receive(:ci_sources).and_call_original
+
+ subject.ci_pipelines
+ end
+ end
+
+ context 'when feature flag pipeline_ci_sources_only is disabled' do
+ it 'returns all pipelines' do
+ stub_feature_flags(pipeline_ci_sources_only: false)
+
+ expect(Ci::Pipeline).not_to receive(:ci_sources).and_call_original
+ expect(Ci::Pipeline).to receive(:all).and_call_original.at_least(:once)
+
+ subject.ci_pipelines
+ end
+ end
+ end
end
describe 'modules' do
@@ -152,6 +178,24 @@ describe Project do
it { is_expected.to include_module(Sortable) }
end
+ describe '.missing_kubernetes_namespace' do
+ let!(:project) { create(:project) }
+ let!(:cluster) { create(:cluster, :provided_by_user, :group) }
+ let(:kubernetes_namespaces) { project.kubernetes_namespaces }
+
+ subject { described_class.missing_kubernetes_namespace(kubernetes_namespaces) }
+
+ it { is_expected.to contain_exactly(project) }
+
+ context 'kubernetes namespace exists' do
+ before do
+ create(:cluster_kubernetes_namespace, project: project, cluster: cluster)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe 'validation' do
let!(:project) { create(:project) }
@@ -391,6 +435,8 @@ describe Project do
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
+ it { is_expected.to delegate_method(:group_clusters_enabled?).to(:group).with_arguments(allow_nil: true) }
+ it { is_expected.to delegate_method(:root_ancestor).to(:namespace).with_arguments(allow_nil: true) }
end
describe '#to_reference_with_postfix' do
@@ -2096,6 +2142,39 @@ describe Project do
it 'includes ancestors upto but excluding the given ancestor' do
expect(project.ancestors_upto(parent)).to contain_exactly(child2, child)
end
+
+ describe 'with hierarchy_order' do
+ it 'returns ancestors ordered by descending hierarchy' do
+ expect(project.ancestors_upto(hierarchy_order: :desc)).to eq([parent, child, child2])
+ end
+
+ it 'can be used with upto option' do
+ expect(project.ancestors_upto(parent, hierarchy_order: :desc)).to eq([child, child2])
+ end
+ end
+ end
+
+ describe '#root_ancestor' do
+ let(:project) { create(:project) }
+
+ subject { project.root_ancestor }
+
+ it { is_expected.to eq(project.namespace) }
+
+ context 'in a group' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ it { is_expected.to eq(group) }
+ end
+
+ context 'in a nested group', :nested_groups do
+ let(:root) { create(:group) }
+ let(:child) { create(:group, parent: root) }
+ let(:project) { create(:project, group: child) }
+
+ it { is_expected.to eq(root) }
+ end
end
describe '#lfs_enabled?' do
@@ -2708,6 +2787,17 @@ describe Project do
end
end
+ describe '#lfs_http_url_to_repo' do
+ let(:project) { create(:project) }
+
+ it 'returns the url to the repo without a username' do
+ lfs_http_url_to_repo = project.lfs_http_url_to_repo('operation_that_doesnt_matter')
+
+ expect(lfs_http_url_to_repo).to eq("#{project.web_url}.git")
+ expect(lfs_http_url_to_repo).not_to include('@')
+ end
+ end
+
describe '#pipeline_status' do
let(:project) { create(:project, :repository) }
it 'builds a pipeline status' do
@@ -3360,7 +3450,7 @@ describe Project do
context 'with a ref that is not the default branch' do
it 'returns the latest successful pipeline for the given ref' do
- expect(project.pipelines).to receive(:latest_successful_for).with('foo')
+ expect(project.ci_pipelines).to receive(:latest_successful_for).with('foo')
project.latest_successful_pipeline_for('foo')
end
@@ -3388,7 +3478,7 @@ describe Project do
it 'memoizes and returns the latest successful pipeline for the default branch' do
pipeline = double(:pipeline)
- expect(project.pipelines).to receive(:latest_successful_for)
+ expect(project.ci_pipelines).to receive(:latest_successful_for)
.with(project.default_branch)
.and_return(pipeline)
.once
@@ -3981,6 +4071,27 @@ describe Project do
end
end
+ describe '#all_clusters' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, cluster_type: :project_type, projects: [project]) }
+
+ subject { project.all_clusters }
+
+ it 'returns project level cluster' do
+ expect(subject).to eq([cluster])
+ end
+
+ context 'project belongs to a group' do
+ let(:group_cluster) { create(:cluster, :group) }
+ let(:group) { group_cluster.group }
+ let(:project) { create(:project, group: group) }
+
+ it 'returns clusters for groups of this project' do
+ expect(subject).to contain_exactly(cluster, group_cluster)
+ end
+ end
+ end
+
def rugged_config
rugged_repo(project.repository).config
end
diff --git a/spec/models/prometheus_metric_spec.rb b/spec/models/prometheus_metric_spec.rb
index a83a31ae88c..3692fe9a559 100644
--- a/spec/models/prometheus_metric_spec.rb
+++ b/spec/models/prometheus_metric_spec.rb
@@ -6,6 +6,8 @@ describe PrometheusMetric do
subject { build(:prometheus_metric) }
let(:other_project) { build(:project) }
+ it_behaves_like 'having unique enum values'
+
it { is_expected.to belong_to(:project) }
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_presence_of(:query) }
diff --git a/spec/models/push_event_payload_spec.rb b/spec/models/push_event_payload_spec.rb
index a049ad35584..69a4922b6fd 100644
--- a/spec/models/push_event_payload_spec.rb
+++ b/spec/models/push_event_payload_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe PushEventPayload do
+ it_behaves_like 'having unique enum values'
+
describe 'saving payloads' do
it 'does not allow commit messages longer than 70 characters' do
event = create(:push_event)
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index da6e1b5610d..e7e3f7376e6 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe ResourceLabelEvent, type: :model do
let(:issue) { create(:issue) }
let(:merge_request) { create(:merge_request) }
+ it_behaves_like 'having unique enum values'
+
describe 'associations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:issue) }
diff --git a/spec/models/user_callout_spec.rb b/spec/models/user_callout_spec.rb
index 64ba17c81fe..d54355afe12 100644
--- a/spec/models/user_callout_spec.rb
+++ b/spec/models/user_callout_spec.rb
@@ -3,6 +3,8 @@ require 'rails_helper'
describe UserCallout do
let!(:callout) { create(:user_callout) }
+ it_behaves_like 'having unique enum values'
+
describe 'relationships' do
it { is_expected.to belong_to(:user) }
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 7bd6dccd0ad..6cb27246f06 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4,6 +4,8 @@ describe User do
include ProjectForksHelper
include TermsHelper
+ it_behaves_like 'having unique enum values'
+
describe 'modules' do
subject { described_class }
@@ -2323,11 +2325,11 @@ describe User do
context 'user is member of all groups' do
before do
- group.add_owner(user)
- nested_group_1.add_owner(user)
- nested_group_1_1.add_owner(user)
- nested_group_2.add_owner(user)
- nested_group_2_1.add_owner(user)
+ group.add_reporter(user)
+ nested_group_1.add_developer(user)
+ nested_group_1_1.add_maintainer(user)
+ nested_group_2.add_developer(user)
+ nested_group_2_1.add_maintainer(user)
end
it 'returns all groups' do
diff --git a/spec/presenters/group_member_presenter_spec.rb b/spec/presenters/group_member_presenter_spec.rb
index c00e41725d9..bb66523a83d 100644
--- a/spec/presenters/group_member_presenter_spec.rb
+++ b/spec/presenters/group_member_presenter_spec.rb
@@ -135,4 +135,12 @@ describe GroupMemberPresenter do
end
end
end
+
+ it_behaves_like '#valid_level_roles', :group do
+ let(:expected_roles) { { 'Developer' => 30, 'Maintainer' => 40, 'Owner' => 50, 'Reporter' => 20 } }
+
+ before do
+ entity.parent = group
+ end
+ end
end
diff --git a/spec/presenters/project_member_presenter_spec.rb b/spec/presenters/project_member_presenter_spec.rb
index 83db5c56cdf..73ef113a1c5 100644
--- a/spec/presenters/project_member_presenter_spec.rb
+++ b/spec/presenters/project_member_presenter_spec.rb
@@ -135,4 +135,10 @@ describe ProjectMemberPresenter do
end
end
end
+
+ it_behaves_like '#valid_level_roles', :project do
+ before do
+ entity.group = group
+ end
+ end
end
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index cd43bec35df..a43304c9b83 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -16,8 +16,8 @@ describe API::CommitStatuses do
let(:get_url) { "/projects/#{project.id}/repository/commits/#{sha}/statuses" }
context 'ci commit exists' do
- let!(:master) { project.pipelines.create(source: :push, sha: commit.id, ref: 'master', protected: false) }
- let!(:develop) { project.pipelines.create(source: :push, sha: commit.id, ref: 'develop', protected: false) }
+ let!(:master) { project.ci_pipelines.create(source: :push, sha: commit.id, ref: 'master', protected: false) }
+ let!(:develop) { project.ci_pipelines.create(source: :push, sha: commit.id, ref: 'develop', protected: false) }
context "reporter user" do
let(:statuses_id) { json_response.map { |status| status['id'] } }
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 329d069ef3d..9e599c2175f 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -818,7 +818,7 @@ describe API::Commits do
end
context 'when the ref has a pipeline' do
- let!(:pipeline) { project.pipelines.create(source: :push, ref: 'master', sha: commit.sha, protected: false) }
+ let!(:pipeline) { project.ci_pipelines.create(source: :push, ref: 'master', sha: commit.sha, protected: false) }
it 'includes a "created" status' do
get api(route, current_user)
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index 334dbb1c34c..620f9f5e1d6 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -121,6 +121,13 @@ describe API::Files do
end
end
+ context 'when PATs are used' do
+ it_behaves_like 'repository files' do
+ let(:token) { create(:personal_access_token, scopes: ['read_repository'], user: user) }
+ let(:current_user) { { personal_access_token: token } }
+ end
+ end
+
context 'when authenticated', 'as a developer' do
it_behaves_like 'repository files' do
let(:current_user) { user }
@@ -217,6 +224,13 @@ describe API::Files do
end
end
+ context 'when PATs are used' do
+ it_behaves_like 'repository files' do
+ let(:token) { create(:personal_access_token, scopes: ['read_repository'], user: user) }
+ let(:current_user) { { personal_access_token: token } }
+ end
+ end
+
context 'when unauthenticated', 'and project is private' do
it_behaves_like '404 response' do
let(:request) { get api(route(file_path)), params }
@@ -317,6 +331,21 @@ describe API::Files do
let(:request) { get api(route(file_path), guest), params }
end
end
+
+ context 'when PATs are used' do
+ it 'returns file by commit sha' do
+ token = create(:personal_access_token, scopes: ['read_repository'], user: user)
+
+ # This file is deleted on HEAD
+ file_path = "files%2Fjs%2Fcommit%2Ejs%2Ecoffee"
+ params[:ref] = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
+ expect(Gitlab::Workhorse).to receive(:send_git_blob)
+
+ get api(route(file_path) + "/raw", personal_access_token: token), params
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
end
describe "POST /projects/:id/repository/files/:file_path" do
@@ -362,6 +391,24 @@ describe API::Files do
expect(response).to have_gitlab_http_status(400)
end
+ context 'with PATs' do
+ it 'returns 403 with `read_repository` scope' do
+ token = create(:personal_access_token, scopes: ['read_repository'], user: user)
+
+ post api(route(file_path), personal_access_token: token), params
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+
+ it 'returns 201 with `api` scope' do
+ token = create(:personal_access_token, scopes: ['api'], user: user)
+
+ post api(route(file_path), personal_access_token: token), params
+
+ expect(response).to have_gitlab_http_status(201)
+ end
+ end
+
context "when specifying an author" do
it "creates a new file with the specified author" do
params.merge!(author_email: author_email, author_name: author_name)
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 93e1c3a2294..bb32d581176 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -224,6 +224,37 @@ describe API::Members do
end
end
+ context 'access levels' do
+ it 'does not create the member if group level is higher', :nested_groups do
+ parent = create(:group)
+
+ group.update(parent: parent)
+ project.update(group: group)
+ parent.add_developer(stranger)
+
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ user_id: stranger.id, access_level: Member::REPORTER
+
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']['access_level']).to eq(["should be higher than Developer inherited membership from group #{parent.name}"])
+ end
+
+ it 'creates the member if group level is lower', :nested_groups do
+ parent = create(:group)
+
+ group.update(parent: parent)
+ project.update(group: group)
+ parent.add_developer(stranger)
+
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ user_id: stranger.id, access_level: Member::MAINTAINER
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['id']).to eq(stranger.id)
+ expect(json_response['access_level']).to eq(Member::MAINTAINER)
+ end
+ end
+
it "returns 409 if member already exists" do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
user_id: maintainer.id, access_level: Member::MAINTAINER
diff --git a/spec/requests/api/pipelines_spec.rb b/spec/requests/api/pipelines_spec.rb
index 638cc9767d4..2e4fa0f9e16 100644
--- a/spec/requests/api/pipelines_spec.rb
+++ b/spec/requests/api/pipelines_spec.rb
@@ -304,7 +304,7 @@ describe API::Pipelines do
it 'creates and returns a new pipeline' do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch
- end.to change { project.pipelines.count }.by(1)
+ end.to change { project.ci_pipelines.count }.by(1)
expect(response).to have_gitlab_http_status(201)
expect(json_response).to be_a Hash
@@ -317,8 +317,8 @@ describe API::Pipelines do
it 'creates and returns a new pipeline using the given variables' do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch, variables: variables
- end.to change { project.pipelines.count }.by(1)
- expect_variables(project.pipelines.last.variables, variables)
+ end.to change { project.ci_pipelines.count }.by(1)
+ expect_variables(project.ci_pipelines.last.variables, variables)
expect(response).to have_gitlab_http_status(201)
expect(json_response).to be_a Hash
@@ -338,8 +338,8 @@ describe API::Pipelines do
it 'creates and returns a new pipeline using the given variables' do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch, variables: variables
- end.to change { project.pipelines.count }.by(1)
- expect_variables(project.pipelines.last.variables, variables)
+ end.to change { project.ci_pipelines.count }.by(1)
+ expect_variables(project.ci_pipelines.last.variables, variables)
expect(response).to have_gitlab_http_status(201)
expect(json_response).to be_a Hash
@@ -353,7 +353,7 @@ describe API::Pipelines do
it "doesn't create a job" do
expect do
post api("/projects/#{project.id}/pipeline", user), ref: project.default_branch
- end.not_to change { project.pipelines.count }
+ end.not_to change { project.ci_pipelines.count }
expect(response).to have_gitlab_http_status(400)
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 62b6a3ce42e..e40db55cd20 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1906,7 +1906,7 @@ describe API::Projects do
let(:group) { create(:group) }
let(:group2) do
group = create(:group, name: 'group2_name')
- group.add_owner(user2)
+ group.add_maintainer(user2)
group
end
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb
index 0ae6796d1e4..658df6945d2 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/triggers_spec.rb
@@ -39,7 +39,7 @@ describe API::Triggers do
end
context 'Have a commit' do
- let(:pipeline) { project.pipelines.last }
+ let(:pipeline) { project.ci_pipelines.last }
it 'creates pipeline' do
post api("/projects/#{project.id}/trigger/pipeline"), options.merge(ref: 'master')
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index c71eae9164a..0dc459d9b5a 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -302,7 +302,7 @@ describe 'Git HTTP requests' do
it 'rejects pushes with 403 Forbidden' do
upload(path, env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
- expect(response.body).to eq(change_access_error(:push_code))
+ expect(response.body).to eq('You are not allowed to push code to this project.')
end
end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index bdfb12dc5df..5c3b37ef11c 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -36,36 +36,33 @@ describe 'project routing' do
shared_examples 'RESTful project resources' do
let(:actions) { [:index, :create, :new, :edit, :show, :update, :destroy] }
let(:controller_path) { controller }
- let(:id) { { id: '1' } }
- let(:format) { {} } # response format, e.g. { format: :html }
- let(:params) { { namespace_id: 'gitlab', project_id: 'gitlabhq' } }
it 'to #index' do
- expect(get("/gitlab/gitlabhq/#{controller_path}")).to route_to("projects/#{controller}#index", params) if actions.include?(:index)
+ expect(get("/gitlab/gitlabhq/#{controller_path}")).to route_to("projects/#{controller}#index", namespace_id: 'gitlab', project_id: 'gitlabhq') if actions.include?(:index)
end
it 'to #create' do
- expect(post("/gitlab/gitlabhq/#{controller_path}")).to route_to("projects/#{controller}#create", params) if actions.include?(:create)
+ expect(post("/gitlab/gitlabhq/#{controller_path}")).to route_to("projects/#{controller}#create", namespace_id: 'gitlab', project_id: 'gitlabhq') if actions.include?(:create)
end
it 'to #new' do
- expect(get("/gitlab/gitlabhq/#{controller_path}/new")).to route_to("projects/#{controller}#new", params) if actions.include?(:new)
+ expect(get("/gitlab/gitlabhq/#{controller_path}/new")).to route_to("projects/#{controller}#new", namespace_id: 'gitlab', project_id: 'gitlabhq') if actions.include?(:new)
end
it 'to #edit' do
- expect(get("/gitlab/gitlabhq/#{controller_path}/1/edit")).to route_to("projects/#{controller}#edit", params.merge(**id, **format)) if actions.include?(:edit)
+ expect(get("/gitlab/gitlabhq/#{controller_path}/1/edit")).to route_to("projects/#{controller}#edit", namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1') if actions.include?(:edit)
end
it 'to #show' do
- expect(get("/gitlab/gitlabhq/#{controller_path}/1")).to route_to("projects/#{controller}#show", params.merge(**id, **format)) if actions.include?(:show)
+ expect(get("/gitlab/gitlabhq/#{controller_path}/1")).to route_to("projects/#{controller}#show", namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1') if actions.include?(:show)
end
it 'to #update' do
- expect(put("/gitlab/gitlabhq/#{controller_path}/1")).to route_to("projects/#{controller}#update", params.merge(id)) if actions.include?(:update)
+ expect(put("/gitlab/gitlabhq/#{controller_path}/1")).to route_to("projects/#{controller}#update", namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1') if actions.include?(:update)
end
it 'to #destroy' do
- expect(delete("/gitlab/gitlabhq/#{controller_path}/1")).to route_to("projects/#{controller}#destroy", params.merge(**id, **format)) if actions.include?(:destroy)
+ expect(delete("/gitlab/gitlabhq/#{controller_path}/1")).to route_to("projects/#{controller}#destroy", namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1') if actions.include?(:destroy)
end
end
@@ -154,13 +151,12 @@ describe 'project routing' do
end
it 'to #history' do
- expect(get('/gitlab/gitlabhq/wikis/1/history')).to route_to('projects/wikis#history', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1', format: :html)
+ expect(get('/gitlab/gitlabhq/wikis/1/history')).to route_to('projects/wikis#history', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it_behaves_like 'RESTful project resources' do
let(:actions) { [:create, :edit, :show, :destroy] }
let(:controller) { 'wikis' }
- let(:format) { { format: :html } }
end
end
diff --git a/spec/serializers/issue_board_entity_spec.rb b/spec/serializers/issue_board_entity_spec.rb
new file mode 100644
index 00000000000..06d9d3657e6
--- /dev/null
+++ b/spec/serializers/issue_board_entity_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IssueBoardEntity do
+ let(:project) { create(:project) }
+ let(:resource) { create(:issue, project: project) }
+ let(:user) { create(:user) }
+
+ let(:request) { double('request', current_user: user) }
+
+ subject { described_class.new(resource, request: request).as_json }
+
+ it 'has basic attributes' do
+ expect(subject).to include(:id, :iid, :title, :confidential, :due_date, :project_id, :relative_position,
+ :project, :labels)
+ end
+
+ it 'has path and endpoints' do
+ expect(subject).to include(:reference_path, :real_path, :issue_sidebar_endpoint,
+ :toggle_subscription_endpoint, :assignable_labels_endpoint)
+ end
+end
diff --git a/spec/serializers/issue_serializer_spec.rb b/spec/serializers/issue_serializer_spec.rb
index 75578816e75..e8c46c0cdee 100644
--- a/spec/serializers/issue_serializer_spec.rb
+++ b/spec/serializers/issue_serializer_spec.rb
@@ -24,4 +24,12 @@ describe IssueSerializer do
expect(json_entity).to match_schema('entities/issue_sidebar')
end
end
+
+ context 'board issue serialization' do
+ let(:serializer) { 'board' }
+
+ it 'matches board issue json schema' do
+ expect(json_entity).to match_schema('entities/issue_board')
+ end
+ end
end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index e67d12b7a89..774486dcb6d 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -44,7 +44,7 @@ describe PipelineEntity do
expect(subject).to include :flags
expect(subject[:flags])
.to include :latest, :stuck, :auto_devops,
- :yaml_errors, :retryable, :cancelable
+ :yaml_errors, :retryable, :cancelable, :merge_request
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index a4582d1bc64..ccc6b0ef1c7 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -18,7 +18,8 @@ describe Ci::CreatePipelineService do
message: 'Message',
ref: ref_name,
trigger_request: nil,
- variables_attributes: nil)
+ variables_attributes: nil,
+ merge_request: nil)
params = { ref: ref,
before: '00000000',
after: after,
@@ -26,7 +27,7 @@ describe Ci::CreatePipelineService do
variables_attributes: variables_attributes }
described_class.new(project, user, params).execute(
- source, trigger_request: trigger_request)
+ source, trigger_request: trigger_request, merge_request: merge_request)
end
context 'valid params' do
@@ -43,7 +44,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_valid
expect(pipeline).to be_persisted
expect(pipeline).to be_push
- expect(pipeline).to eq(project.pipelines.last)
+ expect(pipeline).to eq(project.ci_pipelines.last)
expect(pipeline).to have_attributes(user: user)
expect(pipeline).to have_attributes(status: 'pending')
expect(pipeline.repository_source?).to be true
@@ -60,10 +61,10 @@ describe Ci::CreatePipelineService do
context 'when merge requests already exist for this source branch' do
let(:merge_request_1) do
- create(:merge_request, source_branch: 'master', target_branch: "branch_1", source_project: project)
+ create(:merge_request, source_branch: 'feature', target_branch: "master", source_project: project)
end
let(:merge_request_2) do
- create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project)
+ create(:merge_request, source_branch: 'feature', target_branch: "v1.1.0", source_project: project)
end
context 'when related merge request is already merged' do
@@ -83,7 +84,7 @@ describe Ci::CreatePipelineService do
merge_request_1
merge_request_2
- head_pipeline = execute_service
+ head_pipeline = execute_service(ref: 'feature', after: nil)
expect(merge_request_1.reload.head_pipeline).to eq(head_pipeline)
expect(merge_request_2.reload.head_pipeline).to eq(head_pipeline)
@@ -123,12 +124,12 @@ describe Ci::CreatePipelineService do
let!(:target_project) { create(:project, :repository) }
it 'updates head pipeline for merge request' do
- merge_request = create(:merge_request, source_branch: 'master',
- target_branch: "branch_1",
+ merge_request = create(:merge_request, source_branch: 'feature',
+ target_branch: "master",
source_project: project,
target_project: target_project)
- head_pipeline = execute_service
+ head_pipeline = execute_service(ref: 'feature', after: nil)
expect(merge_request.reload.head_pipeline).to eq(head_pipeline)
end
@@ -656,6 +657,212 @@ describe Ci::CreatePipelineService do
end
end
end
+
+ describe 'Merge request pipelines' do
+ let(:pipeline) do
+ execute_service(source: source, merge_request: merge_request, ref: ref_name)
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ let(:ref_name) { 'feature' }
+
+ context 'when source is merge request' do
+ let(:source) { :merge_request }
+
+ context "when config has merge_requests keywords" do
+ let(:config) do
+ {
+ build: {
+ stage: 'build',
+ script: 'echo'
+ },
+ test: {
+ stage: 'test',
+ script: 'echo',
+ only: ['merge_requests']
+ },
+ pages: {
+ stage: 'deploy',
+ script: 'echo',
+ except: ['merge_requests']
+ }
+ }
+ end
+
+ context 'when merge request is specified' do
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: ref_name,
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'creates a merge request pipeline' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_merge_request
+ expect(pipeline.merge_request).to eq(merge_request)
+ expect(pipeline.builds.order(:stage_id).map(&:name)).to eq(%w[test])
+ end
+
+ context 'when ref is tag' do
+ let(:ref_name) { 'v1.1.0' }
+
+ it 'does not create a merge request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:tag]).to eq(["is not included in the list"])
+ end
+ end
+
+ context 'when merge request is created from a forked project' do
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: ref_name,
+ target_project: target_project,
+ target_branch: 'master')
+ end
+
+ let!(:project) { fork_project(target_project, nil, repository: true) }
+ let!(:target_project) { create(:project, :repository) }
+
+ it 'creates a merge request pipeline in the forked project' do
+ expect(pipeline).to be_persisted
+ expect(project.ci_pipelines).to eq([pipeline])
+ expect(target_project.ci_pipelines).to be_empty
+ end
+ end
+
+ context "when there are no matched jobs" do
+ let(:config) do
+ {
+ test: {
+ stage: 'test',
+ script: 'echo',
+ except: ['merge_requests']
+ }
+ }
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:base]).to eq(["No stages / jobs for this pipeline."])
+ end
+ end
+ end
+
+ context 'when merge request is not specified' do
+ let(:merge_request) { nil }
+
+ it 'does not create a merge request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:merge_request]).to eq(["can't be blank"])
+ end
+ end
+ end
+
+ context "when config does not have merge_requests keywords" do
+ let(:config) do
+ {
+ build: {
+ stage: 'build',
+ script: 'echo'
+ },
+ test: {
+ stage: 'test',
+ script: 'echo'
+ },
+ pages: {
+ stage: 'deploy',
+ script: 'echo'
+ }
+ }
+ end
+
+ context 'when merge request is specified' do
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: ref_name,
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect(pipeline).not_to be_persisted
+
+ expect(pipeline.errors[:base])
+ .to eq(['No stages / jobs for this pipeline.'])
+ end
+ end
+
+ context 'when merge request is not specified' do
+ let(:merge_request) { nil }
+
+ it 'does not create a merge request pipeline' do
+ expect(pipeline).not_to be_persisted
+
+ expect(pipeline.errors[:base])
+ .to eq(['No stages / jobs for this pipeline.'])
+ end
+ end
+ end
+ end
+
+ context 'when source is web' do
+ let(:source) { :web }
+
+ context "when config has merge_requests keywords" do
+ let(:config) do
+ {
+ build: {
+ stage: 'build',
+ script: 'echo'
+ },
+ test: {
+ stage: 'test',
+ script: 'echo',
+ only: ['merge_requests']
+ },
+ pages: {
+ stage: 'deploy',
+ script: 'echo',
+ except: ['merge_requests']
+ }
+ }
+ end
+
+ context 'when merge request is specified' do
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: ref_name,
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors[:merge_request]).to eq(["must be blank"])
+ end
+ end
+
+ context 'when merge request is not specified' do
+ let(:merge_request) { nil }
+
+ it 'creates a branch pipeline' do
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_web
+ expect(pipeline.merge_request).to be_nil
+ expect(pipeline.builds.order(:stage_id).map(&:name)).to eq(%w[build pages])
+ end
+ end
+ end
+ end
+ end
end
describe '#execute!' do
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index e779675744c..87185891470 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -20,9 +20,9 @@ describe Ci::RetryBuildService do
CLONE_ACCESSORS = described_class::CLONE_ACCESSORS
REJECT_ACCESSORS =
- %i[id status user token coverage trace runner artifacts_expire_at
- artifacts_file artifacts_metadata artifacts_size created_at
- updated_at started_at finished_at queued_at erased_by
+ %i[id status user token token_encrypted coverage trace runner
+ artifacts_expire_at artifacts_file artifacts_metadata artifacts_size
+ created_at updated_at started_at finished_at queued_at erased_by
erased_at auto_canceled_by job_artifacts job_artifacts_archive
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_dependency_scanning
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
index efee158739d..d69678c1277 100644
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
@@ -19,6 +19,10 @@ describe Clusters::Gcp::FinalizeCreationService, '#execute' do
subject { described_class.new.execute(provider) }
+ before do
+ allow(ClusterPlatformConfigureWorker).to receive(:perform_async)
+ end
+
shared_examples 'success' do
it 'configures provider and kubernetes' do
subject
@@ -39,14 +43,10 @@ describe Clusters::Gcp::FinalizeCreationService, '#execute' do
expect(platform.token).to eq(token)
end
- it 'creates kubernetes namespace model' do
- subject
+ it 'calls ClusterPlatformConfigureWorker in an async fashion' do
+ expect(ClusterPlatformConfigureWorker).to receive(:perform_async).with(cluster.id)
- kubernetes_namespace = cluster.reload.kubernetes_namespace
- expect(kubernetes_namespace).to be_persisted
- expect(kubernetes_namespace.namespace).to eq(namespace)
- expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
- expect(kubernetes_namespace.service_account_token).to be_present
+ subject
end
end
@@ -104,8 +104,10 @@ describe Clusters::Gcp::FinalizeCreationService, '#execute' do
stub_kubeclient_discover(api_url)
stub_kubeclient_get_namespace(api_url)
stub_kubeclient_create_namespace(api_url)
+ stub_kubeclient_get_service_account_error(api_url, 'gitlab')
stub_kubeclient_create_service_account(api_url)
stub_kubeclient_create_secret(api_url)
+ stub_kubeclient_put_secret(api_url, 'gitlab-token')
stub_kubeclient_get_secret(
api_url,
@@ -115,19 +117,6 @@ describe Clusters::Gcp::FinalizeCreationService, '#execute' do
namespace: 'default'
}
)
-
- stub_kubeclient_get_namespace(api_url, namespace: namespace)
- stub_kubeclient_create_service_account(api_url, namespace: namespace)
- stub_kubeclient_create_secret(api_url, namespace: namespace)
-
- stub_kubeclient_get_secret(
- api_url,
- {
- metadata_name: "#{namespace}-token",
- token: Base64.encode64(token),
- namespace: namespace
- }
- )
end
end
@@ -155,8 +144,8 @@ describe Clusters::Gcp::FinalizeCreationService, '#execute' do
before do
provider.legacy_abac = false
+ stub_kubeclient_get_cluster_role_binding_error(api_url, 'gitlab-admin')
stub_kubeclient_create_cluster_role_binding(api_url)
- stub_kubeclient_create_role_binding(api_url, namespace: namespace)
end
include_context 'kubernetes information successfully fetched'
diff --git a/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb
index 661364ac765..fe785735fef 100644
--- a/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb
@@ -10,6 +10,7 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService, '#execute' d
let(:api_url) { 'https://kubernetes.example.com' }
let(:project) { cluster.project }
let(:cluster_project) { cluster.cluster_project }
+ let(:namespace) { "#{project.path}-#{project.id}" }
subject do
described_class.new(
@@ -18,40 +19,31 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService, '#execute' d
).execute
end
- shared_context 'kubernetes requests' do
- before do
- stub_kubeclient_discover(api_url)
- stub_kubeclient_get_namespace(api_url)
- stub_kubeclient_create_service_account(api_url)
- stub_kubeclient_create_secret(api_url)
-
- stub_kubeclient_get_namespace(api_url, namespace: namespace)
- stub_kubeclient_create_service_account(api_url, namespace: namespace)
- stub_kubeclient_create_secret(api_url, namespace: namespace)
-
- stub_kubeclient_get_secret(
- api_url,
- {
- metadata_name: "#{namespace}-token",
- token: Base64.encode64('sample-token'),
- namespace: namespace
- }
- )
- end
+ before do
+ stub_kubeclient_discover(api_url)
+ stub_kubeclient_get_namespace(api_url)
+ stub_kubeclient_get_service_account_error(api_url, 'gitlab')
+ stub_kubeclient_create_service_account(api_url)
+ stub_kubeclient_get_secret_error(api_url, 'gitlab-token')
+ stub_kubeclient_create_secret(api_url)
+
+ stub_kubeclient_get_namespace(api_url, namespace: namespace)
+ stub_kubeclient_get_service_account_error(api_url, "#{namespace}-service-account", namespace: namespace)
+ stub_kubeclient_create_service_account(api_url, namespace: namespace)
+ stub_kubeclient_create_secret(api_url, namespace: namespace)
+ stub_kubeclient_put_secret(api_url, "#{namespace}-token", namespace: namespace)
+
+ stub_kubeclient_get_secret(
+ api_url,
+ {
+ metadata_name: "#{namespace}-token",
+ token: Base64.encode64('sample-token'),
+ namespace: namespace
+ }
+ )
end
- context 'when kubernetes namespace is not persisted' do
- let(:namespace) { "#{project.path}-#{project.id}" }
-
- let(:kubernetes_namespace) do
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- project: cluster_project.project,
- cluster_project: cluster_project)
- end
-
- include_context 'kubernetes requests'
-
+ shared_examples 'successful creation of kubernetes namespace' do
it 'creates a Clusters::KubernetesNamespace' do
expect do
subject
@@ -59,7 +51,7 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService, '#execute' d
end
it 'creates project service account' do
- expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateServiceAccountService).to receive(:execute).once
+ expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:execute).once
subject
end
@@ -74,42 +66,69 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService, '#execute' d
end
end
- context 'when there is a Kubernetes Namespace associated' do
- let(:namespace) { 'new-namespace' }
+ context 'group clusters' do
+ let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
+ let(:group) { cluster.group }
+ let(:project) { create(:project, group: group) }
+
+ context 'when kubernetes namespace is not persisted' do
+ let(:kubernetes_namespace) do
+ build(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ project: project)
+ end
- let(:kubernetes_namespace) do
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- project: cluster_project.project,
- cluster_project: cluster_project)
+ it_behaves_like 'successful creation of kubernetes namespace'
end
+ end
- include_context 'kubernetes requests'
+ context 'project clusters' do
+ context 'when kubernetes namespace is not persisted' do
+ let(:kubernetes_namespace) do
+ build(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ project: cluster_project.project,
+ cluster_project: cluster_project)
+ end
- before do
- platform.update_column(:namespace, 'new-namespace')
+ it_behaves_like 'successful creation of kubernetes namespace'
end
- it 'does not create any Clusters::KubernetesNamespace' do
- subject
+ context 'when there is a Kubernetes Namespace associated' do
+ let(:namespace) { 'new-namespace' }
- expect(cluster.kubernetes_namespace).to eq(kubernetes_namespace)
- end
+ let(:kubernetes_namespace) do
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ project: cluster_project.project,
+ cluster_project: cluster_project)
+ end
- it 'creates project service account' do
- expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateServiceAccountService).to receive(:execute).once
+ before do
+ platform.update_column(:namespace, 'new-namespace')
+ end
- subject
- end
+ it 'does not create any Clusters::KubernetesNamespace' do
+ subject
- it 'updates Clusters::KubernetesNamespace' do
- subject
+ expect(cluster.kubernetes_namespace).to eq(kubernetes_namespace)
+ end
- kubernetes_namespace.reload
+ it 'creates project service account' do
+ expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:execute).once
- expect(kubernetes_namespace.namespace).to eq(namespace)
- expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
- expect(kubernetes_namespace.encrypted_service_account_token).to be_present
+ subject
+ end
+
+ it 'updates Clusters::KubernetesNamespace' do
+ subject
+
+ kubernetes_namespace.reload
+
+ expect(kubernetes_namespace.namespace).to eq(namespace)
+ expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
+ expect(kubernetes_namespace.encrypted_service_account_token).to be_present
+ end
end
end
end
diff --git a/spec/services/clusters/gcp/kubernetes/create_service_account_service_spec.rb b/spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb
index 588edff85d4..11a65d0c300 100644
--- a/spec/services/clusters/gcp/kubernetes/create_service_account_service_spec.rb
+++ b/spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-describe Clusters::Gcp::Kubernetes::CreateServiceAccountService do
+describe Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService do
include KubernetesHelpers
let(:api_url) { 'http://111.111.111.111' }
@@ -55,7 +55,11 @@ describe Clusters::Gcp::Kubernetes::CreateServiceAccountService do
before do
stub_kubeclient_discover(api_url)
stub_kubeclient_get_namespace(api_url, namespace: namespace)
- stub_kubeclient_create_service_account(api_url, namespace: namespace )
+
+ stub_kubeclient_get_service_account_error(api_url, service_account_name, namespace: namespace)
+ stub_kubeclient_create_service_account(api_url, namespace: namespace)
+
+ stub_kubeclient_get_secret_error(api_url, token_name, namespace: namespace)
stub_kubeclient_create_secret(api_url, namespace: namespace)
end
@@ -74,10 +78,12 @@ describe Clusters::Gcp::Kubernetes::CreateServiceAccountService do
context 'with RBAC cluster' do
let(:rbac) { true }
+ let(:cluster_role_binding_name) { 'gitlab-admin' }
before do
cluster.platform_kubernetes.rbac!
+ stub_kubeclient_get_cluster_role_binding_error(api_url, cluster_role_binding_name)
stub_kubeclient_create_cluster_role_binding(api_url)
end
@@ -130,10 +136,12 @@ describe Clusters::Gcp::Kubernetes::CreateServiceAccountService do
context 'With RBAC enabled cluster' do
let(:rbac) { true }
+ let(:role_binding_name) { "gitlab-#{namespace}" }
before do
cluster.platform_kubernetes.rbac!
+ stub_kubeclient_get_role_binding_error(api_url, role_binding_name, namespace: namespace)
stub_kubeclient_create_role_binding(api_url, namespace: namespace)
end
diff --git a/spec/services/clusters/refresh_service_spec.rb b/spec/services/clusters/refresh_service_spec.rb
new file mode 100644
index 00000000000..58ab3c3cf73
--- /dev/null
+++ b/spec/services/clusters/refresh_service_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::RefreshService do
+ shared_examples 'creates a kubernetes namespace' do
+ let(:token) { 'aaaaaa' }
+ let(:service_account_creator) { double(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService, execute: true) }
+ let(:secrets_fetcher) { double(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService, execute: token) }
+
+ it 'creates a kubernetes namespace' do
+ expect(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:namespace_creator).and_return(service_account_creator)
+ expect(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService).to receive(:new).and_return(secrets_fetcher)
+
+ expect { subject }.to change(project.kubernetes_namespaces, :count)
+
+ kubernetes_namespace = cluster.kubernetes_namespaces.first
+ expect(kubernetes_namespace).to be_present
+ expect(kubernetes_namespace.project).to eq(project)
+ end
+ end
+
+ shared_examples 'does not create a kubernetes namespace' do
+ it 'does not create a new kubernetes namespace' do
+ expect(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).not_to receive(:namespace_creator)
+ expect(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService).not_to receive(:new)
+
+ expect { subject }.not_to change(Clusters::KubernetesNamespace, :count)
+ end
+ end
+
+ describe '.create_or_update_namespaces_for_cluster' do
+ let(:cluster) { create(:cluster, :provided_by_user, :project) }
+ let(:project) { cluster.project }
+
+ subject { described_class.create_or_update_namespaces_for_cluster(cluster) }
+
+ context 'cluster is project level' do
+ include_examples 'creates a kubernetes namespace'
+
+ context 'when project already has kubernetes namespace' do
+ before do
+ create(:cluster_kubernetes_namespace, project: project, cluster: cluster)
+ end
+
+ include_examples 'does not create a kubernetes namespace'
+ end
+ end
+
+ context 'cluster is group level' do
+ let(:cluster) { create(:cluster, :provided_by_user, :group) }
+ let(:group) { cluster.group }
+ let(:project) { create(:project, group: group) }
+
+ include_examples 'creates a kubernetes namespace'
+
+ context 'when project already has kubernetes namespace' do
+ before do
+ create(:cluster_kubernetes_namespace, project: project, cluster: cluster)
+ end
+
+ include_examples 'does not create a kubernetes namespace'
+ end
+ end
+ end
+
+ describe '.create_or_update_namespaces_for_project' do
+ let(:project) { create(:project) }
+
+ subject { described_class.create_or_update_namespaces_for_project(project) }
+
+ it 'creates no kubernetes namespaces' do
+ expect { subject }.not_to change(project.kubernetes_namespaces, :count)
+ end
+
+ context 'project has a project cluster' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, cluster_type: :project_type, projects: [project]) }
+
+ include_examples 'creates a kubernetes namespace'
+
+ context 'when project already has kubernetes namespace' do
+ before do
+ create(:cluster_kubernetes_namespace, project: project, cluster: cluster)
+ end
+
+ include_examples 'does not create a kubernetes namespace'
+ end
+ end
+
+ context 'project belongs to a group cluster' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, :group) }
+
+ let(:group) { cluster.group }
+ let(:project) { create(:project, group: group) }
+
+ include_examples 'creates a kubernetes namespace'
+
+ context 'when project already has kubernetes namespace' do
+ before do
+ create(:cluster_kubernetes_namespace, project: project, cluster: cluster)
+ end
+
+ include_examples 'does not create a kubernetes namespace'
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 74bcc15f912..5a3ecb1019b 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -159,6 +159,78 @@ describe MergeRequests::CreateService do
end
end
end
+
+ describe 'Merge request pipelines' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ context "when .gitlab-ci.yml has merge_requests keywords" do
+ let(:config) do
+ {
+ test: {
+ stage: 'test',
+ script: 'echo',
+ only: ['merge_requests']
+ }
+ }
+ end
+
+ it 'creates a merge request pipeline and sets it as a head pipeline' do
+ expect(merge_request).to be_persisted
+
+ merge_request.reload
+ expect(merge_request.merge_request_pipelines.count).to eq(1)
+ expect(merge_request.actual_head_pipeline).to be_merge_request
+ end
+
+ context "when branch pipeline was created before a merge request pipline has been created" do
+ before do
+ create(:ci_pipeline, project: merge_request.source_project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch,
+ tag: false)
+
+ merge_request
+ end
+
+ it 'sets the latest merge request pipeline as the head pipeline' do
+ expect(merge_request.actual_head_pipeline).to be_merge_request
+ end
+ end
+
+ context "when the 'ci_merge_request_pipeline' feature flag is disabled" do
+ before do
+ stub_feature_flags(ci_merge_request_pipeline: false)
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect(merge_request).to be_persisted
+
+ merge_request.reload
+ expect(merge_request.merge_request_pipelines.count).to eq(0)
+ end
+ end
+ end
+
+ context "when .gitlab-ci.yml does not have merge_requests keywords" do
+ let(:config) do
+ {
+ test: {
+ stage: 'test',
+ script: 'echo'
+ }
+ }
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect(merge_request).to be_persisted
+
+ merge_request.reload
+ expect(merge_request.merge_request_pipelines.count).to eq(0)
+ end
+ end
+ end
end
it_behaves_like 'new issuable record that supports quick actions' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 61c6ba7d550..d29a1091d95 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -132,6 +132,94 @@ describe MergeRequests::RefreshService do
end
end
+ describe 'Merge request pipelines' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ subject { service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master') }
+
+ context "when .gitlab-ci.yml has merge_requests keywords" do
+ let(:config) do
+ {
+ test: {
+ stage: 'test',
+ script: 'echo',
+ only: ['merge_requests']
+ }
+ }
+ end
+
+ it 'creates a merge request pipeline' do
+ expect { subject }
+ .to change { @merge_request.merge_request_pipelines.count }.by(1)
+ .and change { @fork_merge_request.merge_request_pipelines.count }.by(1)
+ .and change { @another_merge_request.merge_request_pipelines.count }.by(1)
+ end
+
+ context "when branch pipeline was created before a merge request pipline has been created" do
+ before do
+ create(:ci_pipeline, project: @merge_request.source_project,
+ sha: @merge_request.diff_head_sha,
+ ref: @merge_request.source_branch,
+ tag: false)
+
+ subject
+ end
+
+ it 'sets the latest merge request pipeline as the head pipeline' do
+ @merge_request.reload
+ expect(@merge_request.actual_head_pipeline).to be_merge_request
+ end
+
+ it 'returns pipelines in correct order' do
+ @merge_request.reload
+ expect(@merge_request.all_pipelines.first).to be_merge_request
+ expect(@merge_request.all_pipelines.second).to be_push
+ end
+ end
+
+ context "when MergeRequestUpdateWorker is retried by an exception" do
+ it 'does not re-create a duplicate merge request pipeline' do
+ expect do
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master')
+ end.to change { @merge_request.merge_request_pipelines.count }.by(1)
+
+ expect do
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master')
+ end.not_to change { @merge_request.merge_request_pipelines.count }
+ end
+ end
+
+ context "when the 'ci_merge_request_pipeline' feature flag is disabled" do
+ before do
+ stub_feature_flags(ci_merge_request_pipeline: false)
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect { subject }
+ .not_to change { @merge_request.merge_request_pipelines.count }
+ end
+ end
+ end
+
+ context "when .gitlab-ci.yml does not have merge_requests keywords" do
+ let(:config) do
+ {
+ test: {
+ stage: 'test',
+ script: 'echo'
+ }
+ }
+ end
+
+ it 'does not create a merge request pipeline' do
+ expect { subject }
+ .not_to change { @merge_request.merge_request_pipelines.count }
+ end
+ end
+ end
+
context 'push to origin repo source branch when an MR was reopened' do
let(:refresh_service) { service.new(@project, @user) }
let(:notification_service) { spy('notification_service') }
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 2d8da7673dc..0f6c2604984 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2146,6 +2146,27 @@ describe NotificationService, :mailer do
end
end
+ describe 'Repository cleanup' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ describe '#repository_cleanup_success' do
+ it 'emails the specified user only' do
+ notification.repository_cleanup_success(project, user)
+
+ should_email(user)
+ end
+ end
+
+ describe '#repository_cleanup_failure' do
+ it 'emails the specified user only' do
+ notification.repository_cleanup_failure(project, user, 'Some error')
+
+ should_email(user)
+ end
+ end
+ end
+
def build_team(project)
@u_watcher = create_global_setting_for(create(:user), :watch)
@u_participating = create_global_setting_for(create(:user), :participating)
diff --git a/spec/services/projects/cleanup_service_spec.rb b/spec/services/projects/cleanup_service_spec.rb
new file mode 100644
index 00000000000..3d4587ce2a1
--- /dev/null
+++ b/spec/services/projects/cleanup_service_spec.rb
@@ -0,0 +1,44 @@
+require 'spec_helper'
+
+describe Projects::CleanupService do
+ let(:project) { create(:project, :repository, bfg_object_map: fixture_file_upload('spec/fixtures/bfg_object_map.txt')) }
+ let(:object_map) { project.bfg_object_map }
+
+ subject(:service) { described_class.new(project) }
+
+ describe '#execute' do
+ it 'runs the apply_bfg_object_map gitaly RPC' do
+ expect_next_instance_of(Gitlab::Git::RepositoryCleaner) do |cleaner|
+ expect(cleaner).to receive(:apply_bfg_object_map).with(kind_of(IO))
+ end
+
+ service.execute
+ end
+
+ it 'runs garbage collection on the repository' do
+ expect_next_instance_of(GitGarbageCollectWorker) do |worker|
+ expect(worker).to receive(:perform)
+ end
+
+ service.execute
+ end
+
+ it 'clears the repository cache' do
+ expect(project.repository).to receive(:expire_all_method_caches)
+
+ service.execute
+ end
+
+ it 'removes the object map file' do
+ service.execute
+
+ expect(object_map.exists?).to be_falsy
+ end
+
+ it 'raises an error if no object map can be found' do
+ object_map.remove!
+
+ expect { service.execute }.to raise_error(described_class::NoUploadError)
+ end
+ end
+end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 08de27ca44a..f71e2b4bc24 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -261,6 +261,32 @@ describe Projects::CreateService, '#execute' do
end
end
+ context 'when group has kubernetes cluster' do
+ let(:group_cluster) { create(:cluster, :group, :provided_by_gcp) }
+ let(:group) { group_cluster.group }
+
+ let(:token) { 'aaaa' }
+ let(:service_account_creator) { double(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService, execute: true) }
+ let(:secrets_fetcher) { double(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService, execute: token) }
+
+ before do
+ group.add_owner(user)
+
+ expect(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:namespace_creator).and_return(service_account_creator)
+ expect(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService).to receive(:new).and_return(secrets_fetcher)
+ end
+
+ it 'creates a kubernetes namespace for the project' do
+ project = create_project(user, opts.merge!(namespace_id: group.id))
+
+ expect(project).to be_valid
+
+ kubernetes_namespace = group_cluster.kubernetes_namespaces.first
+ expect(kubernetes_namespace).to be_present
+ expect(kubernetes_namespace.project).to eq(project)
+ end
+ end
+
context 'when there is an active service template' do
before do
create(:service, project: nil, template: true, active: true)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 2e07d4f8013..132ad9a2646 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -62,6 +62,32 @@ describe Projects::TransferService do
expect(rugged_config['gitlab.fullpath']).to eq "#{group.full_path}/#{project.path}"
end
+
+ context 'new group has a kubernetes cluster' do
+ let(:group_cluster) { create(:cluster, :group, :provided_by_gcp) }
+ let(:group) { group_cluster.group }
+
+ let(:token) { 'aaaa' }
+ let(:service_account_creator) { double(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService, execute: true) }
+ let(:secrets_fetcher) { double(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService, execute: token) }
+
+ subject { transfer_project(project, user, group) }
+
+ before do
+ expect(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:namespace_creator).and_return(service_account_creator)
+ expect(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService).to receive(:new).and_return(secrets_fetcher)
+ end
+
+ it 'creates a kubernetes namespace for the project' do
+ subject
+
+ expect(project.kubernetes_namespaces.count).to eq(1)
+
+ kubernetes_namespace = group_cluster.kubernetes_namespaces.first
+ expect(kubernetes_namespace).to be_present
+ expect(kubernetes_namespace.project).to eq(project)
+ end
+ end
end
context 'when transfer fails' do
diff --git a/spec/support/active_record_enum.rb b/spec/support/active_record_enum.rb
new file mode 100644
index 00000000000..fb1189c7f17
--- /dev/null
+++ b/spec/support/active_record_enum.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+shared_examples 'having unique enum values' do
+ described_class.defined_enums.each do |name, enum|
+ it "has unique values in #{name.inspect}" do
+ duplicated = enum.group_by(&:last).select { |key, value| value.size > 1 }
+
+ expect(duplicated).to be_empty,
+ "Duplicated values detected: #{duplicated.values.map(&Hash.method(:[]))}"
+ end
+ end
+end
diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/features/discussion_comments_shared_example.rb
index 18cf08f0b9e..922f3df144d 100644
--- a/spec/support/features/discussion_comments_shared_example.rb
+++ b/spec/support/features/discussion_comments_shared_example.rb
@@ -142,6 +142,14 @@ shared_examples 'discussion comments' do |resource_name|
find(comments_selector, match: :first)
end
+ def submit_reply(text)
+ find("#{comments_selector} .js-vue-discussion-reply").click
+ find("#{comments_selector} .note-textarea").send_keys(text)
+
+ click_button "Comment"
+ wait_for_requests
+ end
+
it 'clicking "Start discussion" will post a discussion' do
new_comment = all(comments_selector).last
@@ -149,16 +157,29 @@ shared_examples 'discussion comments' do |resource_name|
expect(new_comment).to have_selector '.discussion'
end
+ if resource_name =~ /(issue|merge request)/
+ it 'can be replied to' do
+ submit_reply('some text')
+
+ expect(page).to have_css('.discussion-notes .note', count: 2)
+ expect(page).to have_content 'Collapse replies'
+ end
+
+ it 'can be collapsed' do
+ submit_reply('another text')
+
+ find('.js-collapse-replies').click
+ expect(page).to have_css('.discussion-notes .note', count: 1)
+ expect(page).to have_content '1 reply'
+ end
+ end
+
if resource_name == 'merge request'
let(:note_id) { find("#{comments_selector} .note:first-child", match: :first)['data-note-id'] }
let(:reply_id) { find("#{comments_selector} .note:last-child", match: :first)['data-note-id'] }
it 'shows resolved discussion when toggled' do
- find("#{comments_selector} .js-vue-discussion-reply").click
- find("#{comments_selector} .note-textarea").send_keys('a')
-
- click_button "Comment"
- wait_for_requests
+ submit_reply('a')
click_button "Resolve discussion"
wait_for_requests
diff --git a/spec/support/helpers/features/list_rows_helpers.rb b/spec/support/helpers/features/list_rows_helpers.rb
new file mode 100644
index 00000000000..0626415361c
--- /dev/null
+++ b/spec/support/helpers/features/list_rows_helpers.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+# These helpers allow you to access rows in the list
+#
+# Usage:
+# describe "..." do
+# include Spec::Support::Helpers::Features::ListRowsHelpers
+# ...
+#
+# expect(first_row.text).to include("John Doe")
+# expect(second_row.text).to include("John Smith")
+#
+module Spec
+ module Support
+ module Helpers
+ module Features
+ module ListRowsHelpers
+ def first_row
+ page.all('ul.content-list > li')[0]
+ end
+
+ def second_row
+ page.all('ul.content-list > li')[1]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/features/sorting_helpers.rb b/spec/support/helpers/features/sorting_helpers.rb
index a1ae428586e..003ecb251fe 100644
--- a/spec/support/helpers/features/sorting_helpers.rb
+++ b/spec/support/helpers/features/sorting_helpers.rb
@@ -13,9 +13,9 @@ module Spec
module Features
module SortingHelpers
def sort_by(value)
- find('.filter-dropdown-container button.dropdown-menu-toggle').click
+ find('.filter-dropdown-container .dropdown').click
- page.within('.content ul.dropdown-menu.dropdown-menu-right li') do
+ page.within('ul.dropdown-menu.dropdown-menu-right li') do
click_link(value)
end
end
diff --git a/spec/support/helpers/git_http_helpers.rb b/spec/support/helpers/git_http_helpers.rb
index b8289e6c5f1..9a5845af90c 100644
--- a/spec/support/helpers/git_http_helpers.rb
+++ b/spec/support/helpers/git_http_helpers.rb
@@ -60,9 +60,4 @@ module GitHttpHelpers
message = Gitlab::GitAccessWiki::ERROR_MESSAGES[error_key]
message || raise("GitAccessWiki error message key '#{error_key}' not found")
end
-
- def change_access_error(error_key)
- message = Gitlab::Checks::ChangeAccess::ERROR_MESSAGES[error_key]
- message || raise("ChangeAccess error message key '#{error_key}' not found")
- end
end
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index ccaf86aa3a6..39bd305d88a 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -34,6 +34,17 @@ module KubernetesHelpers
WebMock.stub_request(:get, deployments_url).to_return(response || kube_deployments_response)
end
+ def stub_kubeclient_knative_services(**options)
+ options[:name] ||= "kubetest"
+ options[:namespace] ||= "default"
+ options[:domain] ||= "example.com"
+
+ stub_kubeclient_discover(service.api_url)
+ knative_url = service.api_url + "/apis/serving.knative.dev/v1alpha1/services"
+
+ WebMock.stub_request(:get, knative_url).to_return(kube_response(kube_knative_services_body(options)))
+ end
+
def stub_kubeclient_get_secret(api_url, **options)
options[:metadata_name] ||= "default-token-1"
options[:namespace] ||= "default"
@@ -47,6 +58,11 @@ module KubernetesHelpers
.to_return(status: [status, "Internal Server Error"])
end
+ def stub_kubeclient_get_service_account_error(api_url, name, namespace: 'default', status: 404)
+ WebMock.stub_request(:get, api_url + "/api/v1/namespaces/#{namespace}/serviceaccounts/#{name}")
+ .to_return(status: [status, "Internal Server Error"])
+ end
+
def stub_kubeclient_create_service_account(api_url, namespace: 'default')
WebMock.stub_request(:post, api_url + "/api/v1/namespaces/#{namespace}/serviceaccounts")
.to_return(kube_response({}))
@@ -62,11 +78,26 @@ module KubernetesHelpers
.to_return(kube_response({}))
end
+ def stub_kubeclient_put_secret(api_url, name, namespace: 'default')
+ WebMock.stub_request(:put, api_url + "/api/v1/namespaces/#{namespace}/secrets/#{name}")
+ .to_return(kube_response({}))
+ end
+
+ def stub_kubeclient_get_cluster_role_binding_error(api_url, name, status: 404)
+ WebMock.stub_request(:get, api_url + "/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/#{name}")
+ .to_return(status: [status, "Internal Server Error"])
+ end
+
def stub_kubeclient_create_cluster_role_binding(api_url)
WebMock.stub_request(:post, api_url + '/apis/rbac.authorization.k8s.io/v1/clusterrolebindings')
.to_return(kube_response({}))
end
+ def stub_kubeclient_get_role_binding_error(api_url, name, namespace: 'default', status: 404)
+ WebMock.stub_request(:get, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{name}")
+ .to_return(status: [status, "Internal Server Error"])
+ end
+
def stub_kubeclient_create_role_binding(api_url, namespace: 'default')
WebMock.stub_request(:post, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings")
.to_return(kube_response({}))
@@ -161,6 +192,13 @@ module KubernetesHelpers
}
end
+ def kube_knative_services_body(**options)
+ {
+ "kind" => "List",
+ "items" => [kube_service(options)]
+ }
+ end
+
# This is a partial response, it will have many more elements in reality but
# these are the ones we care about at the moment
def kube_pod(name: "kube-pod", app: "valid-pod-label", status: "Running", track: nil)
@@ -204,6 +242,54 @@ module KubernetesHelpers
}
end
+ def kube_service(name: "kubetest", namespace: "default", domain: "example.com")
+ {
+ "metadata" => {
+ "creationTimestamp" => "2018-11-21T06:16:33Z",
+ "name" => name,
+ "namespace" => namespace,
+ "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}"
+ },
+ "spec" => {
+ "generation" => 2
+ },
+ "status" => {
+ "domain" => "#{name}.#{namespace}.#{domain}",
+ "domainInternal" => "#{name}.#{namespace}.svc.cluster.local",
+ "latestCreatedRevisionName" => "#{name}-00002",
+ "latestReadyRevisionName" => "#{name}-00002",
+ "observedGeneration" => 2
+ }
+ }
+ end
+
+ def kube_service_full(name: "kubetest", namespace: "kube-ns", domain: "example.com")
+ {
+ "metadata" => {
+ "creationTimestamp" => "2018-11-21T06:16:33Z",
+ "name" => name,
+ "namespace" => namespace,
+ "selfLink" => "/apis/serving.knative.dev/v1alpha1/namespaces/#{namespace}/services/#{name}",
+ "annotation" => {
+ "description" => "This is a test description"
+ }
+ },
+ "spec" => {
+ "generation" => 2,
+ "build" => {
+ "template" => "go-1.10.3"
+ }
+ },
+ "status" => {
+ "domain" => "#{name}.#{namespace}.#{domain}",
+ "domainInternal" => "#{name}.#{namespace}.svc.cluster.local",
+ "latestCreatedRevisionName" => "#{name}-00002",
+ "latestReadyRevisionName" => "#{name}-00002",
+ "observedGeneration" => 2
+ }
+ }
+ end
+
def kube_terminals(service, pod)
pod_name = pod['metadata']['name']
containers = pod['spec']['containers']
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 776119564ec..2851cd9733c 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -27,6 +27,11 @@ module StubConfiguration
allow(Gitlab.config.gitlab).to receive_messages(to_settings(messages))
end
+ def stub_default_url_options(host: "localhost", protocol: "http")
+ url_options = { host: host, protocol: protocol }
+ allow(Rails.application.routes).to receive(:default_url_options).and_return(url_options)
+ end
+
def stub_gravatar_setting(messages)
allow(Gitlab.config.gravatar).to receive_messages(to_settings(messages))
end
diff --git a/spec/support/shared_contexts/change_access_checks_shared_context.rb b/spec/support/shared_contexts/change_access_checks_shared_context.rb
new file mode 100644
index 00000000000..aca18b0c73b
--- /dev/null
+++ b/spec/support/shared_contexts/change_access_checks_shared_context.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+shared_context 'change access checks context' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:user_access) { Gitlab::UserAccess.new(user, project: project) }
+ let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
+ let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+ let(:ref) { 'refs/heads/master' }
+ let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
+ let(:protocol) { 'ssh' }
+ let(:timeout) { Gitlab::GitAccess::INTERNAL_TIMEOUT }
+ let(:logger) { Gitlab::Checks::TimedLogger.new(timeout: timeout) }
+ let(:change_access) do
+ Gitlab::Checks::ChangeAccess.new(
+ changes,
+ project: project,
+ user_access: user_access,
+ protocol: protocol,
+ logger: logger
+ )
+ end
+
+ subject { described_class.new(change_access) }
+
+ before do
+ project.add_developer(user)
+ end
+end
diff --git a/spec/support/shared_examples/ci_trace_shared_examples.rb b/spec/support/shared_examples/ci_trace_shared_examples.rb
index 377bd82b67e..c603421d748 100644
--- a/spec/support/shared_examples/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/ci_trace_shared_examples.rb
@@ -180,10 +180,9 @@ shared_examples_for 'common trace features' do
end
context 'runners token' do
- let(:token) { 'my_secret_token' }
+ let(:token) { build.project.runners_token }
before do
- build.project.update(runners_token: token)
trace.set(token)
end
@@ -193,10 +192,9 @@ shared_examples_for 'common trace features' do
end
context 'hides build token' do
- let(:token) { 'my_secret_token' }
+ let(:token) { build.token }
before do
- build.update(token: token)
trace.set(token)
end
diff --git a/spec/support/shared_examples/diff_file_collections.rb b/spec/support/shared_examples/diff_file_collections.rb
index 55ce160add0..367ddf06c28 100644
--- a/spec/support/shared_examples/diff_file_collections.rb
+++ b/spec/support/shared_examples/diff_file_collections.rb
@@ -45,3 +45,19 @@ shared_examples 'diff statistics' do |test_include_stats_flag: true|
end
end
end
+
+shared_examples 'unfoldable diff' do
+ let(:subject) { described_class.new(diffable, diff_options: nil) }
+
+ it 'calls Gitlab::Diff::File#unfold_diff_lines with correct position' do
+ position = instance_double(Gitlab::Diff::Position, file_path: 'README')
+ readme_file = instance_double(Gitlab::Diff::File, file_path: 'README')
+ other_file = instance_double(Gitlab::Diff::File, file_path: 'foo.rb')
+ nil_path_file = instance_double(Gitlab::Diff::File, file_path: nil)
+
+ allow(subject).to receive(:diff_files) { [readme_file, other_file, nil_path_file] }
+ expect(readme_file).to receive(:unfold_diff_lines).with(position)
+
+ subject.unfold_diff_files([position])
+ end
+end
diff --git a/spec/support/shared_examples/file_finder.rb b/spec/support/shared_examples/file_finder.rb
index ef144bdf61c..0dc351b5149 100644
--- a/spec/support/shared_examples/file_finder.rb
+++ b/spec/support/shared_examples/file_finder.rb
@@ -3,18 +3,19 @@ shared_examples 'file finder' do
let(:search_results) { subject.find(query) }
it 'finds by name' do
- filename, blob = search_results.find { |_, blob| blob.filename == expected_file_by_name }
- expect(filename).to eq(expected_file_by_name)
- expect(blob).to be_a(Gitlab::SearchResults::FoundBlob)
+ blob = search_results.find { |blob| blob.filename == expected_file_by_name }
+
+ expect(blob.filename).to eq(expected_file_by_name)
+ expect(blob).to be_a(Gitlab::Search::FoundBlob)
expect(blob.ref).to eq(subject.ref)
expect(blob.data).not_to be_empty
end
it 'finds by content' do
- filename, blob = search_results.find { |_, blob| blob.filename == expected_file_by_content }
+ blob = search_results.find { |blob| blob.filename == expected_file_by_content }
- expect(filename).to eq(expected_file_by_content)
- expect(blob).to be_a(Gitlab::SearchResults::FoundBlob)
+ expect(blob.filename).to eq(expected_file_by_content)
+ expect(blob).to be_a(Gitlab::Search::FoundBlob)
expect(blob.ref).to eq(subject.ref)
expect(blob.data).not_to be_empty
end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
new file mode 100644
index 00000000000..77376496854
--- /dev/null
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+shared_examples_for 'inherited access level as a member of entity' do
+ let(:parent_entity) { create(:group) }
+ let(:user) { create(:user) }
+ let(:member) { entity.is_a?(Group) ? entity.group_member(user) : entity.project_member(user) }
+
+ context 'with root parent_entity developer member' do
+ before do
+ parent_entity.add_developer(user)
+ end
+
+ it 'is allowed to be a maintainer of the entity' do
+ entity.add_maintainer(user)
+
+ expect(member.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+
+ it 'is not allowed to be a reporter of the entity' do
+ entity.add_reporter(user)
+
+ expect(member).to be_nil
+ end
+
+ it 'is allowed to change to be a developer of the entity' do
+ entity.add_maintainer(user)
+
+ expect { member.update(access_level: Gitlab::Access::DEVELOPER) }
+ .to change { member.access_level }.to(Gitlab::Access::DEVELOPER)
+ end
+
+ it 'is not allowed to change to be a guest of the entity' do
+ entity.add_maintainer(user)
+
+ expect { member.update(access_level: Gitlab::Access::GUEST) }
+ .not_to change { member.reload.access_level }
+ end
+
+ it "shows an error if the member can't be updated" do
+ entity.add_maintainer(user)
+
+ member.update(access_level: Gitlab::Access::REPORTER)
+
+ expect(member.errors.full_messages).to eq(["Access level should be higher than Developer inherited membership from group #{parent_entity.name}"])
+ end
+
+ it 'allows changing the level of a member without a parent membership' do
+ non_member_user = create(:user)
+
+ entity.add_maintainer(non_member_user)
+
+ non_member = entity.is_a?(Group) ? entity.group_member(non_member_user) : entity.project_member(non_member_user)
+
+ expect { non_member.update(access_level: Gitlab::Access::GUEST) }
+ .to change { non_member.reload.access_level }
+ end
+ end
+end
+
+shared_examples_for '#valid_level_roles' do |entity_name|
+ let(:member_user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:entity) { create(entity_name) }
+ let(:entity_member) { create("#{entity_name}_member", :developer, source: entity, user: member_user) }
+ let(:presenter) { described_class.new(entity_member, current_user: member_user) }
+ let(:expected_roles) { { 'Developer' => 30, 'Maintainer' => 40, 'Reporter' => 20 } }
+
+ it 'returns all roles when no parent member is present' do
+ expect(presenter.valid_level_roles).to eq(entity_member.class.access_level_roles)
+ end
+
+ it 'returns higher roles when a parent member is present' do
+ group.add_reporter(member_user)
+
+ expect(presenter.valid_level_roles).to eq(expected_roles)
+ end
+end
diff --git a/spec/support/shared_examples/only_except_policy_examples.rb b/spec/support/shared_examples/only_except_policy_examples.rb
new file mode 100644
index 00000000000..35240af1d74
--- /dev/null
+++ b/spec/support/shared_examples/only_except_policy_examples.rb
@@ -0,0 +1,167 @@
+# frozen_string_literal: true
+
+shared_examples 'correct only except policy' do
+ context 'when using simplified policy' do
+ describe 'validations' do
+ context 'when entry config value is valid' do
+ context 'when config is a branch or tag name' do
+ let(:config) { %w[master feature/branch] }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it 'returns refs hash' do
+ expect(entry.value).to eq(refs: config)
+ end
+ end
+ end
+
+ context 'when config is a regexp' do
+ let(:config) { ['/^issue-.*$/'] }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when config is a special keyword' do
+ let(:config) { %w[tags triggers branches] }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+ end
+
+ context 'when entry value is not valid' do
+ let(:config) { [1] }
+
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors)
+ .to include /policy config should be an array of strings or regexps/
+ end
+ end
+ end
+ end
+ end
+
+ context 'when using complex policy' do
+ context 'when specifying refs policy' do
+ let(:config) { { refs: ['master'] } }
+
+ it 'is a correct configuration' do
+ expect(entry).to be_valid
+ expect(entry.value).to eq(refs: %w[master])
+ end
+ end
+
+ context 'when specifying kubernetes policy' do
+ let(:config) { { kubernetes: 'active' } }
+
+ it 'is a correct configuration' do
+ expect(entry).to be_valid
+ expect(entry.value).to eq(kubernetes: 'active')
+ end
+ end
+
+ context 'when specifying invalid kubernetes policy' do
+ let(:config) { { kubernetes: 'something' } }
+
+ it 'reports an error about invalid policy' do
+ expect(entry.errors).to include /unknown value: something/
+ end
+ end
+
+ context 'when specifying valid variables expressions policy' do
+ let(:config) { { variables: ['$VAR == null'] } }
+
+ it 'is a correct configuration' do
+ expect(entry).to be_valid
+ expect(entry.value).to eq(config)
+ end
+ end
+
+ context 'when specifying variables expressions in invalid format' do
+ let(:config) { { variables: '$MY_VAR' } }
+
+ it 'reports an error about invalid format' do
+ expect(entry.errors).to include /should be an array of strings/
+ end
+ end
+
+ context 'when specifying invalid variables expressions statement' do
+ let(:config) { { variables: ['$MY_VAR =='] } }
+
+ it 'reports an error about invalid statement' do
+ expect(entry.errors).to include /invalid expression syntax/
+ end
+ end
+
+ context 'when specifying invalid variables expressions token' do
+ let(:config) { { variables: ['$MY_VAR == 123'] } }
+
+ it 'reports an error about invalid expression' do
+ expect(entry.errors).to include /invalid expression syntax/
+ end
+ end
+
+ context 'when using invalid variables expressions regexp' do
+ let(:config) { { variables: ['$MY_VAR =~ /some ( thing/'] } }
+
+ it 'reports an error about invalid expression' do
+ expect(entry.errors).to include /invalid expression syntax/
+ end
+ end
+
+ context 'when specifying a valid changes policy' do
+ let(:config) { { changes: %w[some/* paths/**/*.rb] } }
+
+ it 'is a correct configuration' do
+ expect(entry).to be_valid
+ expect(entry.value).to eq(config)
+ end
+ end
+
+ context 'when changes policy is invalid' do
+ let(:config) { { changes: [1, 2] } }
+
+ it 'returns errors' do
+ expect(entry.errors).to include /changes should be an array of strings/
+ end
+ end
+
+ context 'when specifying unknown policy' do
+ let(:config) { { refs: ['master'], invalid: :something } }
+
+ it 'returns error about invalid key' do
+ expect(entry.errors).to include /unknown keys: invalid/
+ end
+ end
+
+ context 'when policy is empty' do
+ let(:config) { {} }
+
+ it 'is not a valid configuration' do
+ expect(entry.errors).to include /can't be blank/
+ end
+ end
+ end
+
+ context 'when policy strategy does not match' do
+ let(:config) { 'string strategy' }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include /has to be either an array of conditions or a hash/
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/check_rake_spec.rb b/spec/tasks/gitlab/check_rake_spec.rb
index 4eda618b6d6..06525e3c771 100644
--- a/spec/tasks/gitlab/check_rake_spec.rb
+++ b/spec/tasks/gitlab/check_rake_spec.rb
@@ -1,51 +1,101 @@
require 'rake_helper'
-describe 'gitlab:ldap:check rake task' do
- include LdapHelpers
-
+describe 'check.rake' do
before do
Rake.application.rake_require 'tasks/gitlab/check'
stub_warn_user_is_not_gitlab
end
- context 'when LDAP is not enabled' do
- it 'does not attempt to bind or search for users' do
- expect(Gitlab::Auth::LDAP::Config).not_to receive(:providers)
- expect(Gitlab::Auth::LDAP::Adapter).not_to receive(:open)
-
- run_rake_task('gitlab:ldap:check')
+ shared_examples_for 'system check rake task' do
+ it 'runs the check' do
+ expect do
+ subject
+ end.to output(/Checking #{name} ... Finished/).to_stdout
end
end
- context 'when LDAP is enabled' do
- let(:ldap) { double(:ldap) }
- let(:adapter) { ldap_adapter('ldapmain', ldap) }
+ describe 'gitlab:check rake task' do
+ subject { run_rake_task('gitlab:check') }
+ let(:name) { 'GitLab subtasks' }
- before do
- allow(Gitlab::Auth::LDAP::Config)
- .to receive_messages(
- enabled?: true,
- providers: ['ldapmain']
- )
- allow(Gitlab::Auth::LDAP::Adapter).to receive(:open).and_yield(adapter)
- allow(adapter).to receive(:users).and_return([])
- end
+ it_behaves_like 'system check rake task'
+ end
+
+ describe 'gitlab:gitlab_shell:check rake task' do
+ subject { run_rake_task('gitlab:gitlab_shell:check') }
+ let(:name) { 'GitLab Shell' }
+
+ it_behaves_like 'system check rake task'
+ end
+
+ describe 'gitlab:gitaly:check rake task' do
+ subject { run_rake_task('gitlab:gitaly:check') }
+ let(:name) { 'Gitaly' }
+
+ it_behaves_like 'system check rake task'
+ end
+
+ describe 'gitlab:sidekiq:check rake task' do
+ subject { run_rake_task('gitlab:sidekiq:check') }
+ let(:name) { 'Sidekiq' }
- it 'attempts to bind using credentials' do
- stub_ldap_config(has_auth?: true)
+ it_behaves_like 'system check rake task'
+ end
- expect(ldap).to receive(:bind)
+ describe 'gitlab:incoming_email:check rake task' do
+ subject { run_rake_task('gitlab:incoming_email:check') }
+ let(:name) { 'Incoming Email' }
- run_rake_task('gitlab:ldap:check')
+ it_behaves_like 'system check rake task'
+ end
+
+ describe 'gitlab:ldap:check rake task' do
+ include LdapHelpers
+
+ subject { run_rake_task('gitlab:ldap:check') }
+ let(:name) { 'LDAP' }
+
+ it_behaves_like 'system check rake task'
+
+ context 'when LDAP is not enabled' do
+ it 'does not attempt to bind or search for users' do
+ expect(Gitlab::Auth::LDAP::Config).not_to receive(:providers)
+ expect(Gitlab::Auth::LDAP::Adapter).not_to receive(:open)
+
+ subject
+ end
end
- it 'searches for 100 LDAP users' do
- stub_ldap_config(uid: 'uid')
+ context 'when LDAP is enabled' do
+ let(:ldap) { double(:ldap) }
+ let(:adapter) { ldap_adapter('ldapmain', ldap) }
+
+ before do
+ allow(Gitlab::Auth::LDAP::Config)
+ .to receive_messages(
+ enabled?: true,
+ providers: ['ldapmain']
+ )
+ allow(Gitlab::Auth::LDAP::Adapter).to receive(:open).and_yield(adapter)
+ allow(adapter).to receive(:users).and_return([])
+ end
+
+ it 'attempts to bind using credentials' do
+ stub_ldap_config(has_auth?: true)
+
+ expect(ldap).to receive(:bind)
+
+ subject
+ end
+
+ it 'searches for 100 LDAP users' do
+ stub_ldap_config(uid: 'uid')
- expect(adapter).to receive(:users).with('uid', '*', 100)
+ expect(adapter).to receive(:users).with('uid', '*', 100)
- run_rake_task('gitlab:ldap:check')
+ subject
+ end
end
end
end
diff --git a/spec/tasks/gitlab/web_hook_rake_spec.rb b/spec/tasks/gitlab/web_hook_rake_spec.rb
new file mode 100644
index 00000000000..7bdf33ff6b0
--- /dev/null
+++ b/spec/tasks/gitlab/web_hook_rake_spec.rb
@@ -0,0 +1,92 @@
+require 'rake_helper'
+
+describe 'gitlab:web_hook namespace rake tasks' do
+ set(:group) { create(:group) }
+
+ set(:project1) { create(:project, namespace: group) }
+ set(:project2) { create(:project, namespace: group) }
+ set(:other_group_project) { create(:project) }
+
+ let(:url) { 'http://example.com' }
+ let(:hook_urls) { (project1.hooks + project2.hooks).map(&:url) }
+ let(:other_group_hook_urls) { other_group_project.hooks.map(&:url) }
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/web_hook'
+ end
+
+ describe 'gitlab:web_hook:add' do
+ it 'adds a web hook to all projects' do
+ stub_env('URL' => url)
+ run_rake_task('gitlab:web_hook:add')
+
+ expect(hook_urls).to contain_exactly(url, url)
+ expect(other_group_hook_urls).to contain_exactly(url)
+ end
+
+ it 'adds a web hook to projects in the specified namespace' do
+ stub_env('URL' => url, 'NAMESPACE' => group.full_path)
+ run_rake_task('gitlab:web_hook:add')
+
+ expect(hook_urls).to contain_exactly(url, url)
+ expect(other_group_hook_urls).to be_empty
+ end
+
+ it 'raises an error if an unknown namespace is specified' do
+ stub_env('URL' => url, 'NAMESPACE' => group.full_path)
+
+ group.destroy
+
+ expect { run_rake_task('gitlab:web_hook:add') }.to raise_error(SystemExit)
+ end
+ end
+
+ describe 'gitlab:web_hook:rm' do
+ let!(:hook1) { create(:project_hook, project: project1, url: url) }
+ let!(:hook2) { create(:project_hook, project: project2, url: url) }
+ let!(:other_group_hook) { create(:project_hook, project: other_group_project, url: url) }
+ let!(:other_url_hook) { create(:project_hook, url: other_url, project: project1) }
+
+ let(:other_url) { 'http://other.example.com' }
+
+ it 'removes a web hook from all projects by URL' do
+ stub_env('URL' => url)
+ run_rake_task('gitlab:web_hook:rm')
+
+ expect(hook_urls).to contain_exactly(other_url)
+ expect(other_group_hook_urls).to be_empty
+ end
+
+ it 'removes a web hook from projects in the specified namespace by URL' do
+ stub_env('NAMESPACE' => group.full_path, 'URL' => url)
+ run_rake_task('gitlab:web_hook:rm')
+
+ expect(hook_urls).to contain_exactly(other_url)
+ expect(other_group_hook_urls).to contain_exactly(url)
+ end
+
+ it 'raises an error if an unknown namespace is specified' do
+ stub_env('URL' => url, 'NAMESPACE' => group.full_path)
+
+ group.destroy
+
+ expect { run_rake_task('gitlab:web_hook:rm') }.to raise_error(SystemExit)
+ end
+ end
+
+ describe 'gitlab:web_hook:list' do
+ let!(:hook1) { create(:project_hook, project: project1) }
+ let!(:hook2) { create(:project_hook, project: project2) }
+ let!(:other_group_hook) { create(:project_hook, project: other_group_project) }
+
+ it 'lists all web hooks' do
+ expect { run_rake_task('gitlab:web_hook:list') }.to output(/3 webhooks found/).to_stdout
+ end
+
+ it 'lists web hooks in a particular namespace' do
+ stub_env('NAMESPACE', group.full_path)
+
+ expect { run_rake_task('gitlab:web_hook:list') }.to output(/2 webhooks found/).to_stdout
+ end
+ end
+end
diff --git a/spec/features/admin/admin_active_tab_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
index 1215908f5ea..05c2f61a606 100644
--- a/spec/features/admin/admin_active_tab_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
@@ -1,27 +1,26 @@
require 'spec_helper'
-RSpec.describe 'admin active tab' do
- before do
- sign_in(create(:admin))
- end
-
+describe 'layouts/nav/sidebar/_admin' do
shared_examples 'page has active tab' do |title|
it "activates #{title} tab" do
- expect(page).to have_selector('.nav-sidebar .sidebar-top-level-items > li.active', count: 1)
- expect(page.find('.nav-sidebar .sidebar-top-level-items > li.active')).to have_content(title)
+ render
+
+ expect(rendered).to have_selector('.nav-sidebar .sidebar-top-level-items > li.active', count: 1)
+ expect(rendered).to have_css('.nav-sidebar .sidebar-top-level-items > li.active', text: title)
end
end
shared_examples 'page has active sub tab' do |title|
it "activates #{title} sub tab" do
- expect(page).to have_selector('.sidebar-sub-level-items > li.active', count: 2)
- expect(page.all('.sidebar-sub-level-items > li.active')[1]).to have_content(title)
+ render
+
+ expect(rendered).to have_css('.sidebar-sub-level-items > li.active', text: title)
end
end
context 'on home page' do
before do
- visit admin_root_path
+ allow(controller).to receive(:controller_name).and_return('dashboard')
end
it_behaves_like 'page has active tab', 'Overview'
@@ -29,7 +28,8 @@ RSpec.describe 'admin active tab' do
context 'on projects' do
before do
- visit admin_projects_path
+ allow(controller).to receive(:controller_name).and_return('projects')
+ allow(controller).to receive(:controller_path).and_return('admin/projects')
end
it_behaves_like 'page has active tab', 'Overview'
@@ -38,7 +38,7 @@ RSpec.describe 'admin active tab' do
context 'on groups' do
before do
- visit admin_groups_path
+ allow(controller).to receive(:controller_name).and_return('groups')
end
it_behaves_like 'page has active tab', 'Overview'
@@ -47,7 +47,7 @@ RSpec.describe 'admin active tab' do
context 'on users' do
before do
- visit admin_users_path
+ allow(controller).to receive(:controller_name).and_return('users')
end
it_behaves_like 'page has active tab', 'Overview'
@@ -56,7 +56,7 @@ RSpec.describe 'admin active tab' do
context 'on logs' do
before do
- visit admin_logs_path
+ allow(controller).to receive(:controller_name).and_return('logs')
end
it_behaves_like 'page has active tab', 'Monitoring'
@@ -65,7 +65,7 @@ RSpec.describe 'admin active tab' do
context 'on messages' do
before do
- visit admin_broadcast_messages_path
+ allow(controller).to receive(:controller_name).and_return('broadcast_messages')
end
it_behaves_like 'page has active tab', 'Messages'
@@ -73,7 +73,7 @@ RSpec.describe 'admin active tab' do
context 'on hooks' do
before do
- visit admin_hooks_path
+ allow(controller).to receive(:controller_name).and_return('hooks')
end
it_behaves_like 'page has active tab', 'Hooks'
@@ -81,7 +81,7 @@ RSpec.describe 'admin active tab' do
context 'on background jobs' do
before do
- visit admin_background_jobs_path
+ allow(controller).to receive(:controller_name).and_return('background_jobs')
end
it_behaves_like 'page has active tab', 'Monitoring'
diff --git a/spec/workers/cluster_platform_configure_worker_spec.rb b/spec/workers/cluster_platform_configure_worker_spec.rb
index b51f6e07c6a..0eead0ab13d 100644
--- a/spec/workers/cluster_platform_configure_worker_spec.rb
+++ b/spec/workers/cluster_platform_configure_worker_spec.rb
@@ -2,7 +2,43 @@
require 'spec_helper'
-describe ClusterPlatformConfigureWorker, '#execute' do
+describe ClusterPlatformConfigureWorker, '#perform' do
+ let(:worker) { described_class.new }
+
+ context 'when group cluster' do
+ let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
+ let(:group) { cluster.group }
+
+ context 'when group has no projects' do
+ it 'does not create a namespace' do
+ expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).not_to receive(:execute)
+
+ worker.perform(cluster.id)
+ end
+ end
+
+ context 'when group has a project' do
+ let!(:project) { create(:project, group: group) }
+
+ it 'creates a namespace for the project' do
+ expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute).once
+
+ worker.perform(cluster.id)
+ end
+ end
+
+ context 'when group has project in a sub-group' do
+ let!(:subgroup) { create(:group, parent: group) }
+ let!(:project) { create(:project, group: subgroup) }
+
+ it 'creates a namespace for the project' do
+ expect_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute).once
+
+ worker.perform(cluster.id)
+ end
+ end
+ end
+
context 'when provider type is gcp' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
@@ -30,18 +66,4 @@ describe ClusterPlatformConfigureWorker, '#execute' do
described_class.new.perform(123)
end
end
-
- context 'when kubeclient raises error' do
- let(:cluster) { create(:cluster, :project) }
-
- it 'rescues and logs the error' do
- allow_any_instance_of(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).to receive(:execute).and_raise(::Kubeclient::HttpError.new(500, 'something baaaad happened', ''))
-
- expect(Rails.logger)
- .to receive(:error)
- .with("Failed to create/update Kubernetes namespace for cluster_id: #{cluster.id} with error: something baaaad happened")
-
- described_class.new.perform(cluster.id)
- end
- end
end
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index c5a60e9855b..ff408427926 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -11,6 +11,7 @@ describe PipelineScheduleWorker do
end
before do
+ stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_to_return_yaml_file
pipeline_schedule.update_column(:next_run_at, 1.day.ago)
@@ -24,12 +25,12 @@ describe PipelineScheduleWorker do
context 'when there is a scheduled pipeline within next_run_at' do
shared_examples 'successful scheduling' do
it 'creates a new pipeline' do
- expect { subject }.to change { project.pipelines.count }.by(1)
+ expect { subject }.to change { project.ci_pipelines.count }.by(1)
expect(Ci::Pipeline.last).to be_schedule
pipeline_schedule.reload
expect(pipeline_schedule.next_run_at).to be > Time.now
- expect(pipeline_schedule).to eq(project.pipelines.last.pipeline_schedule)
+ expect(pipeline_schedule).to eq(project.ci_pipelines.last.pipeline_schedule)
expect(pipeline_schedule).to be_active
end
end
@@ -53,7 +54,7 @@ describe PipelineScheduleWorker do
end
it 'does not create a new pipeline' do
- expect { subject }.not_to change { project.pipelines.count }
+ expect { subject }.not_to change { project.ci_pipelines.count }
end
end
@@ -63,7 +64,7 @@ describe PipelineScheduleWorker do
end
it 'creates a failed pipeline with the reason' do
- expect { subject }.to change { project.pipelines.count }.by(1)
+ expect { subject }.to change { project.ci_pipelines.count }.by(1)
expect(Ci::Pipeline.last).to be_config_error
expect(Ci::Pipeline.last.yaml_errors).not_to be_nil
end
@@ -104,7 +105,7 @@ describe PipelineScheduleWorker do
end
it 'does not create a pipeline' do
- expect { subject }.not_to change { project.pipelines.count }
+ expect { subject }.not_to change { project.ci_pipelines.count }
end
it 'does not raise an exception' do
@@ -134,7 +135,7 @@ describe PipelineScheduleWorker do
end
it 'does not create a pipeline' do
- expect { subject }.not_to change { project.pipelines.count }
+ expect { subject }.not_to change { project.ci_pipelines.count }
end
it 'does not raise an exception' do
diff --git a/spec/workers/rebase_worker_spec.rb b/spec/workers/rebase_worker_spec.rb
index 936b9deaecc..900332ed6b3 100644
--- a/spec/workers/rebase_worker_spec.rb
+++ b/spec/workers/rebase_worker_spec.rb
@@ -19,7 +19,7 @@ describe RebaseWorker, '#perform' do
expect(MergeRequests::RebaseService)
.to receive(:new).with(forked_project, merge_request.author).and_call_original
- subject.perform(merge_request, merge_request.author)
+ subject.perform(merge_request.id, merge_request.author.id)
end
end
end
diff --git a/spec/workers/repository_cleanup_worker_spec.rb b/spec/workers/repository_cleanup_worker_spec.rb
new file mode 100644
index 00000000000..3adae0b6cfa
--- /dev/null
+++ b/spec/workers/repository_cleanup_worker_spec.rb
@@ -0,0 +1,55 @@
+require 'spec_helper'
+
+describe RepositoryCleanupWorker do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'executes the cleanup service and sends a success notification' do
+ expect_next_instance_of(Projects::CleanupService) do |service|
+ expect(service.project).to eq(project)
+ expect(service.current_user).to eq(user)
+
+ expect(service).to receive(:execute)
+ end
+
+ expect_next_instance_of(NotificationService) do |service|
+ expect(service).to receive(:repository_cleanup_success).with(project, user)
+ end
+
+ worker.perform(project.id, user.id)
+ end
+
+ it 'raises an error if the project cannot be found' do
+ project.destroy
+
+ expect { worker.perform(project.id, user.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'raises an error if the user cannot be found' do
+ user.destroy
+
+ expect { worker.perform(project.id, user.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ describe '#sidekiq_retries_exhausted' do
+ let(:job) { { 'args' => [project.id, user.id], 'error_message' => 'Error' } }
+
+ it 'does not send a failure notification for a RecordNotFound error' do
+ expect(NotificationService).not_to receive(:new)
+
+ described_class.sidekiq_retries_exhausted_block.call(job, ActiveRecord::RecordNotFound.new)
+ end
+
+ it 'sends a failure notification' do
+ expect_next_instance_of(NotificationService) do |service|
+ expect(service).to receive(:repository_cleanup_failure).with(project, user, 'Error')
+ end
+
+ described_class.sidekiq_retries_exhausted_block.call(job, StandardError.new)
+ end
+ end
+end
diff --git a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
index 9adde5fc21a..a2bc264b0f6 100644
--- a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
+++ b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
@@ -34,5 +34,33 @@ describe UpdateHeadPipelineForMergeRequestWorker do
expect { subject.perform(merge_request.id) }.not_to change { merge_request.reload.head_pipeline_id }
end
end
+
+ context 'when a merge request pipeline exists' do
+ let!(:merge_request_pipeline) do
+ create(:ci_pipeline,
+ project: project,
+ source: :merge_request,
+ sha: latest_sha,
+ merge_request: merge_request)
+ end
+
+ it 'sets the merge request pipeline as the head pipeline' do
+ expect { subject.perform(merge_request.id) }
+ .to change { merge_request.reload.head_pipeline_id }
+ .from(nil).to(merge_request_pipeline.id)
+ end
+
+ context 'when branch pipeline exists' do
+ let!(:branch_pipeline) do
+ create(:ci_pipeline, project: project, source: :push, sha: latest_sha)
+ end
+
+ it 'prioritizes the merge request pipeline as the head pipeline' do
+ expect { subject.perform(merge_request.id) }
+ .to change { merge_request.reload.head_pipeline_id }
+ .from(nil).to(merge_request_pipeline.id)
+ end
+ end
+ end
end
end