gitlab.com/gitlab-org/gitlab-foss.git

Diffstat (limited to 'spec/requests/api')
-rw-r--r--  spec/requests/api/admin/ci/variables_spec.rb | 9
-rw-r--r--  spec/requests/api/branches_spec.rb | 82
-rw-r--r--  spec/requests/api/ci/pipelines_spec.rb | 69
-rw-r--r--  spec/requests/api/ci/runner/jobs_artifacts_spec.rb | 901
-rw-r--r--  spec/requests/api/ci/runner/jobs_put_spec.rb | 196
-rw-r--r--  spec/requests/api/ci/runner/jobs_request_post_spec.rb | 861
-rw-r--r--  spec/requests/api/ci/runner/jobs_trace_spec.rb | 292
-rw-r--r--  spec/requests/api/ci/runner/runners_delete_spec.rb | 54
-rw-r--r--  spec/requests/api/ci/runner/runners_post_spec.rb | 250
-rw-r--r--  spec/requests/api/ci/runner/runners_verify_post_spec.rb | 48
-rw-r--r--  spec/requests/api/ci/runner_spec.rb | 2474
-rw-r--r--  spec/requests/api/commits_spec.rb | 46
-rw-r--r--  spec/requests/api/composer_packages_spec.rb | 375
-rw-r--r--  spec/requests/api/conan_packages_spec.rb | 83
-rw-r--r--  spec/requests/api/deploy_keys_spec.rb | 1
-rw-r--r--  spec/requests/api/files_spec.rb | 50
-rw-r--r--  spec/requests/api/go_proxy_spec.rb | 4
-rw-r--r--  spec/requests/api/graphql/boards/board_list_issues_query_spec.rb | 101
-rw-r--r--  spec/requests/api/graphql/boards/board_lists_query_spec.rb | 14
-rw-r--r--  spec/requests/api/graphql/ci/groups_spec.rb | 55
-rw-r--r--  spec/requests/api/graphql/ci/jobs_spec.rb | 93
-rw-r--r--  spec/requests/api/graphql/ci/stages_spec.rb | 46
-rw-r--r--  spec/requests/api/graphql/issue_status_counts_spec.rb | 58
-rw-r--r--  spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb | 22
-rw-r--r--  spec/requests/api/graphql/milestone_spec.rb | 47
-rw-r--r--  spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb | 109
-rw-r--r--  spec/requests/api/graphql/mutations/boards/lists/update_spec.rb | 57
-rw-r--r--  spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb | 2
-rw-r--r--  spec/requests/api/graphql/mutations/design_management/move_spec.rb | 122
-rw-r--r--  spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb | 2
-rw-r--r--  spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb | 6
-rw-r--r--  spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb | 12
-rw-r--r--  spec/requests/api/graphql/mutations/issues/update_spec.rb | 41
-rw-r--r--  spec/requests/api/graphql/mutations/merge_requests/create_spec.rb | 1
-rw-r--r--  spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb | 57
-rw-r--r--  spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb | 2
-rw-r--r--  spec/requests/api/graphql/mutations/notes/update/note_spec.rb | 39
-rw-r--r--  spec/requests/api/graphql/mutations/snippets/create_spec.rb | 2
-rw-r--r--  spec/requests/api/graphql/mutations/snippets/update_spec.rb | 3
-rw-r--r--  spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb | 51
-rw-r--r--  spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb | 6
-rw-r--r--  spec/requests/api/graphql/project/alert_management/alerts_spec.rb | 15
-rw-r--r--  spec/requests/api/graphql/project/container_expiration_policy_spec.rb | 1
-rw-r--r--  spec/requests/api/graphql/project/issue/designs/designs_spec.rb | 3
-rw-r--r--  spec/requests/api/graphql/project/issues_spec.rb | 1
-rw-r--r--  spec/requests/api/graphql/project/jira_import_spec.rb | 5
-rw-r--r--  spec/requests/api/graphql/project/jira_projects_spec.rb | 2
-rw-r--r--  spec/requests/api/graphql/project/merge_requests_spec.rb | 39
-rw-r--r--  spec/requests/api/graphql/project/packages_spec.rb | 50
-rw-r--r--  spec/requests/api/graphql/project/pipeline_spec.rb | 6
-rw-r--r--  spec/requests/api/graphql/project/repository_spec.rb | 1
-rw-r--r--  spec/requests/api/graphql/project/tree/tree_spec.rb | 1
-rw-r--r--  spec/requests/api/graphql/project_query_spec.rb | 14
-rw-r--r--  spec/requests/api/graphql/user_query_spec.rb | 6
-rw-r--r--  spec/requests/api/group_import_spec.rb | 2
-rw-r--r--  spec/requests/api/group_milestones_spec.rb | 68
-rw-r--r--  spec/requests/api/group_packages_spec.rb | 182
-rw-r--r--  spec/requests/api/group_variables_spec.rb | 8
-rw-r--r--  spec/requests/api/groups_spec.rb | 61
-rw-r--r--  spec/requests/api/helpers_spec.rb | 1
-rw-r--r--  spec/requests/api/import_bitbucket_server_spec.rb | 2
-rw-r--r--  spec/requests/api/import_github_spec.rb | 2
-rw-r--r--  spec/requests/api/internal/base_spec.rb | 139
-rw-r--r--  spec/requests/api/internal/kubernetes_spec.rb | 154
-rw-r--r--  spec/requests/api/internal/pages_spec.rb | 1
-rw-r--r--  spec/requests/api/issues/get_group_issues_spec.rb | 6
-rw-r--r--  spec/requests/api/issues/get_project_issues_spec.rb | 9
-rw-r--r--  spec/requests/api/issues/issues_spec.rb | 58
-rw-r--r--  spec/requests/api/issues/post_projects_issues_spec.rb | 3
-rw-r--r--  spec/requests/api/jobs_spec.rb | 12
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 10
-rw-r--r--  spec/requests/api/notes_spec.rb | 1
-rw-r--r--  spec/requests/api/notification_settings_spec.rb | 3
-rw-r--r--  spec/requests/api/npm_packages_spec.rb | 151
-rw-r--r--  spec/requests/api/nuget_packages_spec.rb | 107
-rw-r--r--  spec/requests/api/pages_domains_spec.rb | 1
-rw-r--r--  spec/requests/api/performance_bar_spec.rb | 41
-rw-r--r--  spec/requests/api/project_export_spec.rb | 16
-rw-r--r--  spec/requests/api/project_hooks_spec.rb | 10
-rw-r--r--  spec/requests/api/project_milestones_spec.rb | 70
-rw-r--r--  spec/requests/api/project_snippets_spec.rb | 80
-rw-r--r--  spec/requests/api/project_templates_spec.rb | 73
-rw-r--r--  spec/requests/api/projects_spec.rb | 33
-rw-r--r--  spec/requests/api/pypi_packages_spec.rb | 313
-rw-r--r--  spec/requests/api/releases_spec.rb | 26
-rw-r--r--  spec/requests/api/snippets_spec.rb | 47
-rw-r--r--  spec/requests/api/suggestions_spec.rb | 4
-rw-r--r--  spec/requests/api/users_spec.rb | 2
88 files changed, 5273 insertions(+), 3310 deletions(-)
diff --git a/spec/requests/api/admin/ci/variables_spec.rb b/spec/requests/api/admin/ci/variables_spec.rb
index 812ee93ad21..f89964411f8 100644
--- a/spec/requests/api/admin/ci/variables_spec.rb
+++ b/spec/requests/api/admin/ci/variables_spec.rb
@@ -110,20 +110,19 @@ RSpec.describe ::API::Admin::Ci::Variables do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it 'does not allow values above 700 characters' do
+ it 'does not allow values above 10,000 characters' do
too_long_message = <<~MESSAGE.strip
- The encrypted value of the provided variable exceeds 1024 bytes. \
- Variables over 700 characters risk exceeding the limit.
+ The value of the provided variable exceeds the 10000 character limit
MESSAGE
expect do
post api('/admin/ci/variables', admin),
- params: { key: 'too_long', value: SecureRandom.hex(701) }
+ params: { key: 'too_long', value: SecureRandom.hex(10_001) }
end.not_to change { ::Ci::InstanceVariable.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to match('message' =>
- a_hash_including('encrypted_value' => [too_long_message]))
+ a_hash_including('value' => [too_long_message]))
end
end
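
The rewritten assertion above moves the limit from the encrypted value (1024 bytes) to the plain value (10,000 characters) and expects the error to be reported on the value attribute. A minimal sketch of a Rails length validation that would produce the asserted per-attribute message; the model code itself is not part of this diff, and the class name and message text here are assumptions:

    # Hypothetical sketch, not GitLab's actual model code.
    class InstanceVariable < ApplicationRecord
      validates :value,
                length: {
                  maximum: 10_000,
                  too_long: 'The value of the provided variable exceeds the %{count} character limit'
                }
    end

With a validation like this, errors[:value] would contain exactly the message the spec matches in the JSON response.
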
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 46acd92803f..4b9b82b3a5b 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -39,9 +39,11 @@ RSpec.describe API::Branches do
end
context 'with branch_list_keyset_pagination feature off' do
- context 'with legacy pagination params' do
+ let(:base_params) { {} }
+
+ context 'with offset pagination params' do
it 'returns the repository branches' do
- get api(route, current_user), params: { per_page: 100 }
+ get api(route, current_user), params: base_params.merge(per_page: 100)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branches')
@@ -53,7 +55,7 @@ RSpec.describe API::Branches do
it 'determines only a limited number of merged branch names' do
expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
- get api(route, current_user), params: { per_page: 2 }
+ get api(route, current_user), params: base_params.merge(per_page: 2)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq 2
@@ -64,7 +66,7 @@ RSpec.describe API::Branches do
it 'merge status matches reality on paginated input' do
expected_first_branch_name = project.repository.branches_sorted_by('name')[20].name
- get api(route, current_user), params: { per_page: 20, page: 2 }
+ get api(route, current_user), params: base_params.merge(per_page: 20, page: 2)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq 20
@@ -74,11 +76,11 @@ RSpec.describe API::Branches do
end
end
- context 'with gitaly pagination params ' do
+ context 'with gitaly pagination params' do
it 'merge status matches reality on paginated input' do
expected_first_branch_name = project.repository.branches_sorted_by('name').first.name
- get api(route, current_user), params: { per_page: 20, page_token: 'feature' }
+ get api(route, current_user), params: base_params.merge(per_page: 20, page_token: 'feature')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq 20
@@ -91,52 +93,58 @@ RSpec.describe API::Branches do
context 'with branch_list_keyset_pagination feature on' do
before do
- stub_feature_flags(branch_list_keyset_pagination: true)
+ stub_feature_flags(branch_list_keyset_pagination: project)
end
- context 'with gitaly pagination params ' do
- it 'returns the repository branches' do
- get api(route, current_user), params: { per_page: 100 }
+ context 'with keyset pagination option' do
+ let(:base_params) { { pagination: 'keyset' } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/branches')
- branch_names = json_response.map { |x| x['name'] }
- expect(branch_names).to match_array(project.repository.branch_names)
- end
+ context 'with gitaly pagination params ' do
+ it 'returns the repository branches' do
+ get api(route, current_user), params: base_params.merge(per_page: 100)
- it 'determines only a limited number of merged branch names' do
- expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/branches')
+ expect(response.headers).not_to include('Link', 'Links')
+ branch_names = json_response.map { |x| x['name'] }
+ expect(branch_names).to match_array(project.repository.branch_names)
+ end
- get api(route, current_user), params: { per_page: 2 }
+ it 'determines only a limited number of merged branch names' do
+ expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq 2
+ get api(route, current_user), params: base_params.merge(per_page: 2)
- check_merge_status(json_response)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers).to include('Link', 'Links')
+ expect(json_response.count).to eq 2
- it 'merge status matches reality on paginated input' do
- expected_first_branch_name = project.repository.branches_sorted_by('name').drop_while { |b| b.name <= 'feature' }.first.name
+ check_merge_status(json_response)
+ end
- get api(route, current_user), params: { per_page: 20, page_token: 'feature' }
+ it 'merge status matches reality on paginated input' do
+ expected_first_branch_name = project.repository.branches_sorted_by('name').drop_while { |b| b.name <= 'feature' }.first.name
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq 20
- expect(json_response.first['name']).to eq(expected_first_branch_name)
+ get api(route, current_user), params: base_params.merge(per_page: 20, page_token: 'feature')
- check_merge_status(json_response)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq 20
+ expect(json_response.first['name']).to eq(expected_first_branch_name)
+
+ check_merge_status(json_response)
+ end
end
- end
- context 'with legacy pagination params' do
- it 'ignores legacy pagination params' do
- expected_first_branch_name = project.repository.branches_sorted_by('name').first.name
- get api(route, current_user), params: { per_page: 20, page: 2 }
+ context 'with offset pagination params' do
+ it 'ignores legacy pagination params' do
+ expected_first_branch_name = project.repository.branches_sorted_by('name').first.name
+ get api(route, current_user), params: base_params.merge(per_page: 20, page: 2)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.first['name']).to eq(expected_first_branch_name)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.first['name']).to eq(expected_first_branch_name)
- check_merge_status(json_response)
+ check_merge_status(json_response)
+ end
end
end
end
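
The new base_params let block allows the same examples to run with either offset or keyset pagination; with pagination: 'keyset' the endpoint advertises the next page through the Link response header, which the updated expectations check. A hypothetical client-side sketch of walking branch pages that way, using the public branches endpoint; the host, token handling, and lack of error handling are assumptions, not part of this diff:

    require 'net/http'
    require 'json'
    require 'uri'

    # Yields every branch of a project by following keyset-paginated pages.
    def each_branch(base_url, token)
      url = URI("#{base_url}?pagination=keyset&per_page=100")
      while url
        res = Net::HTTP.start(url.host, url.port, use_ssl: url.scheme == 'https') do |http|
          req = Net::HTTP::Get.new(url)
          req['PRIVATE-TOKEN'] = token
          http.request(req)
        end
        JSON.parse(res.body).each { |branch| yield branch }
        # The next page, if any, is advertised in the Link header as rel="next".
        next_link = res['Link'] && res['Link'].split(',').find { |l| l.include?('rel="next"') }
        url = next_link && URI(next_link[/<(.*?)>/, 1])
      end
    end

    # each_branch('https://gitlab.example.com/api/v4/projects/42/repository/branches', ENV['GITLAB_TOKEN']) do |b|
    #   puts b['name']
    # end
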
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index c9ca806e2c4..111bc933ea4 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -438,7 +438,7 @@ RSpec.describe API::Ci::Pipelines do
expect(response).to match_response_schema('public_api/v4/pipeline/detail')
end
- it 'returns project pipelines' do
+ it 'returns project pipeline' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user)
expect(response).to have_gitlab_http_status(:ok)
@@ -475,6 +475,20 @@ RSpec.describe API::Ci::Pipelines do
expect(json_response['id']).to be nil
end
end
+
+ context 'when config source is not ci' do
+ let(:non_ci_config_source) { ::Ci::PipelineEnums.non_ci_config_source_values.first }
+ let(:pipeline_not_ci) do
+ create(:ci_pipeline, config_source: non_ci_config_source, project: project)
+ end
+
+ it 'returns the specified pipeline' do
+ get api("/projects/#{project.id}/pipelines/#{pipeline_not_ci.id}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['sha']).to eq(pipeline_not_ci.sha)
+ end
+ end
end
describe 'GET /projects/:id/pipelines/latest' do
@@ -721,55 +735,36 @@ RSpec.describe API::Ci::Pipelines do
let(:pipeline) { create(:ci_pipeline, project: project) }
- context 'when feature is enabled' do
- before do
- stub_feature_flags(junit_pipeline_view: true)
- end
-
- context 'when pipeline does not have a test report' do
- it 'returns an empty test report' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(0)
- end
- end
-
- context 'when pipeline has a test report' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
-
- it 'returns the test report' do
- subject
+ context 'when pipeline does not have a test report' do
+ it 'returns an empty test report' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(4)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(0)
end
+ end
- context 'when pipeline has corrupt test reports' do
- before do
- job = create(:ci_build, pipeline: pipeline)
- create(:ci_job_artifact, :junit_with_corrupted_data, job: job, project: project)
- end
+ context 'when pipeline has a test report' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
- it 'returns a suite_error' do
- subject
+ it 'returns the test report' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(4)
end
end
- context 'when feature is disabled' do
+ context 'when pipeline has corrupt test reports' do
before do
- stub_feature_flags(junit_pipeline_view: false)
+ create(:ci_build, :broken_test_reports, name: 'rspec', pipeline: pipeline)
end
- it 'renders empty response' do
+ it 'returns a suite_error' do
subject
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
end
end
end
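
The new context above exercises only the first value returned by ::Ci::PipelineEnums.non_ci_config_source_values. A sketch of how the same assertion could be repeated for every non-CI config source, staying inside the same spec context; this is an illustrative extension, not part of the diff:

    ::Ci::PipelineEnums.non_ci_config_source_values.each do |source|
      context "when config source is #{source}" do
        let(:pipeline_not_ci) { create(:ci_pipeline, config_source: source, project: project) }

        it 'returns the specified pipeline' do
          get api("/projects/#{project.id}/pipelines/#{pipeline_not_ci.id}", user)

          expect(response).to have_gitlab_http_status(:ok)
          expect(json_response['sha']).to eq(pipeline_not_ci.sha)
        end
      end
    end
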
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
new file mode 100644
index 00000000000..e5c60bb539b
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -0,0 +1,901 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'artifacts' do
+ let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
+ let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
+ let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
+ let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
+ let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
+
+ before do
+ stub_artifacts_object_storage
+ job.run!
+ end
+
+ shared_examples_for 'rejecting artifacts that are too large' do
+ let(:filesize) { 100.megabytes.to_i }
+ let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
+
+ shared_examples_for 'failed request' do
+ it 'responds with payload too large error' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:payload_too_large)
+ end
+ end
+
+ context 'based on plan limit setting' do
+ let(:application_max_size) { sample_max_size + 100 }
+ let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
+
+ before do
+ create(:plan_limits, :default_plan, limit_name => sample_max_size)
+ stub_application_setting(max_artifacts_size: application_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on application setting' do
+ before do
+ stub_application_setting(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on root namespace setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on child namespace setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+ let(:root_namespace_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: root_namespace_max_size)
+ namespace.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on project setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+ let(:root_namespace_max_size) { sample_max_size + 10 }
+ let(:child_namespace_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: root_namespace_max_size)
+ namespace.update!(max_artifacts_size: child_namespace_max_size)
+ project.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+ end
+
+ describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
+ context 'when using token as parameter' do
+ context 'and the artifact is too large' do
+ it_behaves_like 'rejecting artifacts that are too large' do
+ let(:success_code) { :ok }
+ let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
+ end
+ end
+
+ context 'posting artifacts to running job' do
+ subject do
+ authorize_artifacts_with_token_in_params
+ end
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
+ let(:send_request) { subject }
+ end
+
+ it 'updates runner info' do
+ expect { subject }.to change { runner.reload.contacted_at }
+ end
+
+ shared_examples 'authorizes local file' do
+ it 'succeeds' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+
+ context 'when using local storage' do
+ it_behaves_like 'authorizes local file'
+ end
+
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_artifacts_object_storage(enabled: true, direct_upload: true)
+ end
+
+ it 'succeeds' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response).not_to have_key('TempPath')
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: true, direct_upload: false)
+ end
+
+ it_behaves_like 'authorizes local file'
+ end
+ end
+ end
+ end
+
+ context 'when using token as header' do
+ it 'authorizes posting artifacts to running job' do
+ authorize_artifacts_with_token_in_headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).not_to be_nil
+ end
+
+ it 'fails to post too large artifact' do
+ stub_application_setting(max_artifacts_size: 0)
+
+ authorize_artifacts_with_token_in_headers(filesize: 100)
+
+ expect(response).to have_gitlab_http_status(:payload_too_large)
+ end
+ end
+
+ context 'when using runners token' do
+ it 'fails to authorize artifacts posting' do
+ authorize_artifacts(token: job.project.runners_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ it 'rejects requests that did not go through gitlab-workhorse' do
+ headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
+
+ authorize_artifacts
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'authorization token is invalid' do
+ it 'responds with forbidden' do
+ authorize_artifacts(token: 'invalid', filesize: 100 )
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'authorize uploading of an lsif artifact' do
+ before do
+ stub_feature_flags(code_navigation: job.project)
+ end
+
+ it 'adds ProcessLsif header' do
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['ProcessLsif']).to be_truthy
+ end
+
+ it 'adds ProcessLsifReferences header' do
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['ProcessLsifReferences']).to be_truthy
+ end
+
+ context 'code_navigation feature flag is disabled' do
+ it 'responds with a forbidden error' do
+ stub_feature_flags(code_navigation: false)
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['ProcessLsif']).to be_falsy
+ expect(json_response['ProcessLsifReferences']).to be_falsy
+ end
+ end
+ end
+
+ context 'code_navigation_references feature flag is disabled' do
+ it 'sets ProcessLsifReferences header to false' do
+ stub_feature_flags(code_navigation_references: false)
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['ProcessLsif']).to be_truthy
+ expect(json_response['ProcessLsifReferences']).to be_falsy
+ end
+ end
+ end
+ end
+
+ def authorize_artifacts(params = {}, request_headers = headers)
+ post api("/jobs/#{job.id}/artifacts/authorize"), params: params, headers: request_headers
+ end
+
+ def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
+ params = params.merge(token: job.token)
+ authorize_artifacts(params, request_headers)
+ end
+
+ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
+ authorize_artifacts(params, request_headers)
+ end
+ end
+
+ describe 'POST /api/v4/jobs/:id/artifacts' do
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts' do
+ let(:send_request) do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+ end
+
+ it 'updates runner info' do
+ expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when the artifact is too large' do
+ it_behaves_like 'rejecting artifacts that are too large' do
+ # This filesize validation also happens for files that are not stored
+ # remotely; it's just hard to stub the filesize to be more than a
+ # megabyte in the other cases.
+ let!(:fog_connection) do
+ stub_artifacts_object_storage(direct_upload: true)
+ end
+
+ let(:file_upload) { fog_to_uploaded_file(object) }
+ let(:success_code) { :created }
+
+ let(:object) do
+ fog_connection.directories.new(key: 'artifacts').files.create( # rubocop:disable Rails/SaveBang
+ key: 'tmp/uploads/12312300',
+ body: 'content'
+ )
+ end
+
+ let(:send_request) do
+ upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
+ end
+
+ before do
+ allow(object).to receive(:content_length).and_return(filesize)
+ end
+ end
+ end
+
+ context 'when artifacts are being stored inside of tmp path' do
+ before do
+ # by configuring this path we allow temp files to be passed from any path
+ allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
+ end
+
+ context 'when job has been erased' do
+ let(:job) { create(:ci_build, erased_at: Time.now) }
+
+ before do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+
+ it 'responds with forbidden' do
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when job is running' do
+ shared_examples 'successful artifacts upload' do
+ it 'updates successfully' do
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when uses accelerated file post' do
+ context 'for file stored locally' do
+ before do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+
+ it_behaves_like 'successful artifacts upload'
+ end
+
+ context 'for file stored remotely' do
+ let!(:fog_connection) do
+ stub_artifacts_object_storage(direct_upload: true)
+ end
+
+ let(:object) do
+ fog_connection.directories.new(key: 'artifacts').files.create( # rubocop:disable Rails/SaveBang
+ key: 'tmp/uploads/12312300',
+ body: 'content'
+ )
+ end
+
+ let(:file_upload) { fog_to_uploaded_file(object) }
+
+ before do
+ upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => remote_id)
+ end
+
+ context 'when valid remote_id is used' do
+ let(:remote_id) { '12312300' }
+
+ it_behaves_like 'successful artifacts upload'
+ end
+
+ context 'when invalid remote_id is used' do
+ let(:remote_id) { 'invalid id' }
+
+ it 'responds with bad request' do
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(json_response['message']).to eq("Missing file")
+ end
+ end
+ end
+ end
+
+ context 'when using runners token' do
+ it 'responds with forbidden' do
+ upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when artifacts post request does not contain file' do
+ it 'fails to post artifacts without file' do
+ post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'GitLab Workhorse is not configured' do
+ it 'fails to post artifacts without GitLab-Workhorse' do
+ post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'Is missing GitLab Workhorse token headers' do
+ let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
+
+ it 'fails to post artifacts without GitLab-Workhorse' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
+
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when setting an expire date' do
+ let(:default_artifacts_expire_in) {}
+ let(:post_data) do
+ { file: file_upload,
+ expire_in: expire_in }
+ end
+
+ before do
+ stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
+
+ upload_artifacts(file_upload, headers_with_token, post_data)
+ end
+
+ context 'when an expire_in is given' do
+ let(:expire_in) { '7 days' }
+
+ it 'updates when specified' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
+ end
+ end
+
+ context 'when no expire_in is given' do
+ let(:expire_in) { nil }
+
+ it 'ignores if not specified' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_nil
+ end
+
+ context 'with application default' do
+ context 'when default is 5 days' do
+ let(:default_artifacts_expire_in) { '5 days' }
+
+ it 'sets to application default' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
+ end
+ end
+
+ context 'when default is 0' do
+ let(:default_artifacts_expire_in) { '0' }
+
+ it 'does not set expire_in' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_nil
+ end
+ end
+
+ context 'when value is never' do
+ let(:expire_in) { 'never' }
+ let(:default_artifacts_expire_in) { '5 days' }
+
+ it 'does not set expire_in' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_nil
+ end
+ end
+ end
+ end
+ end
+
+ context 'posts artifacts file and metadata file' do
+ let!(:artifacts) { file_upload }
+ let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
+ let!(:metadata) { file_upload2 }
+ let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
+
+ let(:stored_artifacts_file) { job.reload.artifacts_file }
+ let(:stored_metadata_file) { job.reload.artifacts_metadata }
+ let(:stored_artifacts_size) { job.reload.artifacts_size }
+ let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
+ let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
+ let(:file_keys) { post_data.keys }
+ let(:send_rewritten_field) { true }
+
+ before do
+ workhorse_finalize_with_multiple_files(
+ api("/jobs/#{job.id}/artifacts"),
+ method: :post,
+ file_keys: file_keys,
+ params: post_data,
+ headers: headers_with_token,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+
+ context 'when posts data accelerated by workhorse is correct' do
+ let(:post_data) { { file: artifacts, metadata: metadata } }
+
+ it 'stores artifacts and artifacts metadata' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(stored_artifacts_file.filename).to eq(artifacts.original_filename)
+ expect(stored_metadata_file.filename).to eq(metadata.original_filename)
+ expect(stored_artifacts_size).to eq(artifacts.size)
+ expect(stored_artifacts_sha256).to eq(artifacts_sha256)
+ expect(stored_metadata_sha256).to eq(metadata_sha256)
+ end
+ end
+
+ context 'with a malicious file.path param' do
+ let(:post_data) { {} }
+ let(:tmp_file) { Tempfile.new('crafted.file.path') }
+ let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
+
+ it 'rejects the request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(stored_artifacts_size).to be_nil
+ end
+ end
+
+ context 'when workhorse header is missing' do
+ let(:post_data) { { file: artifacts, metadata: metadata } }
+ let(:send_rewritten_field) { false }
+
+ it 'rejects the request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(stored_artifacts_size).to be_nil
+ end
+ end
+
+ context 'when there is no artifacts file in post data' do
+ let(:post_data) do
+ { metadata: metadata }
+ end
+
+ it 'is expected to respond with bad request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'does not store metadata' do
+ expect(stored_metadata_file).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is archive' do
+ context 'when artifact_format is zip' do
+ let(:params) { { artifact_type: :archive, artifact_format: :zip } }
+
+ it 'stores junit test report' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is gzip' do
+ let(:params) { { artifact_type: :archive, artifact_format: :gzip } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_archive).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is junit' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
+ let(:params) { { artifact_type: :junit, artifact_format: :gzip } }
+
+ it 'stores junit test report' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_junit).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
+ let(:params) { { artifact_type: :junit, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_junit).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is metrics_referee' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
+ let(:params) { { artifact_type: :metrics_referee, artifact_format: :gzip } }
+
+ it 'stores metrics_referee data' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
+ let(:params) { { artifact_type: :metrics_referee, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_metrics_referee).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is network_referee' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
+ let(:params) { { artifact_type: :network_referee, artifact_format: :gzip } }
+
+ it 'stores network_referee data' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_network_referee).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
+ let(:params) { { artifact_type: :network_referee, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_network_referee).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is dotenv' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
+ let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }
+
+ it 'stores dotenv file' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_dotenv).not_to be_nil
+ end
+
+ it 'parses dotenv file' do
+ expect do
+ upload_artifacts(file_upload, headers_with_token, params)
+ end.to change { job.job_variables.count }.from(0).to(2)
+ end
+
+ context 'when parse error happens' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('Invalid Format')
+ end
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
+ let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_dotenv).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when artifacts already exist for the job' do
+ let(:params) do
+ {
+ artifact_type: :archive,
+ artifact_format: :zip,
+ 'file.sha256' => uploaded_sha256
+ }
+ end
+
+ let(:existing_sha256) { '0' * 64 }
+
+ let!(:existing_artifact) do
+ create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
+ end
+
+ context 'when sha256 is the same of the existing artifact' do
+ let(:uploaded_sha256) { existing_sha256 }
+
+ it 'ignores the new artifact' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
+ end
+ end
+
+ context 'when sha256 is different than the existing artifact' do
+ let(:uploaded_sha256) { '1' * 64 }
+
+ it 'logs and returns an error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
+ end
+ end
+ end
+
+ context 'when object storage throws errors' do
+ let(:params) { { artifact_type: :archive, artifact_format: :zip } }
+
+ it 'does not store artifacts' do
+ allow_next_instance_of(JobArtifactUploader) do |uploader|
+ allow(uploader).to receive(:store!).and_raise(Errno::EIO)
+ end
+
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(job.reload.job_artifacts_archive).to be_nil
+ end
+ end
+
+ context 'when artifacts are being stored outside of tmp path' do
+ let(:new_tmpdir) { Dir.mktmpdir }
+
+ before do
+ # init before overwriting tmp dir
+ file_upload
+
+ # by configuring this path we only allow files to be passed from @tmpdir,
+ # but all temporary files are stored in the system tmp directory
+ allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(new_tmpdir)
+ end
+
+ it 'fails to post artifacts stored outside of the tmp path' do
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ def upload_artifacts(file, headers = {}, params = {})
+ workhorse_finalize(
+ api("/jobs/#{job.id}/artifacts"),
+ method: :post,
+ file_key: :file,
+ params: params.merge(file: file),
+ headers: headers,
+ send_rewritten_field: true
+ )
+ end
+ end
+
+ describe 'GET /api/v4/jobs/:id/artifacts' do
+ let(:token) { job.token }
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts' do
+ let(:send_request) { download_artifact }
+ end
+
+ it 'updates runner info' do
+ expect { download_artifact }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when job has artifacts' do
+ let(:job) { create(:ci_build) }
+ let(:store) { JobArtifactUploader::Store::LOCAL }
+
+ before do
+ create(:ci_job_artifact, :archive, file_store: store, job: job)
+ end
+
+ context 'when using job token' do
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) }
+ end
+
+ before do
+ download_artifact
+ end
+
+ it 'downloads artifacts' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers.to_h).to include download_headers
+ end
+ end
+
+ context 'when artifacts are stored remotely' do
+ let(:store) { JobArtifactUploader::Store::REMOTE }
+ let!(:job) { create(:ci_build) }
+
+ context 'when proxy download is being used' do
+ before do
+ download_artifact(direct_download: false)
+ end
+
+ it 'uses workhorse send-url' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers.to_h).to include(
+ 'Gitlab-Workhorse-Send-Data' => /send-url:/)
+ end
+ end
+
+ context 'when direct download is being used' do
+ before do
+ download_artifact(direct_download: true)
+ end
+
+ it 'receives a redirect for downloading artifacts' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response.headers).to include('Location')
+ end
+ end
+ end
+ end
+
+ context 'when using runners token' do
+ let(:token) { job.project.runners_token }
+
+ before do
+ download_artifact
+ end
+
+ it 'responds with forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when job does not have artifacts' do
+ it 'responds with not found' do
+ download_artifact
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ def download_artifact(params = {}, request_headers = headers)
+ params = params.merge(token: token)
+ job.reload
+
+ get api("/jobs/#{job.id}/artifacts"), params: params, headers: request_headers
+ end
+ end
+ end
+ end
+end
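
The 'rejecting artifacts that are too large' shared examples near the top of this file set a smaller limit at one level while leaving the less specific levels larger: plan limit, application setting, root namespace, child namespace, and project. That is consistent with the most specific configured limit taking effect, although the resolving code itself is not shown in this diff. A compact sketch of that reading, written against explicit arguments rather than GitLab internals; this helper is hypothetical:

    # Hypothetical helper illustrating the precedence the shared examples assert.
    # The most specific configured limit wins; nil means "not set at this level".
    def effective_max_artifacts_size(project:, namespace:, root_namespace:, application_setting:)
      [project.max_artifacts_size,
       namespace.max_artifacts_size,
       root_namespace.max_artifacts_size,
       application_setting.max_artifacts_size].compact.first
    end
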
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
new file mode 100644
index 00000000000..025747f2f0c
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -0,0 +1,196 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'PUT /api/v4/jobs/:id' do
+ let(:job) do
+ create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
+ end
+
+ before do
+ job.run!
+ end
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id' do
+ let(:send_request) { update_job(state: 'success') }
+ end
+
+ it 'updates runner info' do
+ expect { update_job(state: 'success') }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when status is given' do
+ it 'marks job as succeeded' do
+ update_job(state: 'success')
+
+ job.reload
+ expect(job).to be_success
+ end
+
+ it 'marks job as failed' do
+ update_job(state: 'failed')
+
+ job.reload
+ expect(job).to be_failed
+ expect(job).to be_unknown_failure
+ end
+
+ context 'when failure_reason is script_failure' do
+ before do
+ update_job(state: 'failed', failure_reason: 'script_failure')
+ job.reload
+ end
+
+ it { expect(job).to be_script_failure }
+ end
+
+ context 'when failure_reason is runner_system_failure' do
+ before do
+ update_job(state: 'failed', failure_reason: 'runner_system_failure')
+ job.reload
+ end
+
+ it { expect(job).to be_runner_system_failure }
+ end
+
+ context 'when failure_reason is unrecognized value' do
+ before do
+ update_job(state: 'failed', failure_reason: 'what_is_this')
+ job.reload
+ end
+
+ it { expect(job).to be_unknown_failure }
+ end
+
+ context 'when failure_reason is job_execution_timeout' do
+ before do
+ update_job(state: 'failed', failure_reason: 'job_execution_timeout')
+ job.reload
+ end
+
+ it { expect(job).to be_job_execution_timeout }
+ end
+
+ context 'when failure_reason is unmet_prerequisites' do
+ before do
+ update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
+ job.reload
+ end
+
+ it { expect(job).to be_unmet_prerequisites }
+ end
+ end
+
+ context 'when trace is given' do
+ it 'creates a trace artifact' do
+ allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do
+ ArchiveTraceWorker.new.perform(job.id)
+ end
+
+ update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
+
+ job.reload
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
+ expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
+ end
+
+ context 'when concurrent update of trace is happening' do
+ before do
+ job.trace.write('wb') do
+ update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
+ end
+ end
+
+ it 'returns that operation conflicts' do
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
+ end
+
+ context 'when no trace is given' do
+ it 'does not override trace information' do
+ update_job
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE'
+ end
+
+ context 'when running state is sent' do
+ it 'updates updated_at value' do
+ expect { update_job_after_time }.to change { job.reload.updated_at }
+ end
+ end
+
+ context 'when other state is sent' do
+ it "doesn't update update_at value" do
+ expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
+ end
+ end
+ end
+
+ context 'when job has been erased' do
+ let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
+
+ it 'responds with forbidden' do
+ update_job
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when job has already been finished' do
+ before do
+ job.trace.set('Job failed')
+ job.drop!(:script_failure)
+ end
+
+ it 'does not update job status and job trace' do
+ update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
+
+ job.reload
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response.header['Job-Status']).to eq 'failed'
+ expect(job.trace.raw).to eq 'Job failed'
+ expect(job).to be_failed
+ end
+ end
+
+ def update_job(token = job.token, **params)
+ new_params = params.merge(token: token)
+ put api("/jobs/#{job.id}"), params: new_params
+ end
+
+ def update_job_after_time(update_interval = 20.minutes, state = 'running')
+ Timecop.travel(job.updated_at + update_interval) do
+ update_job(job.token, state: state)
+ end
+ end
+ end
+ end
+end
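
The update_job helper in the spec above drives PUT /api/v4/jobs/:id with the job token, a state, and optionally a failure_reason or trace; once a job has finished, the examples show the endpoint answering 403 with a Job-Status header. A hypothetical runner-side sketch of the same call; the gitlab_url, environment variable, and JSON body encoding are assumptions, not taken from this diff:

    require 'net/http'
    require 'json'
    require 'uri'

    # Reports a job result to the runner API the way update_job does in the spec.
    def report_job_result(gitlab_url, job_id, job_token, state, failure_reason: nil)
      uri = URI("#{gitlab_url}/api/v4/jobs/#{job_id}")
      body = { token: job_token, state: state }
      body[:failure_reason] = failure_reason if failure_reason

      Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
        req = Net::HTTP::Put.new(uri, 'Content-Type' => 'application/json')
        req.body = body.to_json
        # 200 on success; 403 with a Job-Status header once the job is already finished.
        http.request(req)
      end
    end

    # report_job_result('https://gitlab.example.com', 123, ENV['CI_JOB_TOKEN'], 'failed', failure_reason: 'script_failure')
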
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
new file mode 100644
index 00000000000..4fa95f8ebb2
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -0,0 +1,861 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'POST /api/v4/jobs/request' do
+ let!(:last_update) {}
+ let!(:new_update) { }
+ let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
+
+ before do
+ job
+ stub_container_registry_config(enabled: false)
+ end
+
+ shared_examples 'no jobs available' do
+ before do
+ request_job
+ end
+
+ context 'when runner sends version in User-Agent' do
+ context 'for stable version' do
+ it 'gives 204 and set X-GitLab-Last-Update' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header).to have_key('X-GitLab-Last-Update')
+ end
+ end
+
+ context 'when last_update is up-to-date' do
+ let(:last_update) { runner.ensure_runner_queue_value }
+
+ it 'gives 204 and set the same X-GitLab-Last-Update' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
+ end
+ end
+
+ context 'when last_update is outdated' do
+ let(:last_update) { runner.ensure_runner_queue_value }
+ let(:new_update) { runner.tick_runner_queue }
+
+ it 'gives 204 and set a new X-GitLab-Last-Update' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
+ end
+ end
+
+ context 'when beta version is sent' do
+ let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
+
+ it { expect(response).to have_gitlab_http_status(:no_content) }
+ end
+
+ context 'when pre-9-0 version is sent' do
+ let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
+
+ it { expect(response).to have_gitlab_http_status(:no_content) }
+ end
+
+ context 'when pre-9-0 beta version is sent' do
+ let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
+
+ it { expect(response).to have_gitlab_http_status(:no_content) }
+ end
+ end
+ end
+
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ post api('/jobs/request')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ post api('/jobs/request'), params: { token: 'invalid' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ context 'when Runner is not active' do
+ let(:runner) { create(:ci_runner, :inactive) }
+ let(:update_value) { runner.ensure_runner_queue_value }
+
+ it 'returns 204 error' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
+ end
+ end
+
+ context 'when jobs are finished' do
+ before do
+ job.success
+ end
+
+ it_behaves_like 'no jobs available'
+ end
+
+ context 'when other projects have pending jobs' do
+ before do
+ job.success
+ create(:ci_build, :pending)
+ end
+
+ it_behaves_like 'no jobs available'
+ end
+
+ context 'when shared runner requests job for project without shared_runners_enabled' do
+ let(:runner) { create(:ci_runner, :instance) }
+
+ it_behaves_like 'no jobs available'
+ end
+
+ context 'when there is a pending job' do
+ let(:expected_job_info) do
+ { 'name' => job.name,
+ 'stage' => job.stage,
+ 'project_id' => job.project.id,
+ 'project_name' => job.project.name }
+ end
+
+ let(:expected_git_info) do
+ { 'repo_url' => job.repo_url,
+ 'ref' => job.ref,
+ 'sha' => job.sha,
+ 'before_sha' => job.before_sha,
+ 'ref_type' => 'branch',
+ 'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ "+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
+ 'depth' => project.ci_default_git_depth }
+ end
+
+ let(:expected_steps) do
+ [{ 'name' => 'script',
+ 'script' => %w(echo),
+ 'timeout' => job.metadata_timeout,
+ 'when' => 'on_success',
+ 'allow_failure' => false },
+ { 'name' => 'after_script',
+ 'script' => %w(ls date),
+ 'timeout' => job.metadata_timeout,
+ 'when' => 'always',
+ 'allow_failure' => true }]
+ end
+
+ let(:expected_variables) do
+ [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
+ { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }]
+ end
+
+ let(:expected_artifacts) do
+ [{ 'name' => 'artifacts_file',
+ 'untracked' => false,
+ 'paths' => %w(out/),
+ 'when' => 'always',
+ 'expire_in' => '7d',
+ "artifact_type" => "archive",
+ "artifact_format" => "zip" }]
+ end
+
+ let(:expected_cache) do
+ [{ 'key' => 'cache_key',
+ 'untracked' => false,
+ 'paths' => ['vendor/*'],
+ 'policy' => 'pull-push' }]
+ end
+
+ let(:expected_features) { { 'trace_sections' => true } }
+
+ it 'picks a job' do
+ request_job info: { platform: :darwin }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.headers['Content-Type']).to eq('application/json')
+ expect(response.headers).not_to have_key('X-GitLab-Last-Update')
+ expect(runner.reload.platform).to eq('darwin')
+ expect(json_response['id']).to eq(job.id)
+ expect(json_response['token']).to eq(job.token)
+ expect(json_response['job_info']).to eq(expected_job_info)
+ expect(json_response['git_info']).to eq(expected_git_info)
+ expect(json_response['image']).to eq({ 'name' => 'ruby:2.7', 'entrypoint' => '/bin/sh', 'ports' => [] })
+ expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
+ 'alias' => nil, 'command' => nil, 'ports' => [] },
+ { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
+ 'alias' => 'docker', 'command' => 'sleep 30', 'ports' => [] }])
+ expect(json_response['steps']).to eq(expected_steps)
+ expect(json_response['artifacts']).to eq(expected_artifacts)
+ expect(json_response['cache']).to eq(expected_cache)
+ expect(json_response['variables']).to include(*expected_variables)
+ expect(json_response['features']).to eq(expected_features)
+ end
+
+ it 'creates persistent ref' do
+ expect_any_instance_of(::Ci::PersistentRef).to receive(:create_ref)
+ .with(job.sha, "refs/#{Repository::REF_PIPELINES}/#{job.commit_id}")
+
+ request_job info: { platform: :darwin }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(job.id)
+ end
+
+ context 'when job is made for tag' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+
+            it 'sets tag as ref_type' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['ref_type']).to eq('tag')
+ end
+
+ context 'when GIT_DEPTH is specified' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}")
+ end
+ end
+
+ context 'when a Gitaly exception is thrown during response' do
+ before do
+ allow_next_instance_of(Ci::BuildRunnerPresenter) do |instance|
+ allow(instance).to receive(:artifacts).and_raise(GRPC::DeadlineExceeded)
+ end
+ end
+
+ it 'fails the job as a scheduler failure' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(job.reload.failed?).to be_truthy
+ expect(job.failure_reason).to eq('scheduler_failure')
+ expect(job.runner_id).to eq(runner.id)
+ expect(job.runner_session).to be_nil
+ end
+ end
+
+ context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
+ before do
+ project.update!(ci_default_git_depth: nil)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs'])
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/tags/*:refs/tags/*',
+ '+refs/heads/*:refs/remotes/origin/*')
+ end
+ end
+ end
+
+ context 'when job filtered by job_age' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago) }
+
+            context 'when job is queued for less than job_age parameter' do
+ let(:job_age) { 120 }
+
+ it 'gives 204' do
+ request_job(job_age: job_age)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+            context 'when job is queued for more than job_age parameter' do
+ let(:job_age) { 30 }
+
+ it 'picks a job' do
+ request_job(job_age: job_age)
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+ end
+
+ context 'when job is made for branch' do
+            it 'sets branch as ref_type' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['ref_type']).to eq('branch')
+ end
+
+ context 'when GIT_DEPTH is specified' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}")
+ end
+ end
+
+ context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
+ before do
+ project.update!(ci_default_git_depth: nil)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs'])
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/tags/*:refs/tags/*',
+ '+refs/heads/*:refs/remotes/origin/*')
+ end
+ end
+ end
+
+ context 'when job is for a release' do
+ let!(:job) { create(:ci_build, :release_options, pipeline: pipeline) }
+
+ context 'when `multi_build_steps` is passed by the runner' do
+ it 'exposes release info' do
+ request_job info: { features: { multi_build_steps: true } }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.headers).not_to have_key('X-GitLab-Last-Update')
+ expect(json_response['steps']).to eq([
+ {
+ "name" => "script",
+ "script" => ["make changelog | tee release_changelog.txt"],
+ "timeout" => 3600,
+ "when" => "on_success",
+ "allow_failure" => false
+ },
+ {
+ "name" => "release",
+ "script" =>
+ ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""],
+ "timeout" => 3600,
+ "when" => "on_success",
+ "allow_failure" => false
+ }
+ ])
+ end
+ end
+
+ context 'when `multi_build_steps` is not passed by the runner' do
+ it 'drops the job' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+ end
+
+ context 'when job is made for merge request' do
+ let(:pipeline) { create(:ci_pipeline, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
+ let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
+ let(:merge_request) { create(:merge_request) }
+
+ it 'sets branch as ref_type' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['ref_type']).to eq('branch')
+ end
+
+ context 'when GIT_DEPTH is specified' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
+ end
+
+ it 'returns the overwritten git depth for merge request refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['depth']).to eq(1)
+ end
+ end
+ end
+
+ it 'updates runner info' do
+ expect { request_job }.to change { runner.reload.contacted_at }
+ end
+
+ %w(version revision platform architecture).each do |param|
+ context "when info parameter '#{param}' is present" do
+ let(:value) { "#{param}_value" }
+
+ it "updates provided Runner's parameter" do
+ request_job info: { param => value }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
+ end
+ end
+ end
+
+ it "sets the runner's ip_address" do
+ post api('/jobs/request'),
+ params: { token: runner.token },
+ headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(runner.reload.ip_address).to eq('123.222.123.222')
+ end
+
+ it "handles multiple X-Forwarded-For addresses" do
+ post api('/jobs/request'),
+ params: { token: runner.token },
+ headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(runner.reload.ip_address).to eq('123.222.123.222')
+ end
+
+ context 'when concurrently updating a job' do
+ before do
+ expect_any_instance_of(::Ci::Build).to receive(:run!)
+ .and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
+ end
+
+ it 'returns a conflict' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(response.headers).not_to have_key('X-GitLab-Last-Update')
+ end
+ end
+
+ context 'when project and pipeline have multiple jobs' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns dependent jobs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(test_job.id)
+ expect(json_response['dependencies'].count).to eq(2)
+ expect(json_response['dependencies']).to include(
+ { 'id' => job.id, 'name' => job.name, 'token' => job.token },
+ { 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
+ end
+ end
+
+          context 'when pipeline has jobs with artifacts' do
+ let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
+
+ before do
+ job.success
+ end
+
+ it 'returns dependent jobs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(test_job.id)
+ expect(json_response['dependencies'].count).to eq(1)
+ expect(json_response['dependencies']).to include(
+ { 'id' => job.id, 'name' => job.name, 'token' => job.token,
+ 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
+ end
+ end
+
+ context 'when explicit dependencies are defined' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:test_job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
+ stage: 'deploy', stage_idx: 1,
+ options: { script: ['bash'], dependencies: [job2.name] })
+ end
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns dependent jobs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(test_job.id)
+ expect(json_response['dependencies'].count).to eq(1)
+ expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
+ end
+ end
+
+ context 'when dependencies is an empty array' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:empty_dependencies_job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
+ stage: 'deploy', stage_idx: 1,
+ options: { script: ['bash'], dependencies: [] })
+ end
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns an empty array' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(empty_dependencies_job.id)
+ expect(json_response['dependencies'].count).to eq(0)
+ end
+ end
+
+ context 'when job has no tags' do
+ before do
+ job.update!(tags: [])
+ end
+
+ context 'when runner is allowed to pick untagged jobs' do
+ before do
+ runner.update_column(:run_untagged, true)
+ end
+
+ it 'picks job' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when runner is not allowed to pick untagged jobs' do
+ before do
+ runner.update_column(:run_untagged, false)
+ end
+
+ it_behaves_like 'no jobs available'
+ end
+ end
+
+ context 'when triggered job is available' do
+ let(:expected_variables) do
+ [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false },
+ { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false },
+ { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false },
+ { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }]
+ end
+
+ let(:trigger) { create(:ci_trigger, project: project) }
+ let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) }
+
+ before do
+ project.variables << ::Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
+ end
+
+ shared_examples 'expected variables behavior' do
+ it 'returns variables for triggers' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['variables']).to include(*expected_variables)
+ end
+ end
+
+ context 'when variables are stored in trigger_request' do
+ before do
+                trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' })
+ end
+
+ it_behaves_like 'expected variables behavior'
+ end
+
+ context 'when variables are stored in pipeline_variables' do
+ before do
+ create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
+ end
+
+ it_behaves_like 'expected variables behavior'
+ end
+ end
+
+ describe 'registry credentials support' do
+ let(:registry_url) { 'registry.example.com:5005' }
+ let(:registry_credentials) do
+ { 'type' => 'registry',
+ 'url' => registry_url,
+ 'username' => 'gitlab-ci-token',
+ 'password' => job.token }
+ end
+
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true, host_port: registry_url)
+ end
+
+ it 'sends registry credentials key' do
+ request_job
+
+ expect(json_response).to have_key('credentials')
+ expect(json_response['credentials']).to include(registry_credentials)
+ end
+ end
+
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false, host_port: registry_url)
+ end
+
+ it 'does not send registry credentials' do
+ request_job
+
+ expect(json_response).to have_key('credentials')
+ expect(json_response['credentials']).not_to include(registry_credentials)
+ end
+ end
+ end
+
+ describe 'timeout support' do
+ context 'when project specifies job timeout' do
+ let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }
+
+ it 'contains info about timeout taken from project' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
+ end
+
+ context 'when runner specifies lower timeout' do
+ let(:runner) { create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) }
+
+ it 'contains info about timeout overridden by runner' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
+ end
+ end
+
+ context 'when runner specifies bigger timeout' do
+ let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) }
+
+ it 'contains info about timeout not overridden by runner' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
+ end
+ end
+ end
+ end
+ end
+
+ describe 'port support' do
+ let(:job) { create(:ci_build, pipeline: pipeline, options: options) }
+
+ context 'when job image has ports' do
+ let(:options) do
+ {
+ image: {
+ name: 'ruby',
+ ports: [80]
+ },
+ services: ['mysql']
+ }
+ end
+
+ it 'returns the image ports' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => a_hash_including('name' => 'ruby', 'ports' => [{ 'number' => 80, 'protocol' => 'http', 'name' => 'default_port' }]),
+ 'services' => all(a_hash_including('name' => 'mysql')))
+ end
+ end
+
+          context 'when job service settings have ports' do
+ let(:options) do
+ {
+ image: 'ruby',
+ services: [
+ {
+ name: 'tomcat',
+ ports: [{ number: 8081, protocol: 'http', name: 'custom_port' }]
+ }
+ ]
+ }
+ end
+
+ it 'returns the service ports' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => a_hash_including('name' => 'ruby'),
+ 'services' => all(a_hash_including('name' => 'tomcat', 'ports' => [{ 'number' => 8081, 'protocol' => 'http', 'name' => 'custom_port' }])))
+ end
+ end
+ end
+
+ describe 'a job with excluded artifacts' do
+ context 'when excluded paths are defined' do
+ let(:job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'test',
+ stage: 'deploy', stage_idx: 1,
+ options: { artifacts: { paths: ['abc'], exclude: ['cde'] } })
+ end
+
+ context 'when a runner supports this feature' do
+ it 'exposes excluded paths when the feature is enabled' do
+ stub_feature_flags(ci_artifacts_exclude: true)
+
+ request_job info: { features: { artifacts_exclude: true } }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.dig('artifacts').first).to include('exclude' => ['cde'])
+ end
+
+ it 'does not expose excluded paths when the feature is disabled' do
+ stub_feature_flags(ci_artifacts_exclude: false)
+
+ request_job info: { features: { artifacts_exclude: true } }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.dig('artifacts').first).not_to have_key('exclude')
+ end
+ end
+
+ context 'when a runner does not support this feature' do
+ it 'does not expose the build at all' do
+ stub_feature_flags(ci_artifacts_exclude: true)
+
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+ end
+
+ it 'does not expose excluded paths when these are empty' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.dig('artifacts').first).not_to have_key('exclude')
+ end
+ end
+
+ def request_job(token = runner.token, **params)
+ new_params = params.merge(token: token, last_update: last_update)
+ post api('/jobs/request'), params: new_params.to_json, headers: { 'User-Agent' => user_agent, 'Content-Type': 'application/json' }
+ end
+ end
+
+ context 'for web-ide job' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:service) { ::Ci::CreateWebIdeTerminalService.new(project, user, ref: 'master').execute }
+ let(:pipeline) { service[:pipeline] }
+ let(:build) { pipeline.builds.first }
+ let(:job) { {} }
+ let(:config_content) do
+ 'terminal: { image: ruby, services: [mysql], before_script: [ls], tags: [tag-1], variables: { KEY: value } }'
+ end
+
+ before do
+ stub_webide_config_file(config_content)
+ project.add_maintainer(user)
+
+ pipeline
+ end
+
+ context 'when runner has matching tag' do
+ before do
+ runner.update!(tag_list: ['tag-1'])
+ end
+
+ it 'successfully picks job' do
+ request_job
+
+ build.reload
+
+ expect(build).to be_running
+ expect(build.runner).to eq(runner)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ "id" => build.id,
+ "variables" => include("key" => 'KEY', "value" => 'value', "public" => true, "masked" => false),
+ "image" => a_hash_including("name" => 'ruby'),
+ "services" => all(a_hash_including("name" => 'mysql')),
+ "job_info" => a_hash_including("name" => 'terminal', "stage" => 'terminal'))
+ end
+ end
+
+ context 'when runner does not have matching tags' do
+ it 'does not pick a job' do
+ request_job
+
+ build.reload
+
+ expect(build).to be_pending
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ def request_job(token = runner.token, **params)
+ post api('/jobs/request'), params: params.merge(token: token)
+ end
+ end
+ end
+ end
+end
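
The request specs above pin down the polling contract for POST /api/v4/jobs/request: 201 with a JSON job payload when a build is assigned, 204 plus an X-GitLab-Last-Update header when the queue is empty, 400/403 for missing or invalid tokens, and 409 on a concurrent update. The Ruby sketch below is only an illustration of that contract; the GITLAB_URL placeholder, the token argument, and the return-value handling are assumptions, not GitLab Runner code.

# Hypothetical polling client; endpoint and status codes mirror the specs above,
# everything else (URL, token handling, return values) is assumed for illustration.
require 'net/http'
require 'json'
require 'uri'

def request_job(gitlab_url, runner_token, last_update: nil, info: {})
  uri  = URI("#{gitlab_url}/api/v4/jobs/request")
  body = { token: runner_token, last_update: last_update, info: info }.to_json
  response = Net::HTTP.post(uri, body, 'Content-Type' => 'application/json')

  case response.code.to_i
  when 201 then JSON.parse(response.body)         # job payload: id, token, git_info, steps, artifacts, ...
  when 204 then response['X-GitLab-Last-Update']  # no job yet; keep the marker for the next poll
  else response.code.to_i                         # 400 missing token, 403 invalid token, 409 conflict
  end
end

A real runner would persist the returned X-GitLab-Last-Update value and send it back as last_update on the next poll, which is the behavior the 'no jobs available' shared examples check.
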
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
new file mode 100644
index 00000000000..1980c1a9f51
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -0,0 +1,292 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'PATCH /api/v4/jobs/:id/trace' do
+ let(:job) do
+ create(:ci_build, :running, :trace_live,
+ project: project, user: user, runner_id: runner.id, pipeline: pipeline)
+ end
+
+ let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
+ let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
+ let(:update_interval) { 10.seconds.to_i }
+
+ before do
+ initial_patch_the_trace
+ end
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/trace' do
+ let(:send_request) { patch_the_trace }
+ end
+
+ it 'updates runner info' do
+ runner.update!(contacted_at: 1.year.ago)
+
+ expect { patch_the_trace }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when request is valid' do
+ it 'gets correct response' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ expect(response.header).to have_key 'Range'
+ expect(response.header).to have_key 'Job-Status'
+ expect(response.header).to have_key 'X-GitLab-Trace-Update-Interval'
+ end
+
+ context 'when job has been updated recently' do
+ it { expect { patch_the_trace }.not_to change { job.updated_at }}
+
+ it "changes the job's trace" do
+ patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
+ end
+
+ context 'when Runner makes a force-patch' do
+ it { expect { force_patch_the_trace }.not_to change { job.updated_at }}
+
+ it "doesn't change the build.trace" do
+ force_patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ end
+ end
+ end
+
+ context 'when job was not updated recently' do
+ let(:update_interval) { 15.minutes.to_i }
+
+ it { expect { patch_the_trace }.to change { job.updated_at } }
+
+ it 'changes the job.trace' do
+ patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
+ end
+
+ context 'when Runner makes a force-patch' do
+ it { expect { force_patch_the_trace }.to change { job.updated_at } }
+
+ it "doesn't change the job.trace" do
+ force_patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ end
+ end
+ end
+
+ context 'when project for the build has been deleted' do
+ let(:job) do
+ create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
+ job.project.update!(pending_delete: true)
+ end
+ end
+
+ it 'responds with forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when trace is patched' do
+ before do
+ patch_the_trace
+ end
+
+ it 'has valid trace' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
+ end
+
+ context 'when job is cancelled' do
+ before do
+ job.cancel
+ end
+
+ context 'when trace is patched' do
+ before do
+ patch_the_trace
+ end
+
+              it 'returns Forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when redis data are flushed' do
+ before do
+ redis_shared_state_cleanup!
+ end
+
+ it 'has empty trace' do
+ expect(job.reload.trace.raw).to eq ''
+ end
+
+ context 'when we perform partial patch' do
+ before do
+ patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
+ end
+
+ it 'returns an error' do
+ expect(response).to have_gitlab_http_status(:range_not_satisfiable)
+ expect(response.header['Range']).to eq('0-0')
+ end
+ end
+
+ context 'when we resend full trace' do
+ before do
+ patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
+ end
+
+ it 'succeeds with updating trace' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
+ end
+ end
+ end
+ end
+
+ context 'when concurrent update of trace is happening' do
+ before do
+ job.trace.write('wb') do
+ patch_the_trace
+ end
+ end
+
+ it 'returns that operation conflicts' do
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
+
+ context 'when the job is canceled' do
+ before do
+ job.cancel
+ patch_the_trace
+ end
+
+ it 'receives status in header' do
+ expect(response.header['Job-Status']).to eq 'canceled'
+ end
+ end
+
+ context 'when build trace is being watched' do
+ before do
+ job.trace.being_watched!
+ end
+
+ it 'returns X-GitLab-Trace-Update-Interval as 3' do
+ patch_the_trace
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('3')
+ end
+ end
+
+ context 'when build trace is not being watched' do
+ it 'returns X-GitLab-Trace-Update-Interval as 30' do
+ patch_the_trace
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
+ end
+ end
+ end
+
+ context 'when Runner makes a force-patch' do
+ before do
+ force_patch_the_trace
+ end
+
+ it 'gets correct response' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ expect(response.header).to have_key 'Range'
+ expect(response.header).to have_key 'Job-Status'
+ end
+ end
+
+ context 'when content-range start is too big' do
+ let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }
+
+ it 'gets 416 error response with range headers' do
+ expect(response).to have_gitlab_http_status(:range_not_satisfiable)
+ expect(response.header).to have_key 'Range'
+ expect(response.header['Range']).to eq '0-11'
+ end
+ end
+
+ context 'when content-range start is too small' do
+ let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) }
+
+ it 'gets 416 error response with range headers' do
+ expect(response).to have_gitlab_http_status(:range_not_satisfiable)
+ expect(response.header).to have_key 'Range'
+ expect(response.header['Range']).to eq '0-11'
+ end
+ end
+
+ context 'when Content-Range header is missing' do
+ let(:headers_with_range) { headers }
+
+ it { expect(response).to have_gitlab_http_status(:bad_request) }
+ end
+
+      context 'when job has been erased' do
+ let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
+
+ it { expect(response).to have_gitlab_http_status(:forbidden) }
+ end
+
+ def patch_the_trace(content = ' appended', request_headers = nil)
+ unless request_headers
+ job.trace.read do |stream|
+ offset = stream.size
+ limit = offset + content.length - 1
+ request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
+ end
+ end
+
+ Timecop.travel(job.updated_at + update_interval) do
+ patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
+ job.reload
+ end
+ end
+
+ def initial_patch_the_trace
+ patch_the_trace(' appended', headers_with_range)
+ end
+
+ def force_patch_the_trace
+ 2.times { patch_the_trace('') }
+ end
+ end
+ end
+end
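
The trace specs above describe incremental log upload: each PATCH to /api/v4/jobs/:id/trace carries a Content-Range of the form offset-limit, 202 acknowledges the chunk, 416 comes back with a Range header pointing at where to resume, and the Job-Status and X-GitLab-Trace-Update-Interval response headers drive cancellation and polling frequency. A minimal sketch of such a sender follows; GITLAB_URL is a placeholder and the literal JOB-TOKEN header name is an assumption based on the JOB_TOKEN_HEADER constant used in the spec.

# Hypothetical trace uploader; the Content-Range math follows the patch_the_trace helper above,
# the JOB-TOKEN header name is assumed, everything else is illustrative.
require 'net/http'
require 'uri'

def patch_trace(gitlab_url, job_id, job_token, chunk, offset)
  uri = URI("#{gitlab_url}/api/v4/jobs/#{job_id}/trace")
  request = Net::HTTP::Patch.new(uri)
  request['JOB-TOKEN']     = job_token
  request['Content-Type']  = 'text/plain'
  request['Content-Range'] = "#{offset}-#{offset + chunk.bytesize - 1}"
  request.body = chunk

  Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
    response = http.request(request)
    # 202: chunk accepted; 416: out of sync, resume from the Range header;
    # Job-Status and X-GitLab-Trace-Update-Interval steer cancellation and pacing.
    [response.code.to_i, response['Range'], response['X-GitLab-Trace-Update-Interval']]
  end
end
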
diff --git a/spec/requests/api/ci/runner/runners_delete_spec.rb b/spec/requests/api/ci/runner/runners_delete_spec.rb
new file mode 100644
index 00000000000..75960a1a1c0
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_delete_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/runners' do
+ describe 'DELETE /api/v4/runners' do
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ delete api('/runners')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ delete api('/runners'), params: { token: 'invalid' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ let(:runner) { create(:ci_runner) }
+
+ it 'deletes Runner' do
+ delete api('/runners'), params: { token: runner.token }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(::Ci::Runner.count).to eq(0)
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api('/runners') }
+ let(:params) { { token: runner.token } }
+ end
+ end
+ end
+ end
+end
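
Per the delete specs above, unregistering is a DELETE to /api/v4/runners authenticated by the runner's own token: 204 on success, 400 without a token, 403 with an invalid one. A minimal sketch under the same GITLAB_URL assumption:

# Hypothetical unregister call mirroring the delete specs; the URL is a placeholder.
require 'net/http'
require 'uri'

def unregister_runner(gitlab_url, runner_token)
  uri = URI("#{gitlab_url}/api/v4/runners")
  uri.query = URI.encode_www_form(token: runner_token)
  request = Net::HTTP::Delete.new(uri)
  response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') { |http| http.request(request) }
  response.code.to_i == 204 # 400 when the token is missing, 403 when it is invalid
end
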
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
new file mode 100644
index 00000000000..7c362fae7d2
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -0,0 +1,250 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/runners' do
+ describe 'POST /api/v4/runners' do
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ post api('/runners')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ post api('/runners'), params: { token: 'invalid' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ it 'creates runner with default values' do
+ post api('/runners'), params: { token: registration_token }
+
+ runner = ::Ci::Runner.first
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(runner.id)
+ expect(json_response['token']).to eq(runner.token)
+ expect(runner.run_untagged).to be true
+ expect(runner.active).to be true
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner).to be_instance_type
+ end
+
+ context 'when project token is used' do
+ let(:project) { create(:project) }
+
+ it 'creates project runner' do
+ post api('/runners'), params: { token: project.runners_token }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(project.runners.size).to eq(1)
+ runner = ::Ci::Runner.first
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner.token).not_to eq(project.runners_token)
+ expect(runner).to be_project_type
+ end
+ end
+
+ context 'when group token is used' do
+ let(:group) { create(:group) }
+
+ it 'creates a group runner' do
+ post api('/runners'), params: { token: group.runners_token }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(group.runners.reload.size).to eq(1)
+ runner = ::Ci::Runner.first
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner.token).not_to eq(group.runners_token)
+ expect(runner).to be_group_type
+ end
+ end
+ end
+
+ context 'when runner description is provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ description: 'server.hostname'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.description).to eq('server.hostname')
+ end
+ end
+
+ context 'when runner tags are provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ tag_list: 'tag1, tag2'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
+ end
+ end
+
+ context 'when option for running untagged jobs is provided' do
+ context 'when tags are provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ run_untagged: false,
+ tag_list: ['tag']
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.run_untagged).to be false
+ expect(::Ci::Runner.first.tag_list.sort).to eq(['tag'])
+ end
+ end
+
+ context 'when tags are not provided' do
+ it 'returns 400 error' do
+ post api('/runners'), params: {
+ token: registration_token,
+ run_untagged: false
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include(
+ 'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
+ end
+ end
+ end
+
+ context 'when option for locking Runner is provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ locked: true
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.locked).to be true
+ end
+ end
+
+ context 'when option for activating a Runner is provided' do
+ context 'when active is set to true' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ active: true
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.active).to be true
+ end
+ end
+
+ context 'when active is set to false' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ active: false
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.active).to be false
+ end
+ end
+ end
+
+ context 'when access_level is provided for Runner' do
+ context 'when access_level is set to ref_protected' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ access_level: 'ref_protected'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.ref_protected?).to be true
+ end
+ end
+
+ context 'when access_level is set to not_protected' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ access_level: 'not_protected'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.ref_protected?).to be false
+ end
+ end
+ end
+
+ context 'when maximum job timeout is specified' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ maximum_timeout: 9000
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.maximum_timeout).to eq(9000)
+ end
+
+ context 'when maximum job timeout is empty' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ maximum_timeout: ''
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.maximum_timeout).to be_nil
+ end
+ end
+ end
+
+ %w(name version revision platform architecture).each do |param|
+        context "when info parameter '#{param}' is present" do
+ let(:value) { "#{param}_value" }
+
+ it "updates provided Runner's parameter" do
+ post api('/runners'), params: {
+ token: registration_token,
+ info: { param => value }
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
+ end
+ end
+ end
+
+ it "sets the runner's ip_address" do
+ post api('/runners'),
+ params: { token: registration_token },
+ headers: { 'X-Forwarded-For' => '123.111.123.111' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.ip_address).to eq('123.111.123.111')
+ end
+ end
+ end
+end
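
Registration, as exercised above, is a single POST to /api/v4/runners with the registration token plus optional attributes (description, tag_list, run_untagged, locked, active, access_level, maximum_timeout); 201 returns the new runner's id and its own authentication token, which is distinct from the registration token. A minimal sketch with a GITLAB_URL placeholder and only a few of those attributes:

# Hypothetical registration call; parameters mirror the specs above, the URL is a placeholder.
require 'net/http'
require 'json'
require 'uri'

def register_runner(gitlab_url, registration_token, description: nil, tag_list: nil, run_untagged: nil)
  uri    = URI("#{gitlab_url}/api/v4/runners")
  params = { 'token' => registration_token, 'description' => description,
             'tag_list' => tag_list, 'run_untagged' => run_untagged }.compact
  response = Net::HTTP.post_form(uri, params)
  raise "registration failed (#{response.code})" unless response.code.to_i == 201

  JSON.parse(response.body) # { 'id' => ..., 'token' => ... }, the per-runner authentication token
end
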
diff --git a/spec/requests/api/ci/runner/runners_verify_post_spec.rb b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
new file mode 100644
index 00000000000..e2f5f9b2d68
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/runners' do
+ describe 'POST /api/v4/runners/verify' do
+ let(:runner) { create(:ci_runner) }
+
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ post api('/runners/verify')
+
+ expect(response).to have_gitlab_http_status :bad_request
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ post api('/runners/verify'), params: { token: 'invalid-token' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ it 'verifies Runner credentials' do
+ post api('/runners/verify'), params: { token: runner.token }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+ end
+end
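
Finally, the verify specs above reduce token validation to a bare POST /api/v4/runners/verify: 200 when the runner token is valid, 403 when it is not, 400 when it is missing. A one-method sketch, again assuming a GITLAB_URL placeholder:

# Hypothetical token check mirroring the verify specs; the URL is a placeholder.
require 'net/http'
require 'uri'

def runner_token_valid?(gitlab_url, runner_token)
  uri = URI("#{gitlab_url}/api/v4/runners/verify")
  # 200 means the token is valid; 403 revoked or unknown; 400 missing entirely.
  Net::HTTP.post_form(uri, 'token' => runner_token).code.to_i == 200
end
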
diff --git a/spec/requests/api/ci/runner_spec.rb b/spec/requests/api/ci/runner_spec.rb
deleted file mode 100644
index c8718309bf2..00000000000
--- a/spec/requests/api/ci/runner_spec.rb
+++ /dev/null
@@ -1,2474 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
- include StubGitlabCalls
- include RedisHelpers
- include WorkhorseHelpers
-
- let(:registration_token) { 'abcdefg123456' }
-
- before do
- stub_feature_flags(ci_enable_live_trace: true)
- stub_gitlab_calls
- stub_application_setting(runners_registration_token: registration_token)
- allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
- end
-
- describe '/api/v4/runners' do
- describe 'POST /api/v4/runners' do
- context 'when no token is provided' do
- it 'returns 400 error' do
- post api('/runners')
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- post api('/runners'), params: { token: 'invalid' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- it 'creates runner with default values' do
- post api('/runners'), params: { token: registration_token }
-
- runner = ::Ci::Runner.first
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(runner.id)
- expect(json_response['token']).to eq(runner.token)
- expect(runner.run_untagged).to be true
- expect(runner.active).to be true
- expect(runner.token).not_to eq(registration_token)
- expect(runner).to be_instance_type
- end
-
- context 'when project token is used' do
- let(:project) { create(:project) }
-
- it 'creates project runner' do
- post api('/runners'), params: { token: project.runners_token }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(project.runners.size).to eq(1)
- runner = ::Ci::Runner.first
- expect(runner.token).not_to eq(registration_token)
- expect(runner.token).not_to eq(project.runners_token)
- expect(runner).to be_project_type
- end
- end
-
- context 'when group token is used' do
- let(:group) { create(:group) }
-
- it 'creates a group runner' do
- post api('/runners'), params: { token: group.runners_token }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(group.runners.reload.size).to eq(1)
- runner = ::Ci::Runner.first
- expect(runner.token).not_to eq(registration_token)
- expect(runner.token).not_to eq(group.runners_token)
- expect(runner).to be_group_type
- end
- end
- end
-
- context 'when runner description is provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- description: 'server.hostname'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.description).to eq('server.hostname')
- end
- end
-
- context 'when runner tags are provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- tag_list: 'tag1, tag2'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
- end
- end
-
- context 'when option for running untagged jobs is provided' do
- context 'when tags are provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- run_untagged: false,
- tag_list: ['tag']
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.run_untagged).to be false
- expect(::Ci::Runner.first.tag_list.sort).to eq(['tag'])
- end
- end
-
- context 'when tags are not provided' do
- it 'returns 400 error' do
- post api('/runners'), params: {
- token: registration_token,
- run_untagged: false
- }
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include(
- 'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
- end
- end
- end
-
- context 'when option for locking Runner is provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- locked: true
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.locked).to be true
- end
- end
-
- context 'when option for activating a Runner is provided' do
- context 'when active is set to true' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- active: true
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.active).to be true
- end
- end
-
- context 'when active is set to false' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- active: false
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.active).to be false
- end
- end
- end
-
- context 'when access_level is provided for Runner' do
- context 'when access_level is set to ref_protected' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- access_level: 'ref_protected'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.ref_protected?).to be true
- end
- end
-
- context 'when access_level is set to not_protected' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- access_level: 'not_protected'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.ref_protected?).to be false
- end
- end
- end
-
- context 'when maximum job timeout is specified' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- maximum_timeout: 9000
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.maximum_timeout).to eq(9000)
- end
-
- context 'when maximum job timeout is empty' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- maximum_timeout: ''
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.maximum_timeout).to be_nil
- end
- end
- end
-
- %w(name version revision platform architecture).each do |param|
- context "when info parameter '#{param}' info is present" do
- let(:value) { "#{param}_value" }
-
- it "updates provided Runner's parameter" do
- post api('/runners'), params: {
- token: registration_token,
- info: { param => value }
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
- end
- end
- end
-
- it "sets the runner's ip_address" do
- post api('/runners'),
- params: { token: registration_token },
- headers: { 'X-Forwarded-For' => '123.111.123.111' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.ip_address).to eq('123.111.123.111')
- end
- end
-
- describe 'DELETE /api/v4/runners' do
- context 'when no token is provided' do
- it 'returns 400 error' do
- delete api('/runners')
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- delete api('/runners'), params: { token: 'invalid' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- let(:runner) { create(:ci_runner) }
-
- it 'deletes Runner' do
- delete api('/runners'), params: { token: runner.token }
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(::Ci::Runner.count).to eq(0)
- end
-
- it_behaves_like '412 response' do
- let(:request) { api('/runners') }
- let(:params) { { token: runner.token } }
- end
- end
- end
-
- describe 'POST /api/v4/runners/verify' do
- let(:runner) { create(:ci_runner) }
-
- context 'when no token is provided' do
- it 'returns 400 error' do
- post api('/runners/verify')
-
- expect(response).to have_gitlab_http_status :bad_request
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- post api('/runners/verify'), params: { token: 'invalid-token' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- it 'verifies Runner credentials' do
- post api('/runners/verify'), params: { token: runner.token }
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
- end
-
- describe '/api/v4/jobs' do
- shared_examples 'application context metadata' do |api_route|
- it 'contains correct context metadata' do
- # Avoids popping the context from the thread so we can
- # check its content after the request.
- allow(Labkit::Context).to receive(:pop)
-
- send_request
-
- Labkit::Context.with_context do |context|
- expected_context = {
- 'meta.caller_id' => api_route,
- 'meta.user' => job.user.username,
- 'meta.project' => job.project.full_path,
- 'meta.root_namespace' => job.project.full_path_components.first
- }
-
- expect(context.to_h).to include(expected_context)
- end
- end
- end
-
- let(:root_namespace) { create(:namespace) }
- let(:namespace) { create(:namespace, parent: root_namespace) }
- let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
- let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
- let(:runner) { create(:ci_runner, :project, projects: [project]) }
- let(:user) { create(:user) }
- let(:job) do
- create(:ci_build, :artifacts, :extended_options,
- pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
- end
-
- describe 'POST /api/v4/jobs/request' do
- let!(:last_update) {}
- let!(:new_update) { }
- let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
-
- before do
- job
- stub_container_registry_config(enabled: false)
- end
-
- shared_examples 'no jobs available' do
- before do
- request_job
- end
-
- context 'when runner sends version in User-Agent' do
- context 'for stable version' do
- it 'gives 204 and set X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header).to have_key('X-GitLab-Last-Update')
- end
- end
-
- context 'when last_update is up-to-date' do
- let(:last_update) { runner.ensure_runner_queue_value }
-
- it 'gives 204 and set the same X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
- end
- end
-
- context 'when last_update is outdated' do
- let(:last_update) { runner.ensure_runner_queue_value }
- let(:new_update) { runner.tick_runner_queue }
-
- it 'gives 204 and set a new X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
- end
- end
-
- context 'when beta version is sent' do
- let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
-
- it { expect(response).to have_gitlab_http_status(:no_content) }
- end
-
- context 'when pre-9-0 version is sent' do
- let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
-
- it { expect(response).to have_gitlab_http_status(:no_content) }
- end
-
- context 'when pre-9-0 beta version is sent' do
- let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
-
- it { expect(response).to have_gitlab_http_status(:no_content) }
- end
- end
- end
-
- context 'when no token is provided' do
- it 'returns 400 error' do
- post api('/jobs/request')
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- post api('/jobs/request'), params: { token: 'invalid' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- context 'when Runner is not active' do
- let(:runner) { create(:ci_runner, :inactive) }
- let(:update_value) { runner.ensure_runner_queue_value }
-
- it 'returns 204 error' do
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
- end
- end
-
- context 'when jobs are finished' do
- before do
- job.success
- end
-
- it_behaves_like 'no jobs available'
- end
-
- context 'when other projects have pending jobs' do
- before do
- job.success
- create(:ci_build, :pending)
- end
-
- it_behaves_like 'no jobs available'
- end
-
- context 'when shared runner requests job for project without shared_runners_enabled' do
- let(:runner) { create(:ci_runner, :instance) }
-
- it_behaves_like 'no jobs available'
- end
-
- context 'when there is a pending job' do
- let(:expected_job_info) do
- { 'name' => job.name,
- 'stage' => job.stage,
- 'project_id' => job.project.id,
- 'project_name' => job.project.name }
- end
-
- let(:expected_git_info) do
- { 'repo_url' => job.repo_url,
- 'ref' => job.ref,
- 'sha' => job.sha,
- 'before_sha' => job.before_sha,
- 'ref_type' => 'branch',
- 'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- "+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
- 'depth' => project.ci_default_git_depth }
- end
-
- let(:expected_steps) do
- [{ 'name' => 'script',
- 'script' => %w(echo),
- 'timeout' => job.metadata_timeout,
- 'when' => 'on_success',
- 'allow_failure' => false },
- { 'name' => 'after_script',
- 'script' => %w(ls date),
- 'timeout' => job.metadata_timeout,
- 'when' => 'always',
- 'allow_failure' => true }]
- end
-
- let(:expected_variables) do
- [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
- { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
- { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }]
- end
-
- let(:expected_artifacts) do
- [{ 'name' => 'artifacts_file',
- 'untracked' => false,
- 'paths' => %w(out/),
- 'when' => 'always',
- 'expire_in' => '7d',
- "artifact_type" => "archive",
- "artifact_format" => "zip" }]
- end
-
- let(:expected_cache) do
- [{ 'key' => 'cache_key',
- 'untracked' => false,
- 'paths' => ['vendor/*'],
- 'policy' => 'pull-push' }]
- end
-
- let(:expected_features) { { 'trace_sections' => true } }
-
- it 'picks a job' do
- request_job info: { platform: :darwin }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response.headers['Content-Type']).to eq('application/json')
- expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- expect(runner.reload.platform).to eq('darwin')
- expect(json_response['id']).to eq(job.id)
- expect(json_response['token']).to eq(job.token)
- expect(json_response['job_info']).to eq(expected_job_info)
- expect(json_response['git_info']).to eq(expected_git_info)
- expect(json_response['image']).to eq({ 'name' => 'ruby:2.7', 'entrypoint' => '/bin/sh', 'ports' => [] })
- expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
- 'alias' => nil, 'command' => nil, 'ports' => [] },
- { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
- 'alias' => 'docker', 'command' => 'sleep 30', 'ports' => [] }])
- expect(json_response['steps']).to eq(expected_steps)
- expect(json_response['artifacts']).to eq(expected_artifacts)
- expect(json_response['cache']).to eq(expected_cache)
- expect(json_response['variables']).to include(*expected_variables)
- expect(json_response['features']).to eq(expected_features)
- end
-
- it 'creates persistent ref' do
- expect_any_instance_of(::Ci::PersistentRef).to receive(:create_ref)
- .with(job.sha, "refs/#{Repository::REF_PIPELINES}/#{job.commit_id}")
-
- request_job info: { platform: :darwin }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(job.id)
- end
-
- context 'when job is made for tag' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
-
- it 'sets branch as ref_type' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['ref_type']).to eq('tag')
- end
-
- context 'when GIT_DEPTH is specified' do
- before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}")
- end
- end
-
- context 'when a Gitaly exception is thrown during response' do
- before do
- allow_next_instance_of(Ci::BuildRunnerPresenter) do |instance|
- allow(instance).to receive(:artifacts).and_raise(GRPC::DeadlineExceeded)
- end
- end
-
- it 'fails the job as a scheduler failure' do
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(job.reload.failed?).to be_truthy
- expect(job.failure_reason).to eq('scheduler_failure')
- expect(job.runner_id).to eq(runner.id)
- expect(job.runner_session).to be_nil
- end
- end
-
- context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
- before do
- project.update!(ci_default_git_depth: nil)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- '+refs/tags/*:refs/tags/*',
- '+refs/heads/*:refs/remotes/origin/*')
- end
- end
- end
-
- context 'when job filtered by job_age' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago) }
-
- context 'job is queued less than job_age parameter' do
- let(:job_age) { 120 }
-
- it 'gives 204' do
- request_job(job_age: job_age)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- context 'job is queued more than job_age parameter' do
- let(:job_age) { 30 }
-
- it 'picks a job' do
- request_job(job_age: job_age)
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
- end
-
- context 'when job is made for branch' do
- it 'sets tag as ref_type' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['ref_type']).to eq('branch')
- end
-
- context 'when GIT_DEPTH is specified' do
- before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}")
- end
- end
-
- context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
- before do
- project.update!(ci_default_git_depth: nil)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- '+refs/tags/*:refs/tags/*',
- '+refs/heads/*:refs/remotes/origin/*')
- end
- end
- end
-
- context 'when job is for a release' do
- let!(:job) { create(:ci_build, :release_options, pipeline: pipeline) }
-
- context 'when `multi_build_steps` is passed by the runner' do
- it 'exposes release info' do
- request_job info: { features: { multi_build_steps: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- expect(json_response['steps']).to eq([
- {
- "name" => "script",
- "script" => ["make changelog | tee release_changelog.txt"],
- "timeout" => 3600,
- "when" => "on_success",
- "allow_failure" => false
- },
- {
- "name" => "release",
- "script" =>
- ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""],
- "timeout" => 3600,
- "when" => "on_success",
- "allow_failure" => false
- }
- ])
- end
- end
-
- context 'when `multi_build_steps` is not passed by the runner' do
- it 'drops the job' do
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
- end
-
- context 'when job is made for merge request' do
- let(:pipeline) { create(:ci_pipeline, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
- let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
- let(:merge_request) { create(:merge_request) }
-
- it 'sets branch as ref_type' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['ref_type']).to eq('branch')
- end
-
- context 'when GIT_DEPTH is specified' do
- before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
- end
-
- it 'returns the overwritten git depth for merge request refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['depth']).to eq(1)
- end
- end
- end
-
- it 'updates runner info' do
- expect { request_job }.to change { runner.reload.contacted_at }
- end
-
- %w(version revision platform architecture).each do |param|
- context "when info parameter '#{param}' is present" do
- let(:value) { "#{param}_value" }
-
- it "updates provided Runner's parameter" do
- request_job info: { param => value }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
- end
- end
- end
-
- it "sets the runner's ip_address" do
- post api('/jobs/request'),
- params: { token: runner.token },
- headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(runner.reload.ip_address).to eq('123.222.123.222')
- end
-
- it "handles multiple X-Forwarded-For addresses" do
- post api('/jobs/request'),
- params: { token: runner.token },
- headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(runner.reload.ip_address).to eq('123.222.123.222')
- end
-
- context 'when concurrently updating a job' do
- before do
- expect_any_instance_of(::Ci::Build).to receive(:run!)
- .and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
- end
-
- it 'returns a conflict' do
- request_job
-
- expect(response).to have_gitlab_http_status(:conflict)
- expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- end
- end
-
- context 'when project and pipeline have multiple jobs' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
- let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
-
- before do
- job.success
- job2.success
- end
-
- it 'returns dependent jobs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(test_job.id)
- expect(json_response['dependencies'].count).to eq(2)
- expect(json_response['dependencies']).to include(
- { 'id' => job.id, 'name' => job.name, 'token' => job.token },
- { 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
- end
- end
-
- context 'when pipeline has jobs with artifacts' do
- let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
-
- before do
- job.success
- end
-
- it 'returns dependent jobs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(test_job.id)
- expect(json_response['dependencies'].count).to eq(1)
- expect(json_response['dependencies']).to include(
- { 'id' => job.id, 'name' => job.name, 'token' => job.token,
- 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
- end
- end
-
- context 'when explicit dependencies are defined' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
- let!(:test_job) do
- create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
- stage: 'deploy', stage_idx: 1,
- options: { script: ['bash'], dependencies: [job2.name] })
- end
-
- before do
- job.success
- job2.success
- end
-
- it 'returns dependent jobs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(test_job.id)
- expect(json_response['dependencies'].count).to eq(1)
- expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
- end
- end
-
- context 'when dependencies is an empty array' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
- let!(:empty_dependencies_job) do
- create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
- stage: 'deploy', stage_idx: 1,
- options: { script: ['bash'], dependencies: [] })
- end
-
- before do
- job.success
- job2.success
- end
-
- it 'returns an empty array' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(empty_dependencies_job.id)
- expect(json_response['dependencies'].count).to eq(0)
- end
- end
-
- context 'when job has no tags' do
- before do
- job.update(tags: [])
- end
-
- context 'when runner is allowed to pick untagged jobs' do
- before do
- runner.update_column(:run_untagged, true)
- end
-
- it 'picks job' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
-
- context 'when runner is not allowed to pick untagged jobs' do
- before do
- runner.update_column(:run_untagged, false)
- end
-
- it_behaves_like 'no jobs available'
- end
- end
-
- context 'when triggered job is available' do
- let(:expected_variables) do
- [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
- { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
- { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false },
- { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false },
- { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false },
- { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }]
- end
-
- let(:trigger) { create(:ci_trigger, project: project) }
- let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) }
-
- before do
- project.variables << ::Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
- end
-
- shared_examples 'expected variables behavior' do
- it 'returns variables for triggers' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['variables']).to include(*expected_variables)
- end
- end
-
- context 'when variables are stored in trigger_request' do
- before do
- trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' })
- end
-
- it_behaves_like 'expected variables behavior'
- end
-
- context 'when variables are stored in pipeline_variables' do
- before do
- create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
- end
-
- it_behaves_like 'expected variables behavior'
- end
- end
-
- describe 'registry credentials support' do
- let(:registry_url) { 'registry.example.com:5005' }
- let(:registry_credentials) do
- { 'type' => 'registry',
- 'url' => registry_url,
- 'username' => 'gitlab-ci-token',
- 'password' => job.token }
- end
-
- context 'when registry is enabled' do
- before do
- stub_container_registry_config(enabled: true, host_port: registry_url)
- end
-
- it 'sends registry credentials key' do
- request_job
-
- expect(json_response).to have_key('credentials')
- expect(json_response['credentials']).to include(registry_credentials)
- end
- end
-
- context 'when registry is disabled' do
- before do
- stub_container_registry_config(enabled: false, host_port: registry_url)
- end
-
- it 'does not send registry credentials' do
- request_job
-
- expect(json_response).to have_key('credentials')
- expect(json_response['credentials']).not_to include(registry_credentials)
- end
- end
- end
-
- describe 'timeout support' do
- context 'when project specifies job timeout' do
- let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }
-
- it 'contains info about timeout taken from project' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
- end
-
- context 'when runner specifies lower timeout' do
- let(:runner) { create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) }
-
- it 'contains info about timeout overridden by runner' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
- end
- end
-
- context 'when runner specifies bigger timeout' do
- let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) }
-
- it 'contains info about timeout not overridden by runner' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
- end
- end
- end
- end
- end
-
- describe 'port support' do
- let(:job) { create(:ci_build, pipeline: pipeline, options: options) }
-
- context 'when job image has ports' do
- let(:options) do
- {
- image: {
- name: 'ruby',
- ports: [80]
- },
- services: ['mysql']
- }
- end
-
- it 'returns the image ports' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to include(
- 'id' => job.id,
- 'image' => a_hash_including('name' => 'ruby', 'ports' => [{ 'number' => 80, 'protocol' => 'http', 'name' => 'default_port' }]),
- 'services' => all(a_hash_including('name' => 'mysql')))
- end
- end
-
- context 'when job services settings has ports' do
- let(:options) do
- {
- image: 'ruby',
- services: [
- {
- name: 'tomcat',
- ports: [{ number: 8081, protocol: 'http', name: 'custom_port' }]
- }
- ]
- }
- end
-
- it 'returns the service ports' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to include(
- 'id' => job.id,
- 'image' => a_hash_including('name' => 'ruby'),
- 'services' => all(a_hash_including('name' => 'tomcat', 'ports' => [{ 'number' => 8081, 'protocol' => 'http', 'name' => 'custom_port' }])))
- end
- end
- end
-
- describe 'a job with excluded artifacts' do
- context 'when excluded paths are defined' do
- let(:job) do
- create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'test',
- stage: 'deploy', stage_idx: 1,
- options: { artifacts: { paths: ['abc'], exclude: ['cde'] } })
- end
-
- context 'when a runner supports this feature' do
- it 'exposes excluded paths when the feature is enabled' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
- request_job info: { features: { artifacts_exclude: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).to include('exclude' => ['cde'])
- end
-
- it 'does not expose excluded paths when the feature is disabled' do
- stub_feature_flags(ci_artifacts_exclude: false)
-
- request_job info: { features: { artifacts_exclude: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).not_to have_key('exclude')
- end
- end
-
- context 'when a runner does not support this feature' do
- it 'does not expose the build at all' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
- end
-
- it 'does not expose excluded paths when these are empty' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).not_to have_key('exclude')
- end
- end
-
- def request_job(token = runner.token, **params)
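- # Posts to the job request endpoint as JSON, merging the runner token and
- # the last_update value used by the surrounding examples.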
- new_params = params.merge(token: token, last_update: last_update)
- post api('/jobs/request'), params: new_params.to_json, headers: { 'User-Agent' => user_agent, 'Content-Type': 'application/json' }
- end
- end
-
- context 'for web-ide job' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
-
- let(:runner) { create(:ci_runner, :project, projects: [project]) }
- let(:service) { ::Ci::CreateWebIdeTerminalService.new(project, user, ref: 'master').execute }
- let(:pipeline) { service[:pipeline] }
- let(:build) { pipeline.builds.first }
- let(:job) { {} }
- let(:config_content) do
- 'terminal: { image: ruby, services: [mysql], before_script: [ls], tags: [tag-1], variables: { KEY: value } }'
- end
-
- before do
- stub_webide_config_file(config_content)
- project.add_maintainer(user)
-
- pipeline
- end
-
- context 'when runner has matching tag' do
- before do
- runner.update!(tag_list: ['tag-1'])
- end
-
- it 'successfully picks job' do
- request_job
-
- build.reload
-
- expect(build).to be_running
- expect(build.runner).to eq(runner)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to include(
- "id" => build.id,
- "variables" => include("key" => 'KEY', "value" => 'value', "public" => true, "masked" => false),
- "image" => a_hash_including("name" => 'ruby'),
- "services" => all(a_hash_including("name" => 'mysql')),
- "job_info" => a_hash_including("name" => 'terminal', "stage" => 'terminal'))
- end
- end
-
- context 'when runner does not have matching tags' do
- it 'does not pick a job' do
- request_job
-
- build.reload
-
- expect(build).to be_pending
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- def request_job(token = runner.token, **params)
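- # Posts the job request with the runner token; unlike the main helper,
- # params are sent as form data rather than JSON.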
- post api('/jobs/request'), params: params.merge(token: token)
- end
- end
- end
-
- describe 'PUT /api/v4/jobs/:id' do
- let(:job) do
- create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
- end
-
- before do
- job.run!
- end
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id' do
- let(:send_request) { update_job(state: 'success') }
- end
-
- it 'updates runner info' do
- expect { update_job(state: 'success') }.to change { runner.reload.contacted_at }
- end
-
- context 'when status is given' do
- it 'marks job as succeeded' do
- update_job(state: 'success')
-
- job.reload
- expect(job).to be_success
- end
-
- it 'marks job as failed' do
- update_job(state: 'failed')
-
- job.reload
- expect(job).to be_failed
- expect(job).to be_unknown_failure
- end
-
- context 'when failure_reason is script_failure' do
- before do
- update_job(state: 'failed', failure_reason: 'script_failure')
- job.reload
- end
-
- it { expect(job).to be_script_failure }
- end
-
- context 'when failure_reason is runner_system_failure' do
- before do
- update_job(state: 'failed', failure_reason: 'runner_system_failure')
- job.reload
- end
-
- it { expect(job).to be_runner_system_failure }
- end
-
- context 'when failure_reason is unrecognized value' do
- before do
- update_job(state: 'failed', failure_reason: 'what_is_this')
- job.reload
- end
-
- it { expect(job).to be_unknown_failure }
- end
-
- context 'when failure_reason is job_execution_timeout' do
- before do
- update_job(state: 'failed', failure_reason: 'job_execution_timeout')
- job.reload
- end
-
- it { expect(job).to be_job_execution_timeout }
- end
-
- context 'when failure_reason is unmet_prerequisites' do
- before do
- update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
- job.reload
- end
-
- it { expect(job).to be_unmet_prerequisites }
- end
- end
-
- context 'when trace is given' do
- it 'creates a trace artifact' do
- allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do
- ArchiveTraceWorker.new.perform(job.id)
- end
-
- update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
-
- job.reload
- expect(response).to have_gitlab_http_status(:ok)
- expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
- expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
- end
-
- context 'when concurrent update of trace is happening' do
- before do
- job.trace.write('wb') do
- update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
- end
- end
-
- it 'returns that operation conflicts' do
- expect(response).to have_gitlab_http_status(:conflict)
- end
- end
- end
-
- context 'when no trace is given' do
- it 'does not override trace information' do
- update_job
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE'
- end
-
- context 'when running state is sent' do
- it 'updates updated_at value' do
- expect { update_job_after_time }.to change { job.reload.updated_at }
- end
- end
-
- context 'when other state is sent' do
- it "doesn't update update_at value" do
- expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
- end
- end
- end
-
- context 'when job has been erased' do
- let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
-
- it 'responds with forbidden' do
- update_job
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when job has already been finished' do
- before do
- job.trace.set('Job failed')
- job.drop!(:script_failure)
- end
-
- it 'does not update job status and job trace' do
- update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
-
- job.reload
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(response.header['Job-Status']).to eq 'failed'
- expect(job.trace.raw).to eq 'Job failed'
- expect(job).to be_failed
- end
- end
-
- def update_job(token = job.token, **params)
- new_params = params.merge(token: token)
- put api("/jobs/#{job.id}"), params: new_params
- end
-
- def update_job_after_time(update_interval = 20.minutes, state = 'running')
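- # Travels past the given interval before issuing the update so that
- # changes to the job's updated_at timestamp can be observed.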
- Timecop.travel(job.updated_at + update_interval) do
- update_job(job.token, state: state)
- end
- end
- end
-
- describe 'PATCH /api/v4/jobs/:id/trace' do
- let(:job) do
- create(:ci_build, :running, :trace_live,
- project: project, user: user, runner_id: runner.id, pipeline: pipeline)
- end
- let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
- let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
- let(:update_interval) { 10.seconds.to_i }
-
- before do
- initial_patch_the_trace
- end
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/trace' do
- let(:send_request) { patch_the_trace }
- end
-
- it 'updates runner info' do
- runner.update!(contacted_at: 1.year.ago)
-
- expect { patch_the_trace }.to change { runner.reload.contacted_at }
- end
-
- context 'when request is valid' do
- it 'gets correct response' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- expect(response.header).to have_key 'Range'
- expect(response.header).to have_key 'Job-Status'
- expect(response.header).to have_key 'X-GitLab-Trace-Update-Interval'
- end
-
- context 'when job has been updated recently' do
- it { expect { patch_the_trace }.not_to change { job.updated_at }}
-
- it "changes the job's trace" do
- patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
- end
-
- context 'when Runner makes a force-patch' do
- it { expect { force_patch_the_trace }.not_to change { job.updated_at }}
-
- it "doesn't change the build.trace" do
- force_patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- end
- end
- end
-
- context 'when job was not updated recently' do
- let(:update_interval) { 15.minutes.to_i }
-
- it { expect { patch_the_trace }.to change { job.updated_at } }
-
- it 'changes the job.trace' do
- patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
- end
-
- context 'when Runner makes a force-patch' do
- it { expect { force_patch_the_trace }.to change { job.updated_at } }
-
- it "doesn't change the job.trace" do
- force_patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- end
- end
- end
-
- context 'when project for the build has been deleted' do
- let(:job) do
- create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
- job.project.update(pending_delete: true)
- end
- end
-
- it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when trace is patched' do
- before do
- patch_the_trace
- end
-
- it 'has valid trace' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
- end
-
- context 'when job is cancelled' do
- before do
- job.cancel
- end
-
- context 'when trace is patched' do
- before do
- patch_the_trace
- end
-
- it 'returns forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- context 'when redis data are flushed' do
- before do
- redis_shared_state_cleanup!
- end
-
- it 'has empty trace' do
- expect(job.reload.trace.raw).to eq ''
- end
-
- context 'when we perform partial patch' do
- before do
- patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
- end
-
- it 'returns an error' do
- expect(response).to have_gitlab_http_status(:range_not_satisfiable)
- expect(response.header['Range']).to eq('0-0')
- end
- end
-
- context 'when we resend full trace' do
- before do
- patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
- end
-
- it 'succeeds in updating the trace' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
- end
- end
- end
- end
-
- context 'when concurrent update of trace is happening' do
- before do
- job.trace.write('wb') do
- patch_the_trace
- end
- end
-
- it 'returns that operation conflicts' do
- expect(response).to have_gitlab_http_status(:conflict)
- end
- end
-
- context 'when the job is canceled' do
- before do
- job.cancel
- patch_the_trace
- end
-
- it 'receives status in header' do
- expect(response.header['Job-Status']).to eq 'canceled'
- end
- end
-
- context 'when build trace is being watched' do
- before do
- job.trace.being_watched!
- end
-
- it 'returns X-GitLab-Trace-Update-Interval as 3' do
- patch_the_trace
-
- expect(response).to have_gitlab_http_status(:accepted)
- expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('3')
- end
- end
-
- context 'when build trace is not being watched' do
- it 'returns X-GitLab-Trace-Update-Interval as 30' do
- patch_the_trace
-
- expect(response).to have_gitlab_http_status(:accepted)
- expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
- end
- end
- end
-
- context 'when Runner makes a force-patch' do
- before do
- force_patch_the_trace
- end
-
- it 'gets correct response' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- expect(response.header).to have_key 'Range'
- expect(response.header).to have_key 'Job-Status'
- end
- end
-
- context 'when content-range start is too big' do
- let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }
-
- it 'gets 416 error response with range headers' do
- expect(response).to have_gitlab_http_status(:range_not_satisfiable)
- expect(response.header).to have_key 'Range'
- expect(response.header['Range']).to eq '0-11'
- end
- end
-
- context 'when content-range start is too small' do
- let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) }
-
- it 'gets 416 error response with range headers' do
- expect(response).to have_gitlab_http_status(:range_not_satisfiable)
- expect(response.header).to have_key 'Range'
- expect(response.header['Range']).to eq '0-11'
- end
- end
-
- context 'when Content-Range header is missing' do
- let(:headers_with_range) { headers }
-
- it { expect(response).to have_gitlab_http_status(:bad_request) }
- end
-
- context 'when job has been erased' do
- let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
-
- it { expect(response).to have_gitlab_http_status(:forbidden) }
- end
-
- def patch_the_trace(content = ' appended', request_headers = nil)
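- # When no explicit headers are given, derive the Content-Range from the
- # current trace size so the patch appends at the end of the stream.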
- unless request_headers
- job.trace.read do |stream|
- offset = stream.size
- limit = offset + content.length - 1
- request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
- end
- end
-
- Timecop.travel(job.updated_at + update_interval) do
- patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
- job.reload
- end
- end
-
- def initial_patch_the_trace
- patch_the_trace(' appended', headers_with_range)
- end
-
- def force_patch_the_trace
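- # Patching an empty chunk twice simulates a runner force-patch of the trace.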
- 2.times { patch_the_trace('') }
- end
- end
-
- describe 'artifacts' do
- let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
- let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
- let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
- let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
- let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
- let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
-
- before do
- stub_artifacts_object_storage
- job.run!
- end
-
- shared_examples_for 'rejecting artifacts that are too large' do
- let(:filesize) { 100.megabytes.to_i }
- let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
-
- shared_examples_for 'failed request' do
- it 'responds with payload too large error' do
- send_request
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
- end
-
- context 'based on plan limit setting' do
- let(:application_max_size) { sample_max_size + 100 }
- let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
-
- before do
- create(:plan_limits, :default_plan, limit_name => sample_max_size)
- stub_application_setting(max_artifacts_size: application_max_size)
- end
-
- context 'and feature flag ci_max_artifact_size_per_type is enabled' do
- before do
- stub_feature_flags(ci_max_artifact_size_per_type: true)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'and feature flag ci_max_artifact_size_per_type is disabled' do
- before do
- stub_feature_flags(ci_max_artifact_size_per_type: false)
- end
-
- it 'bases the limit on the closest setting' do
- send_request
-
- expect(response).to have_gitlab_http_status(success_code)
- end
- end
- end
-
- context 'based on application setting' do
- before do
- stub_application_setting(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'based on root namespace setting' do
- let(:application_max_size) { sample_max_size + 10 }
-
- before do
- stub_application_setting(max_artifacts_size: application_max_size)
- root_namespace.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'based on child namespace setting' do
- let(:application_max_size) { sample_max_size + 10 }
- let(:root_namespace_max_size) { sample_max_size + 10 }
-
- before do
- stub_application_setting(max_artifacts_size: application_max_size)
- root_namespace.update!(max_artifacts_size: root_namespace_max_size)
- namespace.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'based on project setting' do
- let(:application_max_size) { sample_max_size + 10 }
- let(:root_namespace_max_size) { sample_max_size + 10 }
- let(:child_namespace_max_size) { sample_max_size + 10 }
-
- before do
- stub_application_setting(max_artifacts_size: application_max_size)
- root_namespace.update!(max_artifacts_size: root_namespace_max_size)
- namespace.update!(max_artifacts_size: child_namespace_max_size)
- project.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
- end
-
- describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
- context 'when using token as parameter' do
- context 'and the artifact is too large' do
- it_behaves_like 'rejecting artifacts that are too large' do
- let(:success_code) { :ok }
- let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
- end
- end
-
- context 'posting artifacts to running job' do
- subject do
- authorize_artifacts_with_token_in_params
- end
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
- let(:send_request) { subject }
- end
-
- it 'updates runner info' do
- expect { subject }.to change { runner.reload.contacted_at }
- end
-
- shared_examples 'authorizes local file' do
- it 'succeeds' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
- expect(json_response['RemoteObject']).to be_nil
- end
- end
-
- context 'when using local storage' do
- it_behaves_like 'authorizes local file'
- end
-
- context 'when using remote storage' do
- context 'when direct upload is enabled' do
- before do
- stub_artifacts_object_storage(enabled: true, direct_upload: true)
- end
-
- it 'succeeds' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response).not_to have_key('TempPath')
- expect(json_response['RemoteObject']).to have_key('ID')
- expect(json_response['RemoteObject']).to have_key('GetURL')
- expect(json_response['RemoteObject']).to have_key('StoreURL')
- expect(json_response['RemoteObject']).to have_key('DeleteURL')
- expect(json_response['RemoteObject']).to have_key('MultipartUpload')
- end
- end
-
- context 'when direct upload is disabled' do
- before do
- stub_artifacts_object_storage(enabled: true, direct_upload: false)
- end
-
- it_behaves_like 'authorizes local file'
- end
- end
- end
- end
-
- context 'when using token as header' do
- it 'authorizes posting artifacts to running job' do
- authorize_artifacts_with_token_in_headers
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response['TempPath']).not_to be_nil
- end
-
- it 'fails to post too large artifact' do
- stub_application_setting(max_artifacts_size: 0)
-
- authorize_artifacts_with_token_in_headers(filesize: 100)
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
- end
-
- context 'when using runners token' do
- it 'fails to authorize artifacts posting' do
- authorize_artifacts(token: job.project.runners_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- it 'rejects requests that did not go through gitlab-workhorse' do
- headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
-
- authorize_artifacts
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- context 'authorization token is invalid' do
- it 'responds with forbidden' do
- authorize_artifacts(token: 'invalid', filesize: 100)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'authorize uploading of an lsif artifact' do
- before do
- stub_feature_flags(code_navigation: job.project)
- end
-
- it 'adds ProcessLsif header' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['ProcessLsif']).to be_truthy
- end
-
- it 'adds ProcessLsifReferences header' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['ProcessLsifReferences']).to be_truthy
- end
-
- context 'code_navigation feature flag is disabled' do
- it 'responds with a forbidden error' do
- stub_feature_flags(code_navigation: false)
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['ProcessLsif']).to be_falsy
- expect(json_response['ProcessLsifReferences']).to be_falsy
- end
- end
- end
-
- context 'code_navigation_references feature flag is disabled' do
- it 'sets ProcessLsifReferences header to false' do
- stub_feature_flags(code_navigation_references: false)
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['ProcessLsif']).to be_truthy
- expect(json_response['ProcessLsifReferences']).to be_falsy
- end
- end
- end
- end
-
- def authorize_artifacts(params = {}, request_headers = headers)
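- # Posts to the artifacts authorize endpoint with the given Workhorse headers.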
- post api("/jobs/#{job.id}/artifacts/authorize"), params: params, headers: request_headers
- end
-
- def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
- params = params.merge(token: job.token)
- authorize_artifacts(params, request_headers)
- end
-
- def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
- authorize_artifacts(params, request_headers)
- end
- end
-
- describe 'POST /api/v4/jobs/:id/artifacts' do
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
- let(:send_request) do
- upload_artifacts(file_upload, headers_with_token)
- end
- end
-
- it 'updates runner info' do
- expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
- end
-
- context 'when the artifact is too large' do
- it_behaves_like 'rejecting artifacts that are too large' do
- # This filesize validation also happens for non-remote stored files;
- # it's just hard to stub the filesize to be more than a megabyte in
- # those cases.
- let!(:fog_connection) do
- stub_artifacts_object_storage(direct_upload: true)
- end
- let(:object) do
- fog_connection.directories.new(key: 'artifacts').files.create(
- key: 'tmp/uploads/12312300',
- body: 'content'
- )
- end
- let(:file_upload) { fog_to_uploaded_file(object) }
- let(:send_request) do
- upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
- end
- let(:success_code) { :created }
-
- before do
- allow(object).to receive(:content_length).and_return(filesize)
- end
- end
- end
-
- context 'when artifacts are being stored inside of tmp path' do
- before do
- # by configuring this path we allow passing a temp file from any path
- allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
- end
-
- context 'when job has been erased' do
- let(:job) { create(:ci_build, erased_at: Time.now) }
-
- before do
- upload_artifacts(file_upload, headers_with_token)
- end
-
- it 'responds with forbidden' do
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when job is running' do
- shared_examples 'successful artifacts upload' do
- it 'updates successfully' do
- expect(response).to have_gitlab_http_status(:created)
- end
- end
-
- context 'when using accelerated file post' do
- context 'for file stored locally' do
- before do
- upload_artifacts(file_upload, headers_with_token)
- end
-
- it_behaves_like 'successful artifacts upload'
- end
-
- context 'for file stored remotely' do
- let!(:fog_connection) do
- stub_artifacts_object_storage(direct_upload: true)
- end
- let(:object) do
- fog_connection.directories.new(key: 'artifacts').files.create(
- key: 'tmp/uploads/12312300',
- body: 'content'
- )
- end
- let(:file_upload) { fog_to_uploaded_file(object) }
-
- before do
- upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => remote_id)
- end
-
- context 'when valid remote_id is used' do
- let(:remote_id) { '12312300' }
-
- it_behaves_like 'successful artifacts upload'
- end
-
- context 'when invalid remote_id is used' do
- let(:remote_id) { 'invalid id' }
-
- it 'responds with internal server error' do
- expect(response).to have_gitlab_http_status(:internal_server_error)
- expect(json_response['message']).to eq("Missing file")
- end
- end
- end
- end
-
- context 'when using runners token' do
- it 'responds with forbidden' do
- upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- context 'when artifacts post request does not contain file' do
- it 'fails to post artifacts without file' do
- post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when GitLab Workhorse is not configured' do
- it 'fails to post artifacts without GitLab-Workhorse' do
- post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when GitLab Workhorse token headers are missing' do
- let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
-
- it 'fails to post artifacts without a valid GitLab Workhorse token' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).once
-
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when setting an expire date' do
- let(:default_artifacts_expire_in) {}
- let(:post_data) do
- { file: file_upload,
- expire_in: expire_in }
- end
-
- before do
- stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
-
- upload_artifacts(file_upload, headers_with_token, post_data)
- end
-
- context 'when an expire_in is given' do
- let(:expire_in) { '7 days' }
-
- it 'updates when specified' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
- end
- end
-
- context 'when no expire_in is given' do
- let(:expire_in) { nil }
-
- it 'ignores if not specified' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_nil
- end
-
- context 'with application default' do
- context 'when default is 5 days' do
- let(:default_artifacts_expire_in) { '5 days' }
-
- it 'sets to application default' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
- end
- end
-
- context 'when default is 0' do
- let(:default_artifacts_expire_in) { '0' }
-
- it 'does not set expire_in' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_nil
- end
- end
- end
- end
- end
-
- context 'posts artifacts file and metadata file' do
- let!(:artifacts) { file_upload }
- let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
- let!(:metadata) { file_upload2 }
- let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
-
- let(:stored_artifacts_file) { job.reload.artifacts_file }
- let(:stored_metadata_file) { job.reload.artifacts_metadata }
- let(:stored_artifacts_size) { job.reload.artifacts_size }
- let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
- let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
- let(:file_keys) { post_data.keys }
- let(:send_rewritten_field) { true }
-
- before do
- workhorse_finalize_with_multiple_files(
- api("/jobs/#{job.id}/artifacts"),
- method: :post,
- file_keys: file_keys,
- params: post_data,
- headers: headers_with_token,
- send_rewritten_field: send_rewritten_field
- )
- end
-
- context 'when post data accelerated by workhorse is correct' do
- let(:post_data) { { file: artifacts, metadata: metadata } }
-
- it 'stores artifacts and artifacts metadata' do
- expect(response).to have_gitlab_http_status(:created)
- expect(stored_artifacts_file.filename).to eq(artifacts.original_filename)
- expect(stored_metadata_file.filename).to eq(metadata.original_filename)
- expect(stored_artifacts_size).to eq(artifacts.size)
- expect(stored_artifacts_sha256).to eq(artifacts_sha256)
- expect(stored_metadata_sha256).to eq(metadata_sha256)
- end
- end
-
- context 'with a malicious file.path param' do
- let(:post_data) { {} }
- let(:tmp_file) { Tempfile.new('crafted.file.path') }
- let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
-
- it 'rejects the request' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(stored_artifacts_size).to be_nil
- end
- end
-
- context 'when workhorse header is missing' do
- let(:post_data) { { file: artifacts, metadata: metadata } }
- let(:send_rewritten_field) { false }
-
- it 'rejects the request' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(stored_artifacts_size).to be_nil
- end
- end
-
- context 'when there is no artifacts file in post data' do
- let(:post_data) do
- { metadata: metadata }
- end
-
- it 'responds with bad request' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
-
- it 'does not store metadata' do
- expect(stored_metadata_file).to be_nil
- end
- end
- end
-
- context 'when artifact_type is archive' do
- context 'when artifact_format is zip' do
- let(:params) { { artifact_type: :archive, artifact_format: :zip } }
-
- it 'stores the archive artifact' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_archive).not_to be_nil
- end
- end
-
- context 'when artifact_format is gzip' do
- let(:params) { { artifact_type: :archive, artifact_format: :gzip } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_archive).to be_nil
- end
- end
- end
-
- context 'when artifact_type is junit' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
- let(:params) { { artifact_type: :junit, artifact_format: :gzip } }
-
- it 'stores junit test report' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_junit).not_to be_nil
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
- let(:params) { { artifact_type: :junit, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_junit).to be_nil
- end
- end
- end
-
- context 'when artifact_type is metrics_referee' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
- let(:params) { { artifact_type: :metrics_referee, artifact_format: :gzip } }
-
- it 'stores metrics_referee data' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
- let(:params) { { artifact_type: :metrics_referee, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_metrics_referee).to be_nil
- end
- end
- end
-
- context 'when artifact_type is network_referee' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
- let(:params) { { artifact_type: :network_referee, artifact_format: :gzip } }
-
- it 'stores network_referee data' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_network_referee).not_to be_nil
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
- let(:params) { { artifact_type: :network_referee, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_network_referee).to be_nil
- end
- end
- end
-
- context 'when artifact_type is dotenv' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
- let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }
-
- it 'stores dotenv file' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_dotenv).not_to be_nil
- end
-
- it 'parses dotenv file' do
- expect do
- upload_artifacts(file_upload, headers_with_token, params)
- end.to change { job.job_variables.count }.from(0).to(2)
- end
-
- context 'when parse error happens' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq('Invalid Format')
- end
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
- let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_dotenv).to be_nil
- end
- end
- end
- end
-
- context 'when artifacts already exist for the job' do
- let(:params) do
- {
- artifact_type: :archive,
- artifact_format: :zip,
- 'file.sha256' => uploaded_sha256
- }
- end
-
- let(:existing_sha256) { '0' * 64 }
-
- let!(:existing_artifact) do
- create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
- end
-
- context 'when sha256 is the same of the existing artifact' do
- let(:uploaded_sha256) { existing_sha256 }
-
- it 'ignores the new artifact' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
- end
- end
-
- context 'when sha256 is different than the existing artifact' do
- let(:uploaded_sha256) { '1' * 64 }
-
- it 'logs and returns an error' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
-
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
- end
- end
- end
-
- context 'when object storage throws errors' do
- let(:params) { { artifact_type: :archive, artifact_format: :zip } }
-
- it 'does not store artifacts' do
- allow_next_instance_of(JobArtifactUploader) do |uploader|
- allow(uploader).to receive(:store!).and_raise(Errno::EIO)
- end
-
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:service_unavailable)
- expect(job.reload.job_artifacts_archive).to be_nil
- end
- end
-
- context 'when artifacts are being stored outside of tmp path' do
- let(:new_tmpdir) { Dir.mktmpdir }
-
- before do
- # init before overwriting tmp dir
- file_upload
-
- # by configuring this path we only allow passing files from @tmpdir,
- # but all temporary files are stored in the system tmp directory
- allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
- end
-
- after do
- FileUtils.remove_entry(new_tmpdir)
- end
-
- it 'fails to post artifacts stored outside of the tmp path' do
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- def upload_artifacts(file, headers = {}, params = {})
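- # Finalizes the upload the way gitlab-workhorse would, sending the file
- # under the :file key together with the rewritten field metadata.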
- workhorse_finalize(
- api("/jobs/#{job.id}/artifacts"),
- method: :post,
- file_key: :file,
- params: params.merge(file: file),
- headers: headers,
- send_rewritten_field: true
- )
- end
- end
-
- describe 'GET /api/v4/jobs/:id/artifacts' do
- let(:token) { job.token }
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
- let(:send_request) { download_artifact }
- end
-
- it 'updates runner info' do
- expect { download_artifact }.to change { runner.reload.contacted_at }
- end
-
- context 'when job has artifacts' do
- let(:job) { create(:ci_build) }
- let(:store) { JobArtifactUploader::Store::LOCAL }
-
- before do
- create(:ci_job_artifact, :archive, file_store: store, job: job)
- end
-
- context 'when using job token' do
- context 'when artifacts are stored locally' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) }
- end
-
- before do
- download_artifact
- end
-
- it 'downloads artifacts' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h).to include download_headers
- end
- end
-
- context 'when artifacts are stored remotely' do
- let(:store) { JobArtifactUploader::Store::REMOTE }
- let!(:job) { create(:ci_build) }
-
- context 'when proxy download is being used' do
- before do
- download_artifact(direct_download: false)
- end
-
- it 'uses workhorse send-url' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h).to include(
- 'Gitlab-Workhorse-Send-Data' => /send-url:/)
- end
- end
-
- context 'when direct download is being used' do
- before do
- download_artifact(direct_download: true)
- end
-
- it 'receives redirect for downloading artifacts' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response.headers).to include('Location')
- end
- end
- end
- end
-
- context 'when using runners token' do
- let(:token) { job.project.runners_token }
-
- before do
- download_artifact
- end
-
- it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- context 'when job does not have artifacts' do
- it 'responds with not found' do
- download_artifact
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- def download_artifact(params = {}, request_headers = headers)
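- # Reloads the job before requesting its artifacts with the given token.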
- params = params.merge(token: token)
- job.reload
-
- get api("/jobs/#{job.id}/artifacts"), params: params, headers: request_headers
- end
- end
- end
- end
-end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 724e3177173..21ff0a94db9 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -329,6 +329,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_c_params) do
{
branch: 'master',
@@ -342,6 +343,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_utf8_c_params) do
{
branch: 'master',
@@ -621,6 +623,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_d_params) do
{
branch: 'markdown',
@@ -664,6 +667,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_m_params) do
{
branch: 'feature',
@@ -708,6 +712,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_u_params) do
{
branch: 'master',
@@ -819,6 +824,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_mo_params) do
{
branch: 'master',
@@ -1462,6 +1468,16 @@ RSpec.describe API::Commits do
expect(json_response['author_name']).to eq(commit.author_name)
expect(json_response['committer_name']).to eq(user.name)
end
+
+ it 'supports dry-run without applying changes' do
+ head = project.commit(branch)
+
+ post api(route, current_user), params: { branch: branch, dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq("dry_run" => "success")
+ expect(project.commit(branch)).to eq(head)
+ end
end
context 'when repository is disabled' do
@@ -1533,6 +1549,14 @@ RSpec.describe API::Commits do
expect(json_response['error_code']).to eq 'empty'
end
+
+ it 'includes an additional dry_run error field when enabled' do
+ post api(route, current_user), params: { branch: 'markdown', dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error_code']).to eq 'empty'
+ expect(json_response['dry_run']).to eq 'error'
+ end
end
context 'when ref contains a dot' do
@@ -1623,6 +1647,16 @@ RSpec.describe API::Commits do
expect(json_response['committer_name']).to eq(user.name)
expect(json_response['parent_ids']).to contain_exactly(commit_id)
end
+
+ it 'supports dry-run without applying changes' do
+ head = project.commit(branch)
+
+ post api(route, current_user), params: { branch: branch, dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq("dry_run" => "success")
+ expect(project.commit(branch)).to eq(head)
+ end
end
context 'when repository is disabled' do
@@ -1704,6 +1738,18 @@ RSpec.describe API::Commits do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error_code']).to eq 'empty'
end
+
+ it 'includes an additional dry_run error field when enabled' do
+ # First one actually reverts
+ post api(route, current_user), params: { branch: 'markdown' }
+
+ # Second one is redundant and should be empty
+ post api(route, current_user), params: { branch: 'markdown', dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error_code']).to eq 'empty'
+ expect(json_response['dry_run']).to eq 'error'
+ end
end
end
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index d756a7700f6..f5b8ebb545b 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::ComposerPackages do
- include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group, reload: true) { create(:group, :public) }
@@ -11,47 +11,88 @@ RSpec.describe API::ComposerPackages do
let_it_be(:project, reload: true) { create(:project, :custom_repo, files: { 'composer.json' => { name: package_name }.to_json }, group: group) }
let(:headers) { {} }
+ using RSpec::Parameterized::TableSyntax
+
describe 'GET /api/v4/group/:id/-/packages/composer/packages' do
let(:url) { "/group/#{group.id}/-/packages/composer/packages.json" }
subject { get api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- let!(:package) { create(:composer_package, :with_metadatum, project: project) }
+ context 'with valid project' do
+ let!(:package) { create(:composer_package, :with_metadatum, project: project) }
- using RSpec::Parameterized::TableSyntax
+ context 'with a public group' do
+ before do
+ group.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package index' | :success
- 'PUBLIC' | :guest | true | true | 'Composer package index' | :success
- 'PUBLIC' | :developer | true | false | 'Composer package index' | :success
- 'PUBLIC' | :guest | true | false | 'Composer package index' | :success
- 'PUBLIC' | :developer | false | true | 'Composer package index' | :success
- 'PUBLIC' | :guest | false | true | 'Composer package index' | :success
- 'PUBLIC' | :developer | false | false | 'Composer package index' | :success
- 'PUBLIC' | :guest | false | false | 'Composer package index' | :success
- 'PUBLIC' | :anonymous | false | true | 'Composer package index' | :success
- 'PRIVATE' | :developer | true | true | 'Composer package index' | :success
- 'PRIVATE' | :guest | true | true | 'Composer package index' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
+ 'PUBLIC' | :developer | true | true | :include_package
+ 'PUBLIC' | :developer | true | false | :include_package
+ 'PUBLIC' | :developer | false | false | :include_package
+ 'PUBLIC' | :developer | false | true | :include_package
+ 'PUBLIC' | :guest | true | true | :include_package
+ 'PUBLIC' | :guest | true | false | :include_package
+ 'PUBLIC' | :guest | false | true | :include_package
+ 'PUBLIC' | :guest | false | false | :include_package
+ 'PUBLIC' | :anonymous | false | true | :include_package
+ 'PRIVATE' | :developer | true | true | :include_package
+ 'PRIVATE' | :developer | true | false | :does_not_include_package
+ 'PRIVATE' | :developer | false | true | :does_not_include_package
+ 'PRIVATE' | :developer | false | false | :does_not_include_package
+ 'PRIVATE' | :guest | true | true | :does_not_include_package
+ 'PRIVATE' | :guest | true | false | :does_not_include_package
+ 'PRIVATE' | :guest | false | true | :does_not_include_package
+ 'PRIVATE' | :guest | false | false | :does_not_include_package
+ 'PRIVATE' | :anonymous | false | true | :does_not_include_package
end
with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like 'Composer package index', params[:user_role], :success, params[:member], params[:include_package]
end
end
end
- it_behaves_like 'rejects Composer access with unknown group id'
+ context 'with a private group' do
+ before do
+ group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ context 'with access to the api' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
+ 'PRIVATE' | :developer | true | true | :include_package
+ 'PRIVATE' | :guest | true | true | :does_not_include_package
+ end
+
+ with_them do
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like 'Composer package index', params[:user_role], :success, params[:member], params[:include_package]
+ end
+ end
+ end
+
+ context 'without access to the api' do
+ where(:project_visibility_level, :user_role, :member, :user_token) do
+ 'PRIVATE' | :developer | true | false
+ 'PRIVATE' | :developer | false | true
+ 'PRIVATE' | :developer | false | false
+ 'PRIVATE' | :guest | true | false
+ 'PRIVATE' | :guest | false | true
+ 'PRIVATE' | :guest | false | false
+ 'PRIVATE' | :anonymous | false | true
+ end
+
+ with_them do
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like 'process Composer api request', params[:user_role], :not_found, params[:member]
+ end
+ end
+ end
+ end
end
+
+ it_behaves_like 'rejects Composer access with unknown group id'
end
describe 'GET /api/v4/group/:id/-/packages/composer/p/:sha.json' do
@@ -61,40 +102,36 @@ RSpec.describe API::ComposerPackages do
subject { get api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer provider index' | :success
- 'PUBLIC' | :guest | true | true | 'Composer provider index' | :success
- 'PUBLIC' | :developer | true | false | 'Composer provider index' | :success
- 'PUBLIC' | :guest | true | false | 'Composer provider index' | :success
- 'PUBLIC' | :developer | false | true | 'Composer provider index' | :success
- 'PUBLIC' | :guest | false | true | 'Composer provider index' | :success
- 'PUBLIC' | :developer | false | false | 'Composer provider index' | :success
- 'PUBLIC' | :guest | false | false | 'Composer provider index' | :success
- 'PUBLIC' | :anonymous | false | true | 'Composer provider index' | :success
- 'PRIVATE' | :developer | true | true | 'Composer provider index' | :success
- 'PRIVATE' | :guest | true | true | 'Composer empty provider index' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
- end
+ context 'with valid project' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | true | false | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | false | true | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | false | false | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | true | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | true | false | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | false | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | false | false | 'Composer provider index' | :success
+ 'PUBLIC' | :anonymous | false | true | 'Composer provider index' | :success
+ 'PRIVATE' | :developer | true | true | 'Composer provider index' | :success
+ 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | true | true | 'Composer empty provider index' | :success
+ 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown group id'
end
+
+ it_behaves_like 'rejects Composer access with unknown group id'
end
describe 'GET /api/v4/group/:id/-/packages/composer/*package_name.json' do
@@ -103,48 +140,44 @@ RSpec.describe API::ComposerPackages do
subject { get api(url), headers: headers }
- context 'without the need for a license' do
- context 'with no packages' do
- include_context 'Composer user type', :developer, true do
- it_behaves_like 'returning response status', :not_found
- end
+ context 'with no packages' do
+ include_context 'Composer user type', :developer, true do
+ it_behaves_like 'returning response status', :not_found
end
+ end
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | true | false | 'Composer package api request' | :success
- 'PUBLIC' | :guest | true | false | 'Composer package api request' | :success
- 'PUBLIC' | :developer | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | false | false | 'Composer package api request' | :success
- 'PUBLIC' | :guest | false | false | 'Composer package api request' | :success
- 'PUBLIC' | :anonymous | false | true | 'Composer package api request' | :success
- 'PRIVATE' | :developer | true | true | 'Composer package api request' | :success
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
- end
+ context 'with valid project' do
+ let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | true | false | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | false | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | false | false | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | true | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | true | false | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | false | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | false | false | 'Composer package api request' | :success
+ 'PUBLIC' | :anonymous | false | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | true | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | true | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown group id'
end
+
+ it_behaves_like 'rejects Composer access with unknown group id'
end
describe 'POST /api/v4/projects/:id/packages/composer' do
@@ -158,44 +191,40 @@ RSpec.describe API::ComposerPackages do
subject { post api(url), headers: headers, params: params }
shared_examples 'composer package publish' do
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package creation' | :created
- 'PUBLIC' | :guest | true | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'Composer package creation' | :created
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :unauthorized
- end
+ context 'with valid project' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'Composer package creation' | :created
+ 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | true | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | false | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'Composer package creation' | :created
+ 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | true | true | 'process Composer api request' | :forbidden
+ 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :unauthorized
+ end
- with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown project id'
end
+
+ it_behaves_like 'rejects Composer access with unknown project id'
end
context 'with no tag or branch params' do
- let(:headers) { build_basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :developer, :bad_request
end
@@ -209,7 +238,7 @@ RSpec.describe API::ComposerPackages do
context 'with a non existing tag' do
let(:params) { { tag: 'non-existing-tag' } }
- let(:headers) { build_basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :developer, :not_found
end
@@ -224,7 +253,7 @@ RSpec.describe API::ComposerPackages do
context 'with a non existing branch' do
let(:params) { { branch: 'non-existing-branch' } }
- let(:headers) { build_basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :developer, :not_found
end
@@ -238,65 +267,61 @@ RSpec.describe API::ComposerPackages do
subject { get api(url), headers: headers, params: params }
- context 'without the need for a license' do
- context 'with valid project' do
- let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
-
- context 'when the sha does not match the package name' do
- let(:sha) { '123' }
+ context 'with valid project' do
+ let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
- it_behaves_like 'process Composer api request', :anonymous, :not_found
- end
+ context 'when the sha does not match the package name' do
+ let(:sha) { '123' }
- context 'when the package name does not match the sha' do
- let(:branch) { project.repository.find_branch('master') }
- let(:sha) { branch.target }
- let(:url) { "/projects/#{project.id}/packages/composer/archives/unexisting-package-name.zip" }
+ it_behaves_like 'process Composer api request', :anonymous, :not_found
+ end
- it_behaves_like 'process Composer api request', :anonymous, :not_found
- end
+ context 'when the package name does not match the sha' do
+ let(:branch) { project.repository.find_branch('master') }
+ let(:sha) { branch.target }
+ let(:url) { "/projects/#{project.id}/packages/composer/archives/unexisting-package-name.zip" }
- context 'with a match package name and sha' do
- let(:branch) { project.repository.find_branch('master') }
- let(:sha) { branch.target }
-
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :expected_status) do
- 'PUBLIC' | :developer | true | true | :success
- 'PUBLIC' | :guest | true | true | :success
- 'PUBLIC' | :developer | true | false | :success
- 'PUBLIC' | :guest | true | false | :success
- 'PUBLIC' | :developer | false | true | :success
- 'PUBLIC' | :guest | false | true | :success
- 'PUBLIC' | :developer | false | false | :success
- 'PUBLIC' | :guest | false | false | :success
- 'PUBLIC' | :anonymous | false | true | :success
- 'PRIVATE' | :developer | true | true | :success
- 'PRIVATE' | :guest | true | true | :success
- 'PRIVATE' | :developer | true | false | :success
- 'PRIVATE' | :guest | true | false | :success
- 'PRIVATE' | :developer | false | true | :success
- 'PRIVATE' | :guest | false | true | :success
- 'PRIVATE' | :developer | false | false | :success
- 'PRIVATE' | :guest | false | false | :success
- 'PRIVATE' | :anonymous | false | true | :success
- end
+ it_behaves_like 'process Composer api request', :anonymous, :not_found
+ end
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ context 'with a matching package name and sha' do
+ let(:branch) { project.repository.find_branch('master') }
+ let(:sha) { branch.target }
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :expected_status) do
+ 'PUBLIC' | :developer | true | true | :success
+ 'PUBLIC' | :developer | true | false | :success
+ 'PUBLIC' | :developer | false | true | :success
+ 'PUBLIC' | :developer | false | false | :success
+ 'PUBLIC' | :guest | true | true | :success
+ 'PUBLIC' | :guest | true | false | :success
+ 'PUBLIC' | :guest | false | true | :success
+ 'PUBLIC' | :guest | false | false | :success
+ 'PUBLIC' | :anonymous | false | true | :success
+ 'PRIVATE' | :developer | true | true | :success
+ 'PRIVATE' | :developer | true | false | :success
+ 'PRIVATE' | :developer | false | true | :success
+ 'PRIVATE' | :developer | false | false | :success
+ 'PRIVATE' | :guest | true | true | :success
+ 'PRIVATE' | :guest | true | false | :success
+ 'PRIVATE' | :guest | false | true | :success
+ 'PRIVATE' | :guest | false | false | :success
+ 'PRIVATE' | :anonymous | false | true | :success
+ end
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- it_behaves_like 'process Composer api request', params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like 'process Composer api request', params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown project id'
end
+
+ it_behaves_like 'rejects Composer access with unknown project id'
end
end
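
The where/with_them tables used throughout this file come from the rspec-parameterized gem, pulled in via `using RSpec::Parameterized::TableSyntax`: each table row becomes one generated example and each column becomes a let-like method inside with_them. A minimal standalone sketch of the mechanism (the spec content here is invented purely for illustration; inside the GitLab test suite the gem is already loaded, so the explicit require is only needed when running the sketch on its own):

    require 'rspec-parameterized'

    RSpec.describe 'visibility matrix' do
      using RSpec::Parameterized::TableSyntax

      where(:visibility, :member, :expected_status) do
        'PUBLIC'  | false | :success
        'PRIVATE' | false | :not_found
        'PRIVATE' | true  | :success
      end

      with_them do
        # One example is generated per row; visibility, member and
        # expected_status are available as methods here.
        it 'exposes the row values' do
          expect(visibility).to be_a(String)
          expect([true, false]).to include(member)
          expect(expected_status).to be_a(Symbol)
        end
      end
    end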
diff --git a/spec/requests/api/conan_packages_spec.rb b/spec/requests/api/conan_packages_spec.rb
index 1d88eaef79c..95798b060f1 100644
--- a/spec/requests/api/conan_packages_spec.rb
+++ b/spec/requests/api/conan_packages_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe API::ConanPackages do
include WorkhorseHelpers
+ include HttpBasicAuthHelpers
include PackagesManagerApiSpecHelpers
let(:package) { create(:conan_package) }
@@ -330,6 +331,18 @@ RSpec.describe API::ConanPackages do
.and_return(presenter)
end
+ shared_examples 'rejects invalid upload_url params' do
+ context 'with unaccepted json format' do
+ let(:params) { %w[foo bar] }
+
+ it 'returns 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
describe 'GET /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
let(:recipe_path) { package.conan_recipe_path }
@@ -417,13 +430,14 @@ RSpec.describe API::ConanPackages do
let(:recipe_path) { package.conan_recipe_path }
let(:params) do
- { "conanfile.py": 24,
- "conanmanifext.txt": 123 }
+ { 'conanfile.py': 24,
+ 'conanmanifest.txt': 123 }
end
- subject { post api("/packages/conan/v1/conans/#{recipe_path}/upload_urls"), params: params, headers: headers }
+ subject { post api("/packages/conan/v1/conans/#{recipe_path}/upload_urls"), params: params.to_json, headers: headers }
it_behaves_like 'rejects invalid recipe'
+ it_behaves_like 'rejects invalid upload_url params'
it 'returns a set of upload urls for the files requested' do
subject
@@ -435,20 +449,58 @@ RSpec.describe API::ConanPackages do
expect(response.body).to eq(expected_response.to_json)
end
+
+ context 'with conan_sources and conan_export files' do
+ let(:params) do
+ { 'conan_sources.tgz': 345,
+ 'conan_export.tgz': 234,
+ 'conanmanifest.txt': 123 }
+ end
+
+ it 'returns upload urls for the additional files' do
+ subject
+
+ expected_response = {
+ 'conan_sources.tgz': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conan_sources.tgz",
+ 'conan_export.tgz': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conan_export.tgz",
+ 'conanmanifest.txt': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
+ }
+
+ expect(response.body).to eq(expected_response.to_json)
+ end
+ end
+
+ context 'with an invalid file' do
+ let(:params) do
+ { 'invalid_file.txt': 10,
+ 'conanmanifest.txt': 123 }
+ end
+
+ it 'does not return the invalid file as an upload_url' do
+ subject
+
+ expected_response = {
+ 'conanmanifest.txt': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
+ }
+
+ expect(response.body).to eq(expected_response.to_json)
+ end
+ end
end
describe 'POST /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/packages/:conan_package_reference/upload_urls' do
let(:recipe_path) { package.conan_recipe_path }
let(:params) do
- { "conaninfo.txt": 24,
- "conanmanifext.txt": 123,
- "conan_package.tgz": 523 }
+ { 'conaninfo.txt': 24,
+ 'conanmanifest.txt': 123,
+ 'conan_package.tgz': 523 }
end
- subject { post api("/packages/conan/v1/conans/#{recipe_path}/packages/123456789/upload_urls"), params: params, headers: headers }
+ subject { post api("/packages/conan/v1/conans/#{recipe_path}/packages/123456789/upload_urls"), params: params.to_json, headers: headers }
it_behaves_like 'rejects invalid recipe'
+ it_behaves_like 'rejects invalid upload_url params'
it 'returns a set of upload urls for the files requested' do
expected_response = {
@@ -461,6 +513,23 @@ RSpec.describe API::ConanPackages do
expect(response.body).to eq(expected_response.to_json)
end
+
+ context 'with invalid files' do
+ let(:params) do
+ { 'conaninfo.txt': 24,
+ 'invalid_file.txt': 10 }
+ end
+
+ it 'returns upload urls only for the valid requested files' do
+ expected_response = {
+ 'conaninfo.txt': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conaninfo.txt"
+ }
+
+ subject
+
+ expect(response.body).to eq(expected_response.to_json)
+ end
+ end
end
describe 'DELETE /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
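
The switch from `params: params` to `params: params.to_json` above means the upload_urls endpoints now receive a raw JSON body (a map of filename to size) instead of form-encoded parameters, and the new shared example checks that a JSON array is rejected. Roughly, the two request bodies under test look like this (values taken from the spec):

    require 'json'

    valid_body   = { 'conanfile.py' => 24, 'conanmanifest.txt' => 123 }.to_json
    # => '{"conanfile.py":24,"conanmanifest.txt":123}'  accepted; one upload URL per known file

    invalid_body = %w[foo bar].to_json
    # => '["foo","bar"]'  not a filename-to-size map, so the API answers 400 Bad Request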
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 81cef653770..591d994fec9 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -165,6 +165,7 @@ RSpec.describe API::DeployKeys do
let(:deploy_keys_project) do
create(:deploy_keys_project, project: project, deploy_key: deploy_key)
end
+
let(:extra_params) { { title: 'new title', can_push: true } }
it 'updates the title of the deploy key' do
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index b50f63ed67c..d7571ad4bff 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -10,12 +10,14 @@ RSpec.describe API::Files do
let(:guest) { create(:user) { |u| project.add_guest(u) } }
let(:file_path) { "files%2Fruby%2Fpopen%2Erb" }
let(:rouge_file_path) { "%2e%2e%2f" }
+ let(:absolute_path) { "%2Fetc%2Fpasswd.rb" }
let(:invalid_file_message) { 'file_path should be a valid file path' }
let(:params) do
{
ref: 'master'
}
end
+
let(:author_email) { 'user@example.org' }
let(:author_name) { 'John Doe' }
@@ -57,6 +59,18 @@ RSpec.describe API::Files do
end
end
+ shared_examples 'when path is absolute' do
+ it 'returns 400 when file path is absolute' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ if response.body.present?
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+ end
+ end
+
describe "HEAD /projects/:id/repository/files/:file_path" do
shared_examples_for 'repository files' do
it 'returns 400 when file path is invalid' do
@@ -65,6 +79,10 @@ RSpec.describe API::Files do
expect(response).to have_gitlab_http_status(:bad_request)
end
+ it_behaves_like 'when path is absolute' do
+ subject { head api(route(absolute_path), current_user), params: params }
+ end
+
it 'returns file attributes in headers' do
head api(route(file_path), current_user), params: params
@@ -165,6 +183,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { get api(route(absolute_path), api_user), params: params }
+ end
+
it 'returns file attributes as json' do
get api(route(file_path), api_user), params: params
@@ -350,6 +372,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { get api(route(absolute_path) + '/blame', current_user), params: params }
+ end
+
it 'returns blame file attributes as json' do
get api(route(file_path) + '/blame', current_user), params: params
@@ -473,6 +499,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { get api(route(absolute_path) + '/raw', current_user), params: params }
+ end
+
it 'returns raw file info' do
url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
@@ -597,6 +627,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { post api(route(absolute_path), user), params: params }
+ end
+
it "creates a new file in project repo" do
post api(route(file_path), user), params: params
@@ -735,6 +769,17 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ let(:last_commit) do
+ Gitlab::Git::Commit
+ .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ end
+
+ let(:params_with_correct_id) { params.merge(last_commit_id: last_commit.id) }
+
+ subject { put api(route(absolute_path), user), params: params_with_correct_id }
+ end
+
it "returns a 400 bad request if no params given" do
put api(route(file_path), user)
@@ -770,6 +815,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { delete api(route(absolute_path), user), params: params }
+ end
+
it "deletes existing file in project repo" do
delete api(route(file_path), user), params: params
@@ -821,6 +870,7 @@ RSpec.describe API::Files do
encoding: 'base64'
}
end
+
let(:get_params) do
{
ref: 'master'
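
The file-path parameters in this spec are URL-encoded, which is why the new absolute_path fixture reads "%2Fetc%2Fpasswd.rb": the leading encoded slash is what makes the decoded path absolute and triggers the 400 response. For reference, this is ordinary CGI escaping of the raw path (a quick sketch):

    require 'cgi'

    CGI.escape('/etc/passwd.rb')       # => "%2Fetc%2Fpasswd.rb"      absolute, rejected with 400
    CGI.escape('files/ruby/popen.rb')  # => "files%2Fruby%2Fpopen.rb" relative, accepted; the
                                       #    spec's "%2E" for the dot is an equivalent encoding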
diff --git a/spec/requests/api/go_proxy_spec.rb b/spec/requests/api/go_proxy_spec.rb
index 91e455dac19..2d7e319b0be 100644
--- a/spec/requests/api/go_proxy_spec.rb
+++ b/spec/requests/api/go_proxy_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::GoProxy do
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create :user }
let_it_be(:project) { create :project_empty_repo, creator: user, path: 'my-go-lib' }
@@ -108,6 +109,7 @@ RSpec.describe API::GoProxy do
project.repository.commit_by(oid: sha)
end
+
let(:resource) { "#{version}.info" }
it_behaves_like 'an unavailable resource'
@@ -386,7 +388,7 @@ RSpec.describe API::GoProxy do
end
it 'returns ok with a personal access token and basic authentication' do
- get_resource(headers: build_basic_auth_header(user.username, pa_token.token))
+ get_resource(headers: basic_auth_header(user.username, pa_token.token))
expect(response).to have_gitlab_http_status(:ok)
end
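
Several files in this change swap build_basic_auth_header (from PackagesManagerApiSpecHelpers) for basic_auth_header (from HttpBasicAuthHelpers). Either way, the header being built is plain HTTP Basic authentication (RFC 7617): the Base64 of "username:password". A minimal sketch of the equivalent construction, with placeholder credentials:

    require 'base64'

    def basic_authorization(username, password)
      { 'Authorization' => "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
    end

    basic_authorization('john', 'glpat-token')
    # => { "Authorization" => "Basic am9objpnbHBhdC10b2tlbg==" }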
diff --git a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
new file mode 100644
index 00000000000..ae1abb50a40
--- /dev/null
+++ b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'get board lists' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauth_user) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project_label) { create(:label, project: project, name: 'Development') }
+ let_it_be(:project_label2) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:group_label) { create(:group_label, group: group, name: 'Development') }
+ let_it_be(:group_label2) { create(:group_label, group: group, name: 'Testing') }
+
+ let(:params) { '' }
+ let(:board) { }
+ let(:board_parent_type) { board_parent.class.to_s.downcase }
+ let(:board_data) { graphql_data[board_parent_type]['boards']['nodes'][0] }
+ let(:lists_data) { board_data['lists']['nodes'][0] }
+ let(:issues_data) { lists_data['issues']['nodes'] }
+
+ def query(list_params = params)
+ graphql_query_for(
+ board_parent_type,
+ { 'fullPath' => board_parent.full_path },
+ <<~BOARDS
+ boards(first: 1) {
+ nodes {
+ lists {
+ nodes {
+ issues {
+ count
+ nodes {
+ #{all_graphql_fields_for('issues'.classify)}
+ }
+ }
+ }
+ }
+ }
+ }
+ BOARDS
+ )
+ end
+
+ def issue_titles
+ issues_data.map { |i| i['title'] }
+ end
+
+ shared_examples 'group and project board list issues query' do
+ let!(:board) { create(:board, resource_parent: board_parent) }
+ let!(:label_list) { create(:list, board: board, label: label, position: 10) }
+ let!(:issue1) { create(:issue, project: issue_project, labels: [label], relative_position: 9) }
+ let!(:issue2) { create(:issue, project: issue_project, labels: [label], relative_position: 2) }
+ let!(:issue3) { create(:issue, project: issue_project, labels: [label], relative_position: 9) }
+ let!(:issue4) { create(:issue, project: issue_project, labels: [label2], relative_position: 432) }
+
+ context 'when the user does not have access to the board' do
+ it 'returns nil' do
+ post_graphql(query, current_user: unauth_user)
+
+ expect(graphql_data[board_parent_type]).to be_nil
+ end
+ end
+
+ context 'when user can read the board' do
+ before do
+ board_parent.add_reporter(user)
+ end
+
+ it 'can access the issues' do
+ post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
+
+ expect(issue_titles).to eq([issue2.title, issue3.title, issue1.title])
+ end
+ end
+ end
+
+ describe 'for a project' do
+ let(:board_parent) { project }
+ let(:label) { project_label }
+ let(:label2) { project_label2 }
+ let(:issue_project) { project }
+
+ it_behaves_like 'group and project board list issues query'
+ end
+
+ describe 'for a group' do
+ let(:board_parent) { group }
+ let(:label) { group_label }
+ let(:label2) { group_label2 }
+ let(:issue_project) { create(:project, :private, group: group) }
+
+ before do
+ allow(board_parent).to receive(:multiple_issue_boards_available?).and_return(false)
+ end
+
+ it_behaves_like 'group and project board list issues query'
+ end
+end
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index 8a89590c85a..0838900eaba 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -105,6 +105,20 @@ RSpec.describe 'get board lists' do
end
end
end
+
+ context 'when querying for a single list' do
+ before do
+ board_parent.add_reporter(user)
+ end
+
+ it 'finds the correct list' do
+ label_list = create(:list, board: board, label: label, position: 10)
+
+ post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
+
+ expect(lists_data[0]['node']['title']).to eq label_list.title
+ end
+ end
end
describe 'for a project' do
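
The single-list query above filters by a GraphQL global ID, which the global_id_of helper derives from the model's GlobalID. A global ID is just a URI naming the application, model class and database id, so the interpolated argument ends up looking roughly like this (illustrative record and id only):

    list = create(:list, board: board, label: label, position: 10)
    list.to_global_id.to_s  # => "gid://gitlab/List/10"  (numeric id is whatever the record got)
    # The query then receives something like: lists(id: "gid://gitlab/List/10") { ... }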
diff --git a/spec/requests/api/graphql/ci/groups_spec.rb b/spec/requests/api/graphql/ci/groups_spec.rb
new file mode 100644
index 00000000000..9e81358a152
--- /dev/null
+++ b/spec/requests/api/graphql/ci/groups_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipeline.stages.groups' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let(:group_graphql_data) { graphql_data.dig('project', 'pipeline', 'stages', 'nodes', 0, 'groups', 'nodes') }
+
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('CiGroup')}
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ nodes {
+ groups {
+ #{fields}
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ create(:commit_status, pipeline: pipeline, name: 'rspec 0 2')
+ create(:commit_status, pipeline: pipeline, name: 'rspec 0 1')
+ create(:commit_status, pipeline: pipeline, name: 'spinach 0 1')
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns an array of jobs belonging to a pipeline' do
+ expect(group_graphql_data.map { |g| g.slice('name', 'size') }).to eq([
+ { 'name' => 'rspec', 'size' => 2 },
+ { 'name' => 'spinach', 'size' => 1 }
+ ])
+ end
+end
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
new file mode 100644
index 00000000000..7d416f4720b
--- /dev/null
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipeline.stages.groups.jobs' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:user) }
+ let(:pipeline) do
+ pipeline = create(:ci_pipeline, project: project, user: user)
+ stage = create(:ci_stage_entity, pipeline: pipeline, name: 'first')
+ create(:commit_status, stage_id: stage.id, pipeline: pipeline, name: 'my test job')
+
+ pipeline
+ end
+
+ def first(field)
+ [field.pluralize, 'nodes', 0]
+ end
+
+ let(:jobs_graphql_data) { graphql_data.dig(*%w[project pipeline], *first('stage'), *first('group'), 'jobs', 'nodes') }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ nodes {
+ name
+ groups {
+ nodes {
+ name
+ jobs {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'returns the jobs of a pipeline stage' do
+ post_graphql(query, current_user: user)
+
+ expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job'))
+ end
+
+ context 'when fetching jobs from the pipeline' do
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: user)
+ end
+
+ build_stage = create(:ci_stage_entity, name: 'build', pipeline: pipeline)
+ test_stage = create(:ci_stage_entity, name: 'test', pipeline: pipeline)
+ create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 1 2')
+ create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 2 2')
+ create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 1 2')
+ create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 2 2')
+
+ expect do
+ post_graphql(query, current_user: user)
+ end.not_to exceed_query_limit(control_count)
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ build_stage = graphql_data.dig('project', 'pipeline', 'stages', 'nodes').find do |stage|
+ stage['name'] == 'build'
+ end
+ test_stage = graphql_data.dig('project', 'pipeline', 'stages', 'nodes').find do |stage|
+ stage['name'] == 'test'
+ end
+ docker_group = build_stage.dig('groups', 'nodes').first
+ rspec_group = test_stage.dig('groups', 'nodes').first
+
+ expect(docker_group['name']).to eq('docker')
+ expect(rspec_group['name']).to eq('rspec')
+
+ docker_jobs = docker_group.dig('jobs', 'nodes')
+ rspec_jobs = rspec_group.dig('jobs', 'nodes')
+
+ expect(docker_jobs).to eq([{ 'name' => 'docker 1 2' }, { 'name' => 'docker 2 2' }])
+ expect(rspec_jobs).to eq([{ 'name' => 'rspec 1 2' }, { 'name' => 'rspec 2 2' }])
+ end
+ end
+end
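
The N+1 guard above relies on GitLab's ActiveRecord::QueryRecorder spec helper and the exceed_query_limit matcher: record a baseline query count, create more stages and jobs, and assert the count does not grow. Under the hood that amounts to counting sql.active_record notifications, roughly like this simplified, non-GitLab sketch (the real helper does more, such as filtering cached queries):

    # Count SQL statements issued while the block runs.
    def count_queries
      count = 0
      subscription = ActiveSupport::Notifications.subscribe('sql.active_record') { count += 1 }
      yield
      count
    ensure
      ActiveSupport::Notifications.unsubscribe(subscription)
    end

    baseline = count_queries { post_graphql(query, current_user: user) }
    # ...create extra stages and jobs here...
    expect(count_queries { post_graphql(query, current_user: user) }).to be <= baseline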
diff --git a/spec/requests/api/graphql/ci/stages_spec.rb b/spec/requests/api/graphql/ci/stages_spec.rb
new file mode 100644
index 00000000000..cd48a24b9c8
--- /dev/null
+++ b/spec/requests/api/graphql/ci/stages_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipeline.stages' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let(:stage_graphql_data) { graphql_data['project']['pipeline']['stages'] }
+
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('CiStage')}
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ #{fields}
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ create(:ci_stage_entity, pipeline: pipeline, name: 'deploy')
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns the stage of a pipeline' do
+ expect(stage_graphql_data['nodes'].first['name']).to eq('deploy')
+ end
+end
diff --git a/spec/requests/api/graphql/issue_status_counts_spec.rb b/spec/requests/api/graphql/issue_status_counts_spec.rb
new file mode 100644
index 00000000000..3d8817c3bc5
--- /dev/null
+++ b/spec/requests/api/graphql/issue_status_counts_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'getting Issue counts by status' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:issue_opened) { create(:issue, project: project) }
+ let_it_be(:issue_closed) { create(:issue, :closed, project: project) }
+ let_it_be(:other_project_issue) { create(:issue) }
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ #{all_graphql_fields_for('IssueStatusCountsType'.classify)}
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('issueStatusCounts', params, fields)
+ )
+ end
+
+ context 'with issue count data' do
+ let(:issue_counts) { graphql_data.dig('project', 'issueStatusCounts') }
+
+ context 'without project permissions' do
+ let(:user) { create(:user) }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+ it { expect(issue_counts).to be nil }
+ end
+
+ context 'with project permissions' do
+ before do
+ project.add_developer(current_user)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+ it 'returns the correct counts for each status' do
+ expect(issue_counts).to eq(
+ 'all' => 2,
+ 'opened' => 1,
+ 'closed' => 1
+ )
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
index c47920087dc..ca5a9165760 100644
--- a/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
+++ b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
@@ -30,22 +30,22 @@ RSpec.describe 'Getting Metrics Dashboard Annotations' do
let(:query) do
%(
- query {
- project(fullPath:"#{project.full_path}") {
- environments(name: "#{environment.name}") {
- nodes {
- metricsDashboard(path: "#{path}"){
- annotations(#{args}){
- nodes {
- #{fields}
- }
- }
+ query {
+ project(fullPath: "#{project.full_path}") {
+ environments(name: "#{environment.name}") {
+ nodes {
+ metricsDashboard(path: "#{path}") {
+ annotations(#{args}) {
+ nodes {
+ #{fields}
}
}
}
}
}
- )
+ }
+ }
+ )
end
before do
diff --git a/spec/requests/api/graphql/milestone_spec.rb b/spec/requests/api/graphql/milestone_spec.rb
new file mode 100644
index 00000000000..59de116fa2b
--- /dev/null
+++ b/spec/requests/api/graphql/milestone_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Querying a Milestone' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
+ let(:query) do
+ graphql_query_for('milestone', { id: milestone.to_global_id.to_s }, 'title')
+ end
+
+ subject { graphql_data['milestone'] }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'when the user has access to the milestone' do
+ before_all do
+ project.add_guest(current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to include('title' => milestone.name) }
+ end
+
+ context 'when the user does not have access to the milestone' do
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when ID argument is missing' do
+ let(:query) do
+ graphql_query_for('milestone', {}, 'title')
+ end
+
+ it 'raises an exception' do
+ expect(graphql_errors).to include(a_hash_including('message' => "Field 'milestone' is missing required arguments: id"))
+ end
+ end
+end
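
graphql_query_for is a spec helper that assembles the query document from a field name, its arguments and a selection set; for the milestone query above, the document posted to the GraphQL endpoint comes out roughly as follows (placeholder global ID):

    query = <<~GRAPHQL
      query {
        milestone(id: "gid://gitlab/Milestone/7") {
          title
        }
      }
    GRAPHQL
    post_graphql(query, current_user: current_user)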
diff --git a/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
new file mode 100644
index 00000000000..e24ab0b07f2
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Reposition and move issue within board lists' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:testing) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:list1) { create(:list, board: board, label: development, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: testing, position: 1) }
+ let_it_be(:existing_issue1) { create(:labeled_issue, project: project, labels: [testing], relative_position: 10) }
+ let_it_be(:existing_issue2) { create(:labeled_issue, project: project, labels: [testing], relative_position: 50) }
+ let_it_be(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+
+ let(:mutation_class) { Mutations::Boards::Issues::IssueMoveList }
+ let(:mutation_name) { mutation_class.graphql_name }
+ let(:mutation_result_identifier) { mutation_name.camelize(:lower) }
+ let(:current_user) { user }
+ let(:params) { { board_id: board.to_global_id.to_s, project_path: project.full_path, iid: issue1.iid.to_s } }
+ let(:issue_move_params) do
+ {
+ from_list_id: list1.id,
+ to_list_id: list2.id
+ }
+ end
+
+ before_all do
+ group.add_maintainer(user)
+ end
+
+ shared_examples 'returns an error' do
+ it 'fails with error' do
+ message = "The resource that you are attempting to access does not exist or you don't have "\
+ "permission to perform this action"
+
+ post_graphql_mutation(mutation(params), current_user: current_user)
+
+ expect(graphql_errors).to include(a_hash_including('message' => message))
+ end
+ end
+
+ context 'when user has access to resources' do
+ context 'when repositioning an issue' do
+ let(:issue_move_params) { { move_after_id: existing_issue1.id, move_before_id: existing_issue2.id } }
+
+ it 'repositions an issue' do
+ post_graphql_mutation(mutation(params), current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ response_issue = json_response['data'][mutation_result_identifier]['issue']
+ expect(response_issue['iid']).to eq(issue1.iid.to_s)
+ expect(response_issue['relativePosition']).to be > existing_issue1.relative_position
+ expect(response_issue['relativePosition']).to be < existing_issue2.relative_position
+ end
+ end
+
+ context 'when moving an issue to a different list' do
+ let(:issue_move_params) { { from_list_id: list1.id, to_list_id: list2.id } }
+
+ it 'moves issue to a different list' do
+ post_graphql_mutation(mutation(params), current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ response_issue = json_response['data'][mutation_result_identifier]['issue']
+ expect(response_issue['iid']).to eq(issue1.iid.to_s)
+ expect(response_issue['labels']['edges'][0]['node']['title']).to eq(testing.title)
+ end
+ end
+ end
+
+ context 'when user has no access to resources' do
+ context 'the user is not allowed to update the issue' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'returns an error'
+ end
+
+ context 'when the user can not read board' do
+ let(:board) { create(:board, group: create(:group, :private)) }
+
+ it_behaves_like 'returns an error'
+ end
+ end
+
+ def mutation(additional_params = {})
+ graphql_mutation(mutation_name, issue_move_params.merge(additional_params),
+ <<-QL.strip_heredoc
+ clientMutationId
+ issue {
+ iid,
+ relativePosition
+ labels {
+ edges {
+ node {
+ title
+ }
+ }
+ }
+ }
+ errors
+ QL
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb b/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
new file mode 100644
index 00000000000..8a6d2cb3994
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update of an existing board list' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
+ let_it_be(:list2) { create(:list, board: board) }
+ let_it_be(:input) { { list_id: list.to_global_id.to_s, position: 1, collapsed: true } }
+ let(:mutation) { graphql_mutation(:update_board_list, input) }
+ let(:mutation_response) { graphql_mutation_response(:update_board_list) }
+
+ context 'the user is not allowed to read board lists' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+ end
+
+ before do
+ list.update_preferences_for(current_user, collapsed: false)
+ end
+
+ context 'when user has permissions to admin board lists' do
+ before do
+ group.add_reporter(current_user)
+ end
+
+ it 'updates the list position and collapsed state' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['list']).to include(
+ 'position' => 1,
+ 'collapsed' => true
+ )
+ end
+ end
+
+ context 'when user has permissions to read board lists' do
+ before do
+ group.add_guest(current_user)
+ end
+
+ it 'updates the list collapsed state but not the list position' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['list']).to include(
+ 'position' => 0,
+ 'collapsed' => true
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb b/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
index bc1b42d68e6..7bef812bfec 100644
--- a/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'Updating the container expiration policy' do
older_than: 'FOURTEEN_DAYS'
}
end
+
let(:mutation) do
graphql_mutation(:update_container_expiration_policy, params,
<<~QL
@@ -32,6 +33,7 @@ RSpec.describe 'Updating the container expiration policy' do
QL
)
end
+
let(:mutation_response) { graphql_mutation_response(:update_container_expiration_policy) }
let(:container_expiration_policy_response) { mutation_response['containerExpirationPolicy'] }
diff --git a/spec/requests/api/graphql/mutations/design_management/move_spec.rb b/spec/requests/api/graphql/mutations/design_management/move_spec.rb
new file mode 100644
index 00000000000..dd121ec733e
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/design_management/move_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe "moving designs" do
+ include GraphqlHelpers
+ include DesignManagementTestHelpers
+
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:designs) { create_list(:design, 3, :with_versions, :with_relative_position, issue: issue) }
+ let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
+
+ let(:user) { developer }
+
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+ let(:mutation_name) { :design_management_move }
+
+ let(:mutation) do
+ input = {
+ id: current_design.to_global_id.to_s,
+ previous: previous_design&.to_global_id&.to_s,
+ next: next_design&.to_global_id&.to_s
+ }.compact
+
+ graphql_mutation(mutation_name, input, <<~FIELDS)
+ errors
+ designCollection {
+ designs {
+ nodes {
+ filename
+ }
+ }
+ }
+ FIELDS
+ end
+
+ let(:move_designs) { post_graphql_mutation(mutation, current_user: user) }
+ let(:mutation_response) { graphql_mutation_response(mutation_name) }
+
+ before do
+ enable_design_management
+ designs.each(&:reset)
+ issue.reset
+ end
+
+ shared_examples 'a successful move' do
+ it 'does not error, and reports the current order' do
+ move_designs
+
+ expect(graphql_errors).not_to be_present
+
+ expect(mutation_response).to eq(
+ 'errors' => [],
+ 'designCollection' => {
+ 'designs' => {
+ 'nodes' => new_order.map { |d| { 'filename' => d.filename } }
+ }
+ }
+ )
+ end
+ end
+
+ context 'the user is not allowed to move designs' do
+ let(:user) { create(:user) }
+
+ it 'returns an error' do
+ move_designs
+
+ expect(graphql_errors).to be_present
+ end
+ end
+
+ context 'the neighbors do not have positions' do
+ let!(:previous_design) { create(:design, :with_versions, issue: issue) }
+ let!(:next_design) { create(:design, :with_versions, issue: issue) }
+
+ let(:new_order) do
+ [
+ designs.second,
+ designs.third,
+ previous_design, current_design, next_design
+ ]
+ end
+
+ it_behaves_like 'a successful move'
+
+ it 'maintains the correct order in the presence of other unpositioned designs' do
+ other_design = create(:design, :with_versions, issue: issue)
+
+ move_designs
+ moved_designs = mutation_response.dig('designCollection', 'designs', 'nodes')
+
+ expect(moved_designs.map { |d| d['filename'] })
+ .to eq([*new_order.map(&:filename), other_design.filename])
+ end
+ end
+
+ context 'moving a design between two others' do
+ let(:new_order) { [designs.second, designs.first, designs.third] }
+
+ it_behaves_like 'a successful move'
+ end
+
+ context 'moving a design to the start' do
+ let(:current_design) { designs.last }
+ let(:next_design) { designs.first }
+ let(:previous_design) { nil }
+ let(:new_order) { [designs.last, designs.first, designs.second] }
+
+ it_behaves_like 'a successful move'
+ end
+
+ context 'moving a design to the end' do
+ let(:current_design) { designs.first }
+ let(:next_design) { nil }
+ let(:previous_design) { designs.last }
+ let(:new_order) { [designs.second, designs.third, designs.first] }
+
+ it_behaves_like 'a successful move'
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb b/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
index e83da830935..457c37e900b 100644
--- a/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
+++ b/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
@@ -10,9 +10,11 @@ RSpec.describe 'Toggling the resolve status of a discussion' do
let(:discussion) do
create(:diff_note_on_merge_request, noteable: noteable, project: project).to_discussion
end
+
let(:mutation) do
graphql_mutation(:discussion_toggle_resolve, { id: discussion.to_global_id.to_s, resolve: true })
end
+
let(:mutation_response) { graphql_mutation_response(:discussion_toggle_resolve) }
context 'when the user does not have permission' do
diff --git a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
index 3dd1225db5a..b3c9b9d4995 100644
--- a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
@@ -49,13 +49,13 @@ RSpec.describe 'Setting Due Date of an issue' do
expect(mutation_response['issue']['dueDate']).to eq(2.days.since.to_date.to_s)
end
- context 'when passing due date without a date value' do
+ context 'when the due date value is not a valid time' do
let(:input) { { due_date: 'test' } }
- it 'returns internal server error' do
+ it 'returns a coercion error' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(graphql_errors).to include(a_hash_including('message' => 'Internal server error'))
+ expect(graphql_errors).to include(a_hash_including('message' => /provided invalid value for dueDate/))
end
end
end
diff --git a/spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb b/spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb
new file mode 100644
index 00000000000..1edc1e0553b
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Setting subscribed status of an issue' do
+ include GraphqlHelpers
+
+ it_behaves_like 'a subscribable resource api' do
+ let_it_be(:resource) { create(:issue) }
+ let(:mutation_name) { :issue_set_subscription }
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
new file mode 100644
index 00000000000..fd983c683be
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update of an existing issue' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let(:input) do
+ {
+ project_path: project.full_path,
+ iid: issue.iid.to_s,
+ locked: true
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:update_issue, input) }
+ let(:mutation_response) { graphql_mutation_response(:update_issue) }
+
+ context 'when the user is not allowed to update the issue' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+ end
+
+ context 'when the user has permission to update the issue' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'updates the issue' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['issue']).to include(
+ 'discussionLocked' => true
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
index d4ac639e226..9297ca054c7 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Creation of a new merge request' do
target_branch: target_branch
}
end
+
let(:title) { 'MergeRequest' }
let(:source_branch) { 'new_branch' }
let(:target_branch) { 'master' }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
index 6b3035fbf48..d90faa605c0 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -5,59 +5,8 @@ require 'spec_helper'
RSpec.describe 'Setting subscribed status of a merge request' do
include GraphqlHelpers
- let(:current_user) { create(:user) }
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.project }
- let(:input) { { subscribed_state: true } }
-
- let(:mutation) do
- variables = {
- project_path: project.full_path,
- iid: merge_request.iid.to_s
- }
- graphql_mutation(:merge_request_set_subscription, variables.merge(input),
- <<-QL.strip_heredoc
- clientMutationId
- errors
- mergeRequest {
- id
- subscribed
- }
- QL
- )
- end
-
- def mutation_response
- graphql_mutation_response(:merge_request_set_subscription)['mergeRequest']['subscribed']
- end
-
- before do
- project.add_developer(current_user)
- end
-
- it 'returns an error if the user is not allowed to update the merge request' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- end
-
- it 'marks the merge request as WIP' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response).to eq(true)
- end
-
- context 'when passing subscribe false as input' do
- let(:input) { { subscribed_state: false } }
-
- it 'unmarks the merge request as subscribed' do
- merge_request.subscribe(current_user, project)
-
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response).to eq(false)
- end
+ it_behaves_like 'a subscribable resource api' do
+ let_it_be(:resource) { create(:merge_request) }
+ let(:mutation_name) { :merge_request_set_subscription }
end
end
diff --git a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
index f7be671e5f3..463a872d95d 100644
--- a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe 'Updating an image DiffNote' do
y: updated_y
}
end
+
let!(:diff_note) do
create(:image_diff_note_on_merge_request,
noteable: noteable,
@@ -40,6 +41,7 @@ RSpec.describe 'Updating an image DiffNote' do
note: original_body,
position: original_position)
end
+
let(:mutation) do
variables = {
id: GitlabSchema.id_from_object(diff_note).to_s,
diff --git a/spec/requests/api/graphql/mutations/notes/update/note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
index 38378310d9f..0d93afe9434 100644
--- a/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
@@ -8,11 +8,9 @@ RSpec.describe 'Updating a Note' do
let!(:note) { create(:note, note: original_body) }
let(:original_body) { 'Initial body text' }
let(:updated_body) { 'Updated body text' }
+ let(:params) { { body: updated_body, confidential: true } }
let(:mutation) do
- variables = {
- id: GitlabSchema.id_from_object(note).to_s,
- body: updated_body
- }
+ variables = params.merge(id: GitlabSchema.id_from_object(note).to_s)
graphql_mutation(:update_note, variables)
end
@@ -31,6 +29,7 @@ RSpec.describe 'Updating a Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(note.reload.note).to eq(original_body)
+ expect(note.confidential).to be_falsey
end
end
@@ -43,12 +42,40 @@ RSpec.describe 'Updating a Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(note.reload.note).to eq(updated_body)
+ expect(note.confidential).to be_truthy
end
it 'returns the updated Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['note']['body']).to eq(updated_body)
+ expect(mutation_response['note']['confidential']).to be_truthy
+ end
+
+ context 'when only confidential param is present' do
+ let(:params) { { confidential: true } }
+
+ it 'updates only the note confidentiality' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(original_body)
+ expect(note.confidential).to be_truthy
+ end
+ end
+
+ context 'when only body param is present' do
+ let(:params) { { body: updated_body } }
+
+ before do
+ note.update_column(:confidential, true)
+ end
+
+ it 'updates only the note body' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(updated_body)
+ expect(note.confidential).to be_truthy
+ end
end
context 'when there are ActiveRecord validation errors' do
@@ -60,12 +87,14 @@ RSpec.describe 'Updating a Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(note.reload.note).to eq(original_body)
+ expect(note.confidential).to be_falsey
end
- it 'returns the Note with its original body' do
+ it 'returns the original Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['note']['body']).to eq(original_body)
+ expect(mutation_response['note']['confidential']).to be_falsey
end
end
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index e2474e1bcce..56a5f4907c1 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe 'Creating a Snippet' do
visibility_level: visibility_level,
project_path: project_path,
title: title,
- files: actions
+ blob_actions: actions
}
end
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 3b2f9dc0f19..3f39c0ab851 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe 'Updating a Snippet' do
title: updated_title
}
end
+
let(:mutation) do
graphql_mutation(:update_snippet, mutation_vars)
end
@@ -157,7 +158,7 @@ RSpec.describe 'Updating a Snippet' do
let(:mutation_vars) do
{
id: snippet_gid,
- files: [
+ blob_actions: [
{ action: :update, filePath: updated_file, content: updated_content },
{ action: :delete, filePath: deleted_file }
]
diff --git a/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
new file mode 100644
index 00000000000..3a9077061ad
--- /dev/null
+++ b/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting Alert Management Alert Todos' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project) }
+ let_it_be(:other_alert) { create(:alert_management_alert, project: project) }
+ let_it_be(:todo) { create(:todo, :pending, target: alert, user: current_user, project: project) }
+ let_it_be(:other_todo) { create(:todo, :pending, target: other_alert, user: current_user, project: project) }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ iid
+ todos {
+ nodes {
+ id
+ }
+ }
+ }
+ QUERY
+ end
+
+ let(:graphql_query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('alertManagementAlerts', {}, fields)
+ )
+ end
+
+ let(:gql_alerts) { graphql_data.dig('project', 'alertManagementAlerts', 'nodes') }
+ let(:gql_todos) { gql_alerts.map { |gql_alert| [gql_alert['iid'], gql_alert['todos']['nodes']] }.to_h }
+ let(:gql_alert_todo) { gql_todos[alert.iid.to_s].first }
+ let(:gql_other_alert_todo) { gql_todos[other_alert.iid.to_s].first }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'includes the pending todo for each alert' do
+ post_graphql(graphql_query, current_user: current_user)
+
+ expect(gql_alert_todo['id']).to eq(todo.to_global_id.to_s)
+ expect(gql_other_alert_todo['id']).to eq(other_todo.to_global_id.to_s)
+ end
+end
diff --git a/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb b/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
index b62215f43fb..9fbf5aaa41f 100644
--- a/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'getting Alert Management Alert counts by status' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:alert_1) { create(:alert_management_alert, :resolved, project: project) }
- let_it_be(:alert_2) { create(:alert_management_alert, project: project) }
+ let_it_be(:alert_resolved) { create(:alert_management_alert, :resolved, project: project) }
+ let_it_be(:alert_triggered) { create(:alert_management_alert, project: project) }
let_it_be(:other_project_alert) { create(:alert_management_alert) }
let(:params) { {} }
@@ -58,7 +58,7 @@ RSpec.describe 'getting Alert Management Alert counts by status' do
end
context 'with search criteria' do
- let(:params) { { search: alert_1.title } }
+ let(:params) { { search: alert_resolved.title } }
it_behaves_like 'a working graphql query'
it 'returns the correct counts for each status' do
diff --git a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
index f050c6873f3..d3a2e6a1deb 100644
--- a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe 'getting Alert Management Alerts' do
include GraphqlHelpers
- let_it_be(:payload) { { 'custom' => { 'alert' => 'payload' } } }
+ let_it_be(:payload) { { 'custom' => { 'alert' => 'payload' }, 'runbook' => 'runbook' } }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:resolved_alert) { create(:alert_management_alert, :all_fields, :resolved, project: project, issue: nil, severity: :low) }
- let_it_be(:triggered_alert) { create(:alert_management_alert, :all_fields, project: project, severity: :critical, payload: payload) }
- let_it_be(:other_project_alert) { create(:alert_management_alert, :all_fields) }
+ let_it_be(:resolved_alert) { create(:alert_management_alert, :all_fields, :resolved, project: project, issue: nil, severity: :low).present }
+ let_it_be(:triggered_alert) { create(:alert_management_alert, :all_fields, project: project, severity: :critical, payload: payload).present }
+ let_it_be(:other_project_alert) { create(:alert_management_alert, :all_fields).present }
let(:params) { {} }
@@ -71,10 +71,13 @@ RSpec.describe 'getting Alert Management Alerts' do
'eventCount' => triggered_alert.events,
'startedAt' => triggered_alert.started_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
'endedAt' => nil,
- 'details' => { 'custom.alert' => 'payload' },
+ 'details' => { 'custom.alert' => 'payload', 'runbook' => 'runbook' },
'createdAt' => triggered_alert.created_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
'updatedAt' => triggered_alert.updated_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
- 'metricsDashboardUrl' => nil
+ 'metricsDashboardUrl' => nil,
+ 'detailsUrl' => triggered_alert.details_url,
+ 'prometheusAlert' => nil,
+ 'runbook' => 'runbook'
)
expect(second_alert).to include(
diff --git a/spec/requests/api/graphql/project/container_expiration_policy_spec.rb b/spec/requests/api/graphql/project/container_expiration_policy_spec.rb
index b064e4d43e9..dc16847a669 100644
--- a/spec/requests/api/graphql/project/container_expiration_policy_spec.rb
+++ b/spec/requests/api/graphql/project/container_expiration_policy_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'getting a repository in a project' do
#{all_graphql_fields_for('container_expiration_policy'.classify)}
QUERY
end
+
let(:query) do
graphql_query_for(
'project',
diff --git a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
index e47c025f8b2..decab900a43 100644
--- a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
+++ b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
@@ -24,12 +24,14 @@ RSpec.describe 'Getting designs related to an issue' do
}
NODE
end
+
let(:issue) { design.issue }
let(:project) { issue.project }
let(:query) { make_query }
let(:design_collection) do
graphql_data_at(:project, :issue, :design_collection)
end
+
let(:design_response) do
design_collection.dig('designs', 'edges').first['node']
end
@@ -185,6 +187,7 @@ RSpec.describe 'Getting designs related to an issue' do
}
NODE
end
+
let(:design_response) do
design_collection['designs']['edges']
end
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index cdfff2f50d4..06e613a09bc 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'getting an issue list for a project' do
[create(:issue, project: project, discussion_locked: true),
create(:issue, project: project)]
end
+
let(:fields) do
<<~QUERY
edges {
diff --git a/spec/requests/api/graphql/project/jira_import_spec.rb b/spec/requests/api/graphql/project/jira_import_spec.rb
index 814965262b6..1cc30b95162 100644
--- a/spec/requests/api/graphql/project/jira_import_spec.rb
+++ b/spec/requests/api/graphql/project/jira_import_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe 'query Jira import data' do
}
)
end
+
let(:jira_imports) { graphql_data.dig('project', 'jiraImports', 'nodes')}
let(:jira_import_status) { graphql_data.dig('project', 'jiraImportStatus')}
@@ -106,7 +107,7 @@ RSpec.describe 'query Jira import data' do
let(:query) do
%(
query {
- project(fullPath:"#{project.full_path}") {
+ project(fullPath: "#{project.full_path}") {
jiraImports(first: 1) {
nodes {
jiraProjectKey
@@ -132,7 +133,7 @@ RSpec.describe 'query Jira import data' do
let(:query) do
%(
query {
- project(fullPath:"#{project.full_path}") {
+ project(fullPath: "#{project.full_path}") {
jiraImports(last: 1) {
nodes {
jiraProjectKey
diff --git a/spec/requests/api/graphql/project/jira_projects_spec.rb b/spec/requests/api/graphql/project/jira_projects_spec.rb
index d5f59711ab1..410d5b21505 100644
--- a/spec/requests/api/graphql/project/jira_projects_spec.rb
+++ b/spec/requests/api/graphql/project/jira_projects_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'query Jira projects' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- include_context 'jira projects request context'
+ include_context 'Jira projects request context'
let(:services) { graphql_data_at(:project, :services, :edges) }
let(:jira_projects) { services.first.dig('node', 'projects', 'nodes') }
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index e2255fdb048..bb63a5994b0 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -171,4 +171,43 @@ RSpec.describe 'getting merge request listings nested in a project' do
it_behaves_like 'searching with parameters'
end
+
+ describe 'fields' do
+ let(:requested_fields) { nil }
+ let(:extra_iid_for_second_query) { merge_request_c.iid.to_s }
+ let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_b.iid.to_s] } }
+
+ def execute_query
+ query = query_merge_requests(requested_fields)
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'when requesting `commit_count`' do
+ let(:requested_fields) { [:commit_count] }
+
+ it 'exposes `commit_count`' do
+ merge_request_a.metrics.update!(commits_count: 5)
+
+ execute_query
+
+ expect(results).to include(a_hash_including('commitCount' => 5))
+ end
+
+ include_examples 'N+1 query check'
+ end
+
+ context 'when requesting `merged_at`' do
+ let(:requested_fields) { [:merged_at] }
+
+ before do
+ # make the MRs "merged"
+ [merge_request_a, merge_request_b, merge_request_c].each do |mr|
+ mr.update_column(:state_id, MergeRequest.available_states[:merged])
+ mr.metrics.update_column(:merged_at, Time.now)
+ end
+ end
+
+ include_examples 'N+1 query check'
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/packages_spec.rb b/spec/requests/api/graphql/project/packages_spec.rb
index 88f97f9256b..5df98ed1e6b 100644
--- a/spec/requests/api/graphql/project/packages_spec.rb
+++ b/spec/requests/api/graphql/project/packages_spec.rb
@@ -28,42 +28,40 @@ RSpec.describe 'getting a package list for a project' do
)
end
- context 'without the need for a license' do
- context 'when user has access to the project' do
- before do
- project.add_reporter(current_user)
- post_graphql(query, current_user: current_user)
- end
+ context 'when user has access to the project' do
+ before do
+ project.add_reporter(current_user)
+ post_graphql(query, current_user: current_user)
+ end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
- it 'returns packages successfully' do
- expect(packages_data[0]['node']['name']).to eq package.name
- end
+ it 'returns packages successfully' do
+ expect(packages_data[0]['node']['name']).to eq package.name
end
+ end
- context 'when the user does not have access to the project/packages' do
- before do
- post_graphql(query, current_user: current_user)
- end
+ context 'when the user does not have access to the project/packages' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
- it 'returns nil' do
- expect(graphql_data['project']).to be_nil
- end
+ it 'returns nil' do
+ expect(graphql_data['project']).to be_nil
end
+ end
- context 'when the user is not autenthicated' do
- before do
- post_graphql(query)
- end
+ context 'when the user is not authenticated' do
+ before do
+ post_graphql(query)
+ end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
- it 'returns nil' do
- expect(graphql_data['project']).to be_nil
- end
+ it 'returns nil' do
+ expect(graphql_data['project']).to be_nil
end
end
end
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index 57b9de25c3d..fef0e7e160c 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -29,4 +29,10 @@ RSpec.describe 'getting pipeline information nested in a project' do
expect(pipeline_graphql_data).not_to be_nil
end
+
+ it 'contains configSource' do
+ post_graphql(query, current_user: current_user)
+
+ expect(pipeline_graphql_data.dig('configSource')).to eq('UNKNOWN_SOURCE')
+ end
end
diff --git a/spec/requests/api/graphql/project/repository_spec.rb b/spec/requests/api/graphql/project/repository_spec.rb
index bd719a69647..a4984688557 100644
--- a/spec/requests/api/graphql/project/repository_spec.rb
+++ b/spec/requests/api/graphql/project/repository_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'getting a repository in a project' do
#{all_graphql_fields_for('repository'.classify)}
QUERY
end
+
let(:query) do
graphql_query_for(
'project',
diff --git a/spec/requests/api/graphql/project/tree/tree_spec.rb b/spec/requests/api/graphql/project/tree/tree_spec.rb
index bce63d57c38..f4cd316da96 100644
--- a/spec/requests/api/graphql/project/tree/tree_spec.rb
+++ b/spec/requests/api/graphql/project/tree/tree_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'getting a tree in a project' do
}
QUERY
end
+
let(:query) do
graphql_query_for(
'project',
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index b115030afbc..c6049e098be 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -76,16 +76,16 @@ RSpec.describe 'getting project information' do
def run_query(number)
q = <<~GQL
- query {
- project(fullPath: "#{project.full_path}") {
- mergeRequests(first: #{number}) {
- nodes {
- assignees { nodes { username } }
- headPipeline { status }
+ query {
+ project(fullPath: "#{project.full_path}") {
+ mergeRequests(first: #{number}) {
+ nodes {
+ assignees { nodes { username } }
+ headPipeline { status }
+ }
}
}
}
- }
GQL
post_graphql(q, current_user: current_user)
diff --git a/spec/requests/api/graphql/user_query_spec.rb b/spec/requests/api/graphql/user_query_spec.rb
index 7ba1788a9ef..2f4dc0a9160 100644
--- a/spec/requests/api/graphql/user_query_spec.rb
+++ b/spec/requests/api/graphql/user_query_spec.rb
@@ -75,7 +75,9 @@ RSpec.describe 'getting user information' do
'name' => presenter.name,
'username' => presenter.username,
'webUrl' => presenter.web_url,
- 'avatarUrl' => presenter.avatar_url
+ 'avatarUrl' => presenter.avatar_url,
+ 'status' => presenter.status,
+ 'email' => presenter.email
))
end
@@ -83,6 +85,7 @@ RSpec.describe 'getting user information' do
let(:user_fields) do
query_graphql_field(:assigned_merge_requests, mr_args, 'nodes { id }')
end
+
let(:mr_args) { nil }
it_behaves_like 'a working graphql query'
@@ -145,6 +148,7 @@ RSpec.describe 'getting user information' do
let(:user_fields) do
query_graphql_field(:authored_merge_requests, mr_args, 'nodes { id }')
end
+
let(:mr_args) { nil }
it_behaves_like 'a working graphql query'
diff --git a/spec/requests/api/group_import_spec.rb b/spec/requests/api/group_import_spec.rb
index ad67f737725..cb63206fcb8 100644
--- a/spec/requests/api/group_import_spec.rb
+++ b/spec/requests/api/group_import_spec.rb
@@ -217,12 +217,14 @@ RSpec.describe API::GroupImport do
let!(:fog_connection) do
stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
end
+
let(:tmp_object) do
fog_connection.directories.new(key: 'uploads').files.create(
key: "tmp/uploads/#{file_name}",
body: file_upload
)
end
+
let(:fog_file) { fog_to_uploaded_file(tmp_object) }
let(:params) do
{
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index 2b361f2b503..7ed6e1a295f 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -3,15 +3,65 @@
require 'spec_helper'
RSpec.describe API::GroupMilestones do
- let(:user) { create(:user) }
- let(:group) { create(:group, :private) }
- let(:project) { create(:project, namespace: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let!(:closed_milestone) { create(:closed_milestone, group: group, title: 'version1', description: 'closed milestone') }
- let!(:milestone) { create(:milestone, group: group, title: 'version2', description: 'open milestone') }
-
- it_behaves_like 'group and project milestones', "/groups/:id/milestones" do
- let(:route) { "/groups/#{group.id}/milestones" }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:group_member) { create(:group_member, group: group, user: user) }
+ let_it_be(:closed_milestone) { create(:closed_milestone, group: group, title: 'version1', description: 'closed milestone') }
+ let_it_be(:milestone) { create(:milestone, group: group, title: 'version2', description: 'open milestone') }
+ let(:route) { "/groups/#{group.id}/milestones" }
+
+ it_behaves_like 'group and project milestones', "/groups/:id/milestones"
+
+ describe 'GET /groups/:id/milestones' do
+ context 'when include_parent_milestones is true' do
+ let_it_be(:ancestor_group) { create(:group, :private) }
+ let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group) }
+ let_it_be(:params) { { include_parent_milestones: true } }
+
+ before_all do
+ group.update(parent: ancestor_group)
+ end
+
+ shared_examples 'listing all milestones' do
+ it 'returns the correct list of milestones' do
+ get api(route, user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(milestones.size)
+ expect(json_response.map { |entry| entry["id"] }).to eq(milestones.map(&:id))
+ end
+ end
+
+ context 'when user has access to ancestor groups' do
+ let(:milestones) { [ancestor_group_milestone, milestone, closed_milestone] }
+
+ before do
+ ancestor_group.add_guest(user)
+ group.add_guest(user)
+ end
+
+ it_behaves_like 'listing all milestones'
+
+ context 'when iids param is present' do
+ let_it_be(:params) { { include_parent_milestones: true, iids: [milestone.iid] } }
+
+ it_behaves_like 'listing all milestones'
+ end
+ end
+
+ context 'when user has no access to ancestor groups' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_guest(user)
+ end
+
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [milestone, closed_milestone] }
+ end
+ end
+ end
end
def setup_for_group
diff --git a/spec/requests/api/group_packages_spec.rb b/spec/requests/api/group_packages_spec.rb
index 7c7e8da3fb1..e02f6099637 100644
--- a/spec/requests/api/group_packages_spec.rb
+++ b/spec/requests/api/group_packages_spec.rb
@@ -13,135 +13,133 @@ RSpec.describe API::GroupPackages do
let(:url) { "/groups/#{group.id}/packages" }
let(:package_schema) { 'public_api/v4/packages/group_packages' }
- context 'without the need for a license' do
- context 'with sorting' do
- let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
- let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
- let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
-
- before do
- travel_to(1.day.ago) do
- package3
- end
+ context 'with sorting' do
+ let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
+ let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
+ let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
+
+ before do
+ travel_to(1.day.ago) do
+ package3
end
+ end
- context 'without sorting params' do
- let(:packages) { [package3, package1, package2] }
+ context 'without sorting params' do
+ let(:packages) { [package3, package1, package2] }
- it 'sorts by created_at asc' do
- subject
+ it 'sorts by created_at asc' do
+ subject
- expect(json_response.map { |package| package['id'] }).to eq(packages.map(&:id))
- end
+ expect(json_response.map { |package| package['id'] }).to eq(packages.map(&:id))
end
+ end
- it_behaves_like 'package sorting', 'name' do
- let(:packages) { [package1, package2, package3] }
- end
+ it_behaves_like 'package sorting', 'name' do
+ let(:packages) { [package1, package2, package3] }
+ end
- it_behaves_like 'package sorting', 'created_at' do
- let(:packages) { [package3, package1, package2] }
- end
+ it_behaves_like 'package sorting', 'created_at' do
+ let(:packages) { [package3, package1, package2] }
+ end
- it_behaves_like 'package sorting', 'version' do
- let(:packages) { [package3, package2, package1] }
- end
+ it_behaves_like 'package sorting', 'version' do
+ let(:packages) { [package3, package2, package1] }
+ end
- it_behaves_like 'package sorting', 'type' do
- let(:packages) { [package3, package1, package2] }
- end
+ it_behaves_like 'package sorting', 'type' do
+ let(:packages) { [package3, package1, package2] }
+ end
- it_behaves_like 'package sorting', 'project_path' do
- let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
- let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
+ it_behaves_like 'package sorting', 'project_path' do
+ let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
+ let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
- let(:packages) { [package1, package2, package3, package4] }
- end
+ let(:packages) { [package1, package2, package3, package4] }
end
+ end
- context 'with private group' do
- let!(:package1) { create(:package, project: project) }
- let!(:package2) { create(:package, project: project) }
+ context 'with private group' do
+ let!(:package1) { create(:package, project: project) }
+ let!(:package2) { create(:package, project: project) }
- let(:group) { create(:group, :private) }
- let(:subgroup) { create(:group, :private, parent: group) }
- let(:project) { create(:project, :private, namespace: group) }
- let(:subproject) { create(:project, :private, namespace: subgroup) }
+ let(:group) { create(:group, :private) }
+ let(:subgroup) { create(:group, :private, parent: group) }
+ let(:project) { create(:project, :private, namespace: group) }
+ let(:subproject) { create(:project, :private, namespace: subgroup) }
- context 'with unauthenticated user' do
- it_behaves_like 'rejects packages access', :group, :no_type, :not_found
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects packages access', :group, :no_type, :not_found
+ end
+
+ context 'with authenticated user' do
+ subject { get api(url, user) }
+
+ it_behaves_like 'returns packages', :group, :owner
+ it_behaves_like 'returns packages', :group, :maintainer
+ it_behaves_like 'returns packages', :group, :developer
+ it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'rejects packages access', :group, :guest, :forbidden
- context 'with authenticated user' do
- subject { get api(url, user) }
+ context 'with subgroup' do
+ let(:subgroup) { create(:group, :private, parent: group) }
+ let(:subproject) { create(:project, :private, namespace: subgroup) }
+ let!(:package3) { create(:npm_package, project: subproject) }
- it_behaves_like 'returns packages', :group, :owner
- it_behaves_like 'returns packages', :group, :maintainer
- it_behaves_like 'returns packages', :group, :developer
+ it_behaves_like 'returns packages with subgroups', :group, :owner
+ it_behaves_like 'returns packages with subgroups', :group, :maintainer
+ it_behaves_like 'returns packages with subgroups', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
- context 'with subgroup' do
- let(:subgroup) { create(:group, :private, parent: group) }
- let(:subproject) { create(:project, :private, namespace: subgroup) }
- let!(:package3) { create(:npm_package, project: subproject) }
+ context 'excluding subgroup' do
+ let(:url) { "/groups/#{group.id}/packages?exclude_subgroups=true" }
- it_behaves_like 'returns packages with subgroups', :group, :owner
- it_behaves_like 'returns packages with subgroups', :group, :maintainer
- it_behaves_like 'returns packages with subgroups', :group, :developer
+ it_behaves_like 'returns packages', :group, :owner
+ it_behaves_like 'returns packages', :group, :maintainer
+ it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
-
- context 'excluding subgroup' do
- let(:url) { "/groups/#{group.id}/packages?exclude_subgroups=true" }
-
- it_behaves_like 'returns packages', :group, :owner
- it_behaves_like 'returns packages', :group, :maintainer
- it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
- it_behaves_like 'rejects packages access', :group, :guest, :forbidden
- end
end
end
end
+ end
- context 'with public group' do
- let_it_be(:package1) { create(:package, project: project) }
- let_it_be(:package2) { create(:package, project: project) }
+ context 'with public group' do
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project) }
- context 'with unauthenticated user' do
- it_behaves_like 'returns packages', :group, :no_type
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'returns packages', :group, :no_type
+ end
- context 'with authenticated user' do
- subject { get api(url, user) }
+ context 'with authenticated user' do
+ subject { get api(url, user) }
- it_behaves_like 'returns packages', :group, :owner
- it_behaves_like 'returns packages', :group, :maintainer
- it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'returns packages', :group, :reporter
- it_behaves_like 'returns packages', :group, :guest
- end
+ it_behaves_like 'returns packages', :group, :owner
+ it_behaves_like 'returns packages', :group, :maintainer
+ it_behaves_like 'returns packages', :group, :developer
+ it_behaves_like 'returns packages', :group, :reporter
+ it_behaves_like 'returns packages', :group, :guest
end
+ end
- context 'with pagination params' do
- let_it_be(:package1) { create(:package, project: project) }
- let_it_be(:package2) { create(:package, project: project) }
- let_it_be(:package3) { create(:npm_package, project: project) }
- let_it_be(:package4) { create(:npm_package, project: project) }
+ context 'with pagination params' do
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project) }
+ let_it_be(:package3) { create(:npm_package, project: project) }
+ let_it_be(:package4) { create(:npm_package, project: project) }
- it_behaves_like 'returns paginated packages'
- end
+ it_behaves_like 'returns paginated packages'
+ end
- it_behaves_like 'filters on each package_type', is_project: false
+ it_behaves_like 'filters on each package_type', is_project: false
- context 'does not accept non supported package_type value' do
- include_context 'package filter context'
+ context 'does not accept an unsupported package_type value' do
+ include_context 'package filter context'
- let(:url) { group_filter_url(:type, 'foo') }
+ let(:url) { group_filter_url(:type, 'foo') }
- it_behaves_like 'returning response status', :bad_request
- end
+ it_behaves_like 'returning response status', :bad_request
end
end
end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index c6d6ae1615b..41b013f49ee 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -169,6 +169,14 @@ RSpec.describe API::GroupVariables do
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it 'responds with 400 if the update fails' do
+ put api("/groups/#{group.id}/variables/#{variable.key}", user), params: { value: 'shrt', masked: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(variable.reload.masked).to eq(false)
+ expect(json_response['message']).to eq('value' => ['is invalid'])
+ end
end
context 'authorized user with invalid permissions' do
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index fac9f4dfe00..da423e986c3 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -860,6 +860,66 @@ RSpec.describe API::Groups do
end
end
+ context 'with similarity ordering' do
+ let_it_be(:group_with_projects) { create(:group) }
+ let_it_be(:project_1) { create(:project, name: 'Project', path: 'project', group: group_with_projects) }
+ let_it_be(:project_2) { create(:project, name: 'Test Project', path: 'test-project', group: group_with_projects) }
+ let_it_be(:project_3) { create(:project, name: 'Test', path: 'test', group: group_with_projects) }
+
+ let(:params) { { order_by: 'similarity', search: 'test' } }
+
+ subject { get api("/groups/#{group_with_projects.id}/projects", user1), params: params }
+
+ before do
+ group_with_projects.add_owner(user1)
+ end
+
+ it 'returns items ordered by similarity' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(2)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to eq(['Test', 'Test Project'])
+ end
+
+ context 'when `search` parameter is not given' do
+ before do
+ params.delete(:search)
+ end
+
+ it 'returns items ordered by name' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(3)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to eq(['Project', 'Test', 'Test Project'])
+ end
+ end
+
+ context 'when `similarity_search` feature flag is off' do
+ before do
+ stub_feature_flags(similarity_search: false)
+ end
+
+ it 'returns items ordered by name' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(2)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to eq(['Test', 'Test Project'])
+ end
+ end
+ end
+
it "returns the group's projects with simple representation" do
get api("/groups/#{group1.id}/projects", user1), params: { simple: true }
@@ -1012,6 +1072,7 @@ RSpec.describe API::Groups do
let!(:project4) do
create(:project, namespace: group2, path: 'test_project', visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
+
let(:path) { "/groups/#{group1.id}/projects/shared" }
before do
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 12cd5ace84e..fefa7105327 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe API::Helpers do
'CONTENT_TYPE' => 'text/plain;charset=utf-8'
}
end
+
let(:header) { }
let(:request) { Grape::Request.new(env)}
let(:params) { request.params }
diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb
index 5828dab3080..dac139064da 100644
--- a/spec/requests/api/import_bitbucket_server_spec.rb
+++ b/spec/requests/api/import_bitbucket_server_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe API::ImportBitbucketServer do
Grape::Endpoint.before_each nil
end
- it 'returns 400 response due to a blcoked URL' do
+ it 'returns 400 response due to a blocked URL' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything)
.and_return(double(execute: project))
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index f026314f7a8..bbfb17fe753 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe API::ImportGithub do
before do
Grape::Endpoint.before_each do |endpoint|
- allow(endpoint).to receive(:client).and_return(double('client', user: provider_user, repo: provider_repo).as_null_object)
+ allow(endpoint).to receive(:client).and_return(double('client', user: provider_user, repository: provider_repo).as_null_object)
end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 7d219954e9d..873189af397 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -120,6 +120,138 @@ RSpec.describe API::Internal::Base do
end
end
+ describe 'POST /internal/personal_access_token' do
+ it 'returns an error message when the key does not exist' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: non_existing_record_id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find the given key')
+ end
+
+ it 'returns an error message when the key is a deploy key' do
+ deploy_key = create(:deploy_key)
+
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: deploy_key.id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Deploy keys cannot be used to create personal access tokens')
+ end
+
+ it 'returns an error message when the user does not exist' do
+ key_without_user = create(:key, user: nil)
+
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key_without_user.id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find a user for the given key')
+ expect(json_response['token']).to be_nil
+ end
+
+ it 'returns an error message when given a non-existent user' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ user_id: 0
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("Could not find the given user")
+ end
+
+ it 'returns an error message when no name parameter is received' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("No token name specified")
+ end
+
+ it 'returns an error message when no scopes parameter is received' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken'
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("No token scopes specified")
+ end
+
+ it 'returns an error message when expires_at contains an invalid date' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: ['api'],
+ expires_at: 'invalid-date'
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("Invalid token expiry date: 'invalid-date'")
+ end
+
+ it 'returns an error message when it receives an invalid scope' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api badscope read_repository)
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to match(/\AInvalid scope: 'badscope'. Valid scopes are: /)
+ end
+
+ it 'returns a token without expiry when the expires_at parameter is missing' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api read_repository)
+ }
+
+ expect(json_response['success']).to be_truthy
+ expect(json_response['token']).to match(/\A\S{20}\z/)
+ expect(json_response['scopes']).to match_array(%w(read_api read_repository))
+ expect(json_response['expires_at']).to be_nil
+ end
+
+ it 'returns a token with expiry when it receives a valid expires_at parameter' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api read_repository),
+ expires_at: '9001-11-17'
+ }
+
+ expect(json_response['success']).to be_truthy
+ expect(json_response['token']).to match(/\A\S{20}\z/)
+ expect(json_response['scopes']).to match_array(%w(read_api read_repository))
+ expect(json_response['expires_at']).to eq('9001-11-17')
+ end
+ end
+
describe "POST /internal/lfs_authenticate" do
before do
project.add_developer(user)
@@ -321,6 +453,8 @@ RSpec.describe API::Internal::Base do
expect(json_response["status"]).to be_truthy
expect(json_response["gl_project_path"]).to eq(project.wiki.full_path)
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
+ expect(json_response["gl_key_type"]).to eq("key")
+ expect(json_response["gl_key_id"]).to eq(key.id)
expect(user.reload.last_activity_on).to be_nil
end
@@ -444,6 +578,8 @@ RSpec.describe API::Internal::Base do
expect(json_response["status"]).to be_truthy
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gl_project_path"]).to eq(project.full_path)
+ expect(json_response["gl_key_type"]).to eq("key")
+ expect(json_response["gl_key_id"]).to eq(key.id)
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
@@ -547,6 +683,7 @@ RSpec.describe API::Internal::Base do
}
}
end
+
let(:console_messages) { ['informational message'] }
let(:custom_action_result) { Gitlab::GitAccessResult::CustomAction.new(payload, console_messages) }
@@ -706,6 +843,8 @@ RSpec.describe API::Internal::Base do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gitaly"]).not_to be_nil
+ expect(json_response["gl_key_type"]).to eq("deploy_key")
+ expect(json_response["gl_key_id"]).to eq(key.id)
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
new file mode 100644
index 00000000000..555ca441fe7
--- /dev/null
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::Kubernetes do
+ describe "GET /internal/kubernetes/agent_info" do
+ context 'kubernetes_agent_internal_api feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_internal_api: false)
+ end
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/agent_info')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ it 'returns 403 if the Authorization header is not sent' do
+ get api('/internal/kubernetes/agent_info')
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'an agent is found' do
+ let!(:agent_token) { create(:cluster_agent_token) }
+
+ let(:agent) { agent_token.agent }
+ let(:project) { agent.project }
+
+ it 'returns expected data', :aggregate_failures do
+ get api('/internal/kubernetes/agent_info'), headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ expect(json_response).to match(
+ a_hash_including(
+ 'project_id' => project.id,
+ 'agent_id' => agent.id,
+ 'agent_name' => agent.name,
+ 'gitaly_info' => a_hash_including(
+ 'address' => match(/\.socket$/),
+ 'token' => 'secret',
+ 'features' => {}
+ ),
+ 'gitaly_repository' => a_hash_including(
+ 'storage_name' => project.repository_storage,
+ 'relative_path' => project.disk_path + '.git',
+ 'gl_repository' => "project-#{project.id}",
+ 'gl_project_path' => project.full_path
+ )
+ )
+ )
+ end
+ end
+
+ context 'no such agent exists' do
+ it 'returns 403' do
+ get api('/internal/kubernetes/agent_info'), headers: { 'Authorization' => 'Bearer ABCD' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ describe 'GET /internal/kubernetes/project_info' do
+ context 'kubernetes_agent_internal_api feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_internal_api: false)
+ end
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ it 'returns 403 if the Authorization header is not sent' do
+ get api('/internal/kubernetes/project_info')
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'no such agent exists' do
+ it 'returns 403' do
+ get api('/internal/kubernetes/project_info'), headers: { 'Authorization' => 'Bearer ABCD' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'an agent is found' do
+ let!(:agent_token) { create(:cluster_agent_token) }
+
+ let(:agent) { agent_token.agent }
+
+ context 'project is public' do
+ let(:project) { create(:project, :public) }
+
+ it 'returns expected data', :aggregate_failures do
+ get api('/internal/kubernetes/project_info'), params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ expect(json_response).to match(
+ a_hash_including(
+ 'project_id' => project.id,
+ 'gitaly_info' => a_hash_including(
+ 'address' => match(/\.socket$/),
+ 'token' => 'secret',
+ 'features' => {}
+ ),
+ 'gitaly_repository' => a_hash_including(
+ 'storage_name' => project.repository_storage,
+ 'relative_path' => project.disk_path + '.git',
+ 'gl_repository' => "project-#{project.id}",
+ 'gl_project_path' => project.full_path
+ )
+ )
+ )
+ end
+ end
+
+ context 'project is private' do
+ let(:project) { create(:project, :private) }
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info'), params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'project is internal' do
+ let(:project) { create(:project, :internal) }
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info'), params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'project does not exist' do
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info'), params: { id: 0 }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 48fc95b6574..e58eba02132 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe API::Internal::Pages do
jwt_token = JWT.encode({ 'iss' => 'gitlab-pages' }, Gitlab::Pages.secret, 'HS256')
{ Gitlab::Pages::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
+
let(:pages_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) }
before do
diff --git a/spec/requests/api/issues/get_group_issues_spec.rb b/spec/requests/api/issues/get_group_issues_spec.rb
index b53fac3679d..b0fbf3bf66d 100644
--- a/spec/requests/api/issues/get_group_issues_spec.rb
+++ b/spec/requests/api/issues/get_group_issues_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
created_at: 1.day.ago
end
+
let!(:group_confidential_issue) do
create :issue,
:confidential,
@@ -45,6 +46,7 @@ RSpec.describe API::Issues do
updated_at: 2.hours.ago,
created_at: 2.days.ago
end
+
let!(:group_issue) do
create :issue,
author: user,
@@ -56,14 +58,17 @@ RSpec.describe API::Issues do
description: issue_description,
created_at: 5.days.ago
end
+
let!(:group_label) do
create(:label, title: 'group_lbl', color: '#FFAABB', project: group_project)
end
+
let!(:group_label_link) { create(:label_link, label: group_label, target: group_issue) }
let!(:group_milestone) { create(:milestone, title: '3.0.0', project: group_project) }
let!(:group_empty_milestone) do
create(:milestone, title: '4.0.0', project: group_project)
end
+
let!(:group_note) { create(:note_on_issue, author: user, project: group_project, noteable: group_issue) }
let(:base_url) { "/groups/#{group.id}/issues" }
@@ -246,6 +251,7 @@ RSpec.describe API::Issues do
target_project: private_mrs_project,
description: "closes #{group_issue.to_reference(private_mrs_project)}")
end
+
let!(:merge_request2) do
create(:merge_request,
:simple,
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index 7ff07bf580d..4228ca2d5fd 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
closed_at: 1.hour.ago
end
+
let!(:confidential_issue) do
create :issue,
:confidential,
@@ -37,6 +38,7 @@ RSpec.describe API::Issues do
created_at: generate(:past_time),
updated_at: 2.hours.ago
end
+
let!(:issue) do
create :issue,
author: user,
@@ -48,6 +50,7 @@ RSpec.describe API::Issues do
title: issue_title,
description: issue_description
end
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
@@ -69,6 +72,7 @@ RSpec.describe API::Issues do
target_project: project,
description: "closes #{issue.to_reference}")
end
+
let!(:merge_request2) do
create(:merge_request,
:simple,
@@ -180,12 +184,15 @@ RSpec.describe API::Issues do
it 'avoids N+1 queries' do
get api("/projects/#{project.id}/issues", user)
- create_list(:issue, 3, project: project)
+ create_list(:issue, 3, project: project, closed_by: user)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/issues", user)
end.count
+ milestone = create(:milestone, project: project)
+ create(:issue, project: project, milestone: milestone, closed_by: create(:user))
+
expect do
get api("/projects/#{project.id}/issues", user)
end.not_to exceed_all_query_limit(control_count)
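
The hunk above is the standard QueryRecorder guard against N+1 queries: warm the endpoint, record a baseline query count, create records that would each add queries if associations were not preloaded, and assert the next request stays within the baseline. Condensed, using only names already present in the diff:

    # Baseline: number of SQL queries for a warm request.
    control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
      get api("/projects/#{project.id}/issues", user)
    end.count

    # Extra records that would trigger additional queries without preloading.
    milestone = create(:milestone, project: project)
    create(:issue, project: project, milestone: milestone, closed_by: create(:user))

    # The request must not exceed the recorded baseline.
    expect do
      get api("/projects/#{project.id}/issues", user)
    end.not_to exceed_all_query_limit(control_count)
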
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 519bea22501..b638a65d65e 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
closed_at: 1.hour.ago
end
+
let!(:confidential_issue) do
create :issue,
:confidential,
@@ -37,6 +38,7 @@ RSpec.describe API::Issues do
created_at: generate(:past_time),
updated_at: 2.hours.ago
end
+
let!(:issue) do
create :issue,
author: user,
@@ -48,6 +50,7 @@ RSpec.describe API::Issues do
title: issue_title,
description: issue_description
end
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
@@ -384,6 +387,60 @@ RSpec.describe API::Issues do
end
end
+ context 'filtering by due date' do
+ # This date was chosen because it is at the beginning of a week and near the beginning of a month
+ let_it_be(:frozen_time) { DateTime.parse('2020-08-03 12:00') }
+
+ let_it_be(:issue2) { create(:issue, project: project, author: user, due_date: frozen_time + 3.days) }
+ let_it_be(:issue3) { create(:issue, project: project, author: user, due_date: frozen_time + 10.days) }
+ let_it_be(:issue4) { create(:issue, project: project, author: user, due_date: frozen_time + 34.days) }
+ let_it_be(:issue5) { create(:issue, project: project, author: user, due_date: frozen_time - 8.days) }
+
+ before do
+ travel_to(frozen_time)
+ end
+
+ after do
+ travel_back
+ end
+
+ it 'returns them all when argument is empty' do
+ get api('/issues?due_date=', user)
+
+ expect_paginated_array_response(issue5.id, issue4.id, issue3.id, issue2.id, issue.id, closed_issue.id)
+ end
+
+ it 'returns issues without due date' do
+ get api('/issues?due_date=0', user)
+
+ expect_paginated_array_response(issue.id, closed_issue.id)
+ end
+
+ it 'returns issues due for this week' do
+ get api('/issues?due_date=week', user)
+
+ expect_paginated_array_response(issue2.id)
+ end
+
+ it 'returns issues due for this month' do
+ get api('/issues?due_date=month', user)
+
+ expect_paginated_array_response(issue3.id, issue2.id)
+ end
+
+ it 'returns issues that are due previous two weeks and next month' do
+ get api('/issues?due_date=next_month_and_previous_two_weeks', user)
+
+ expect_paginated_array_response(issue5.id, issue4.id, issue3.id, issue2.id)
+ end
+
+ it 'returns issues that are overdue' do
+ get api('/issues?due_date=overdue', user)
+
+ expect_paginated_array_response(issue5.id)
+ end
+ end
+
context 'filter by labels or label_name param' do
context 'N+1' do
let(:label_b) { create(:label, title: 'foo', project: project) }
@@ -807,6 +864,7 @@ RSpec.describe API::Issues do
target_project: private_mrs_project,
description: "closes #{issue.to_reference(private_mrs_project)}")
end
+
let!(:merge_request2) do
create(:merge_request,
:simple,
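
The due-date filters exercised above (week, month, next_month_and_previous_two_weeks, overdue) are all relative to "now", so the examples freeze time with travel_to/travel_back from ActiveSupport::Testing::TimeHelpers. A minimal, self-contained sketch of that pattern; the suite presumably includes the helpers via its spec_helper, so the explicit include here is only for illustration:

    require 'active_support/testing/time_helpers'

    RSpec.configure do |config|
      config.include ActiveSupport::Testing::TimeHelpers
    end

    # Freeze "now" at a known Monday near the start of a month so that
    # week/month-relative filters are deterministic ...
    before { travel_to(DateTime.parse('2020-08-03 12:00')) }

    # ... and restore the real clock after each example.
    after { travel_back }
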
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index e2f1bb2cd1a..a7fe4d4509a 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
closed_at: 1.hour.ago
end
+
let!(:confidential_issue) do
create :issue,
:confidential,
@@ -36,6 +37,7 @@ RSpec.describe API::Issues do
created_at: generate(:past_time),
updated_at: 2.hours.ago
end
+
let!(:issue) do
create :issue,
author: user,
@@ -47,6 +49,7 @@ RSpec.describe API::Issues do
title: issue_title,
description: issue_description
end
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 53c57931d36..77d5a4f26a8 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -239,6 +239,18 @@ RSpec.describe API::Jobs do
end
end
+ context 'when config source not ci' do
+ let(:non_ci_config_source) { ::Ci::PipelineEnums.non_ci_config_source_values.first }
+ let(:pipeline) do
+ create(:ci_pipeline, config_source: non_ci_config_source, project: project)
+ end
+
+ it 'returns the specified pipeline' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['pipeline']['sha']).to eq(pipeline.sha.to_s)
+ end
+ end
+
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 68f1a0f1ba1..d4c05b4b198 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1551,25 +1551,33 @@ RSpec.describe API::MergeRequests do
it "returns 422 when source_branch equals target_branch" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", source_branch: "master", target_branch: "master", author: user }
+
expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq(["You can't use same project/branch for source and target"])
end
it "returns 400 when source_branch is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", target_branch: "master", author: user }
+
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('source_branch is missing')
end
it "returns 400 when target_branch is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", source_branch: "markdown", author: user }
+
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('target_branch is missing')
end
it "returns 400 when title is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { target_branch: 'master', source_branch: 'markdown' }
+
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('title is missing')
end
context 'with existing MR' do
@@ -1594,7 +1602,9 @@ RSpec.describe API::MergeRequests do
author: user
}
end.to change { MergeRequest.count }.by(0)
+
expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response['message']).to eq(["Another open merge request already exists for this source branch: !5"])
end
end
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index 1510d31a1a6..ca4ebd3689f 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -55,6 +55,7 @@ RSpec.describe API::Notes do
create(:project, namespace: private_user.namespace)
.tap { |p| p.add_maintainer(private_user) }
end
+
let(:private_issue) { create(:issue, project: private_project) }
let(:ext_proj) { create(:project, :public) }
diff --git a/spec/requests/api/notification_settings_spec.rb b/spec/requests/api/notification_settings_spec.rb
index 73cb4948524..7b4a58e63da 100644
--- a/spec/requests/api/notification_settings_spec.rb
+++ b/spec/requests/api/notification_settings_spec.rb
@@ -70,12 +70,13 @@ RSpec.describe API::NotificationSettings do
describe "PUT /projects/:id/notification_settings" do
it "updates project level notification settings for the current user" do
- put api("/projects/#{project.id}/notification_settings", user), params: { level: 'custom', new_note: true }
+ put api("/projects/#{project.id}/notification_settings", user), params: { level: 'custom', new_note: true, moved_project: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['level']).to eq(user.reload.notification_settings_for(project).level)
expect(json_response['events']['new_note']).to be_truthy
expect(json_response['events']['new_issue']).to be_falsey
+ expect(json_response['events']['moved_project']).to be_truthy
end
end
diff --git a/spec/requests/api/npm_packages_spec.rb b/spec/requests/api/npm_packages_spec.rb
index 98a1ca978a8..94647123df0 100644
--- a/spec/requests/api/npm_packages_spec.rb
+++ b/spec/requests/api/npm_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::NpmPackages do
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -407,39 +408,37 @@ RSpec.describe API::NpmPackages do
subject { get api(url) }
- context 'without the need for a license' do
- context 'with public project' do
- context 'with authenticated user' do
- subject { get api(url, personal_access_token: personal_access_token) }
+ context 'with public project' do
+ context 'with authenticated user' do
+ subject { get api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'returns package tags', :maintainer
- it_behaves_like 'returns package tags', :developer
- it_behaves_like 'returns package tags', :reporter
- it_behaves_like 'returns package tags', :guest
- end
+ it_behaves_like 'returns package tags', :maintainer
+ it_behaves_like 'returns package tags', :developer
+ it_behaves_like 'returns package tags', :reporter
+ it_behaves_like 'returns package tags', :guest
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'returns package tags', :no_type
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'returns package tags', :no_type
end
+ end
- context 'with private project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
+ context 'with private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- context 'with authenticated user' do
- subject { get api(url, personal_access_token: personal_access_token) }
+ context 'with authenticated user' do
+ subject { get api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'returns package tags', :maintainer
- it_behaves_like 'returns package tags', :developer
- it_behaves_like 'returns package tags', :reporter
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'returns package tags', :maintainer
+ it_behaves_like 'returns package tags', :developer
+ it_behaves_like 'returns package tags', :reporter
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :forbidden
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :forbidden
end
end
end
@@ -453,39 +452,37 @@ RSpec.describe API::NpmPackages do
subject { put api(url), env: { 'api.request.body': version } }
- context 'without the need for a license' do
- context 'with public project' do
- context 'with authenticated user' do
- subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
+ context 'with public project' do
+ context 'with authenticated user' do
+ subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
- it_behaves_like 'create package tag', :maintainer
- it_behaves_like 'create package tag', :developer
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'create package tag', :maintainer
+ it_behaves_like 'create package tag', :developer
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
+ end
- context 'with private project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
+ context 'with private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- context 'with authenticated user' do
- subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
+ context 'with authenticated user' do
+ subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
- it_behaves_like 'create package tag', :maintainer
- it_behaves_like 'create package tag', :developer
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'create package tag', :maintainer
+ it_behaves_like 'create package tag', :developer
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
end
end
@@ -499,39 +496,37 @@ RSpec.describe API::NpmPackages do
subject { delete api(url) }
- context 'without the need for a license' do
- context 'with public project' do
- context 'with authenticated user' do
- subject { delete api(url, personal_access_token: personal_access_token) }
+ context 'with public project' do
+ context 'with authenticated user' do
+ subject { delete api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'delete package tag', :maintainer
- it_behaves_like 'rejects package tags access', :developer, :forbidden
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'delete package tag', :maintainer
+ it_behaves_like 'rejects package tags access', :developer, :forbidden
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
+ end
- context 'with private project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
+ context 'with private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- context 'with authenticated user' do
- subject { delete api(url, personal_access_token: personal_access_token) }
+ context 'with authenticated user' do
+ subject { delete api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'delete package tag', :maintainer
- it_behaves_like 'rejects package tags access', :developer, :forbidden
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'delete package tag', :maintainer
+ it_behaves_like 'rejects package tags access', :developer, :forbidden
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
end
end
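
The tag endpoints above are covered almost entirely through parameterised shared examples ('returns package tags', 'create package tag', 'delete package tag', 'rejects package tags access') that take a role argument. Their real definitions live elsewhere in the suite (under spec/support); the shape below is a deliberately reduced, hypothetical illustration of how such a role-driven shared example consumes the subject defined in each context:

    # Illustrative only -- not the real shared example.
    RSpec.shared_examples 'returns package tags' do |user_role|
      before do
        # Grant the requested role unless the example models an anonymous user.
        project.send("add_#{user_role}", user) unless user_role == :no_type
      end

      it 'returns the package tags' do
        subject # the GET defined by the including context

        expect(response).to have_gitlab_http_status(:ok)
      end
    end
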
diff --git a/spec/requests/api/nuget_packages_spec.rb b/spec/requests/api/nuget_packages_spec.rb
index 43aa65d1f76..ab537a61058 100644
--- a/spec/requests/api/nuget_packages_spec.rb
+++ b/spec/requests/api/nuget_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::NugetPackages do
include WorkhorseHelpers
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
@@ -20,38 +21,76 @@ RSpec.describe API::NugetPackages do
context 'with valid project' do
using RSpec::Parameterized::TableSyntax
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget service index request' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
- 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
+ context 'personal token' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | true | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- subject { get api(url), headers: headers }
+ subject { get api(url), headers: headers }
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ context 'with job token' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:job) { user_token ? create(:ci_build, project: project, user: user) : double(token: 'wrong') }
+ let(:headers) { user_role == :anonymous ? {} : job_basic_auth_header(job) }
+
+ subject { get api(url), headers: headers }
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
end
end
@@ -98,7 +137,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_header) }
before do
@@ -165,7 +204,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_header) }
before do
@@ -225,7 +264,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -286,7 +325,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -342,7 +381,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -397,7 +436,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -460,7 +499,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
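
Both this NuGet spec and the PyPI spec further down drive their permission matrices with RSpec::Parameterized::TableSyntax: a where block declares the table, and with_them generates one context per row, exposing the column names as let-style helpers. A reduced sketch with a made-up two-column table; project, url, and headers are assumed to come from the surrounding spec, exactly as in the hunks above:

    using RSpec::Parameterized::TableSyntax

    where(:visibility, :expected_status) do
      'PUBLIC'  | :success
      'PRIVATE' | :unauthorized
    end

    with_them do
      it 'responds with the expected status' do
        # Column values for the current row are available by name.
        project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(visibility, false))

        get api(url), headers: headers

        expect(response).to have_gitlab_http_status(expected_status)
      end
    end
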
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index b6838a39257..75183156c9d 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe API::PagesDomains do
build(:pages_domain, :without_key, :without_certificate, domain: 'www.other-domain.test', auto_ssl_enabled: true)
.slice(:domain, :auto_ssl_enabled)
end
+
let(:pages_domain_secure_params) { build(:pages_domain, domain: 'ssl.other-domain.test', project: project).slice(:domain, :certificate, :key) }
let(:pages_domain_secure_key_missmatch_params) {build(:pages_domain, :with_trusted_chain, project: project).slice(:domain, :certificate, :key) }
let(:pages_domain_secure_missing_chain_params) {build(:pages_domain, :with_missing_chain, project: project).slice(:certificate) }
diff --git a/spec/requests/api/performance_bar_spec.rb b/spec/requests/api/performance_bar_spec.rb
new file mode 100644
index 00000000000..a4dbb3d17b8
--- /dev/null
+++ b/spec/requests/api/performance_bar_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Performance Bar for API requests', :request_store, :clean_gitlab_redis_cache do
+ context 'with user that has access to the performance bar' do
+ let_it_be(:admin) { create(:admin) }
+
+ context 'when cookie is set to true' do
+ before do
+ cookies[:perf_bar_enabled] = 'true'
+ end
+
+ it 'stores performance data' do
+ get api("/users/#{admin.id}", admin)
+
+ expect(Peek.adapter.get(headers['X-Request-Id'])).not_to be_empty
+ end
+ end
+
+ context 'when cookie is missing' do
+ it 'does not store performance data' do
+ get api("/users/#{admin.id}", admin)
+
+ expect(Peek.adapter.get(headers['X-Request-Id'])).to be_nil
+ end
+ end
+ end
+
+ context 'with user that does not have access to the performance bar' do
+ let(:user) { create(:user) }
+
+ it 'does not store performance data' do
+ cookies[:perf_bar_enabled] = 'true'
+
+ get api("/users/#{user.id}", user)
+
+ expect(Peek.adapter.get(headers['X-Request-Id'])).to be_nil
+ end
+ end
+end
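
The new spec above checks that performance data is collected only when both conditions hold: the requester may see the performance bar and the perf_bar_enabled cookie is set. Peek keys the collected data by the request id echoed back in the response headers, which is what the expectations read:

    cookies[:perf_bar_enabled] = 'true'

    get api("/users/#{admin.id}", admin)

    # Data is stored under the request id; absent either condition it is nil.
    expect(Peek.adapter.get(headers['X-Request-Id'])).not_to be_empty
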
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index d7ba3b4e158..09d295afbea 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -338,6 +338,16 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
end
context 'with download strategy' do
+ before do
+ Grape::Endpoint.before_each do |endpoint|
+ allow(endpoint).to receive(:user_project).and_return(project)
+ end
+ end
+
+ after do
+ Grape::Endpoint.before_each nil
+ end
+
it 'starts' do
expect_any_instance_of(Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy).not_to receive(:send_file)
@@ -345,6 +355,12 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
expect(response).to have_gitlab_http_status(:accepted)
end
+
+ it 'removes previously exported archive file' do
+ expect(project).to receive(:remove_exports).once
+
+ post api(path, user)
+ end
end
end
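
The download-strategy examples above hook Grape::Endpoint.before_each so that every endpoint instance resolves user_project to the spec's own project object; that is what lets the new example attach an expectation such as receive(:remove_exports) to the right instance. The hook is global, so it has to be cleared afterwards:

    before do
      # Every Grape endpoint built during these examples returns the spec's
      # project from user_project.
      Grape::Endpoint.before_each do |endpoint|
        allow(endpoint).to receive(:user_project).and_return(project)
      end
    end

    after do
      # Reset the global hook so the stub does not leak into other specs.
      Grape::Endpoint.before_each nil
    end
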
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index 8ab90e26a51..3b2a7895630 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response.first['job_events']).to eq(true)
expect(json_response.first['pipeline_events']).to eq(true)
expect(json_response.first['wiki_page_events']).to eq(true)
+ expect(json_response.first['deployment_events']).to eq(true)
expect(json_response.first['enable_ssl_verification']).to eq(true)
expect(json_response.first['push_events_branch_filter']).to eq('master')
end
@@ -71,6 +72,7 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['job_events']).to eq(hook.job_events)
expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
+ expect(json_response['deployment_events']).to eq(true)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
end
@@ -92,8 +94,11 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
describe "POST /projects/:id/hooks" do
it "adds hook to project" do
expect do
- post api("/projects/#{project.id}/hooks", user),
- params: { url: "http://example.com", issues_events: true, confidential_issues_events: true, wiki_page_events: true, job_events: true, push_events_branch_filter: 'some-feature-branch' }
+ post(api("/projects/#{project.id}/hooks", user),
+ params: { url: "http://example.com", issues_events: true,
+ confidential_issues_events: true, wiki_page_events: true,
+ job_events: true, deployment_events: true,
+ push_events_branch_filter: 'some-feature-branch' })
end.to change {project.hooks.count}.by(1)
expect(response).to have_gitlab_http_status(:created)
@@ -108,6 +113,7 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['job_events']).to eq(true)
expect(json_response['pipeline_events']).to eq(false)
expect(json_response['wiki_page_events']).to eq(true)
+ expect(json_response['deployment_events']).to eq(true)
expect(json_response['enable_ssl_verification']).to eq(true)
expect(json_response['push_events_branch_filter']).to eq('some-feature-branch')
expect(json_response).not_to include('token')
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index b238949ce47..d1e5df66b3f 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -3,17 +3,68 @@
require 'spec_helper'
RSpec.describe API::ProjectMilestones do
- let(:user) { create(:user) }
- let!(:project) { create(:project, namespace: user.namespace ) }
- let!(:closed_milestone) { create(:closed_milestone, project: project, title: 'version1', description: 'closed milestone') }
- let!(:milestone) { create(:milestone, project: project, title: 'version2', description: 'open milestone') }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace ) }
+ let_it_be(:closed_milestone) { create(:closed_milestone, project: project, title: 'version1', description: 'closed milestone') }
+ let_it_be(:milestone) { create(:milestone, project: project, title: 'version2', description: 'open milestone') }
+ let_it_be(:route) { "/projects/#{project.id}/milestones" }
before do
project.add_developer(user)
end
- it_behaves_like 'group and project milestones', "/projects/:id/milestones" do
- let(:route) { "/projects/#{project.id}/milestones" }
+ it_behaves_like 'group and project milestones', "/projects/:id/milestones"
+
+ describe 'GET /projects/:id/milestones' do
+ context 'when include_parent_milestones is true' do
+ let_it_be(:ancestor_group) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: ancestor_group) }
+ let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group) }
+ let_it_be(:group_milestone) { create(:milestone, group: group) }
+ let(:params) { { include_parent_milestones: true } }
+
+ shared_examples 'listing all milestones' do
+ it 'returns correct list of milestones' do
+ get api(route, user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(milestones.size)
+ expect(json_response.map { |entry| entry["id"] }).to eq(milestones.map(&:id))
+ end
+ end
+
+ context 'when project parent is a namespace' do
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [milestone, closed_milestone] }
+ end
+ end
+
+ context 'when project parent is a group' do
+ let(:milestones) { [group_milestone, ancestor_group_milestone, milestone, closed_milestone] }
+
+ before_all do
+ project.update(namespace: group)
+ end
+
+ it_behaves_like 'listing all milestones'
+
+ context 'when iids param is present' do
+ let(:params) { { include_parent_milestones: true, iids: [group_milestone.iid] } }
+
+ it_behaves_like 'listing all milestones'
+ end
+
+ context 'when user is not a member of the private project' do
+ let(:external_user) { create(:user) }
+
+ it 'returns a 404 error' do
+ get api(route, external_user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
describe 'DELETE /projects/:id/milestones/:milestone_id' do
@@ -45,10 +96,11 @@ RSpec.describe API::ProjectMilestones do
describe 'PUT /projects/:id/milestones/:milestone_id to test observer on close' do
it 'creates an activity event when a milestone is closed' do
- expect(Event).to receive(:create!)
+ path = "/projects/#{project.id}/milestones/#{milestone.id}"
- put api("/projects/#{project.id}/milestones/#{milestone.id}", user),
- params: { state_event: 'close' }
+ expect do
+ put api(path, user), params: { state_event: 'close' }
+ end.to change(Event, :count).by(1)
end
end
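
The rewritten close-milestone example above asserts on an observable side effect (the Event count changing) instead of expecting Event.create! to be called, so the test keeps passing even if the event is persisted through a different code path. The pattern in isolation:

    path = "/projects/#{project.id}/milestones/#{milestone.id}"

    # Assert the outcome, not the implementation detail.
    expect do
      put api(path, user), params: { state_event: 'close' }
    end.to change(Event, :count).by(1)
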
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index fbb0e3e109f..9b876edae24 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -123,16 +123,19 @@ RSpec.describe API::ProjectSnippets do
end
describe 'POST /projects/:project_id/snippets/' do
- let(:params) do
+ let(:base_params) do
{
title: 'Test Title',
- file_name: 'test.rb',
description: 'test description',
- content: 'puts "hello world"',
visibility: 'public'
}
end
+ let(:file_path) { 'file_1.rb' }
+ let(:file_content) { 'puts "hello world"' }
+ let(:params) { base_params.merge(file_params) }
+ let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
+
shared_examples 'project snippet repository actions' do
let(:snippet) { ProjectSnippet.find(json_response['id']) }
@@ -145,9 +148,9 @@ RSpec.describe API::ProjectSnippets do
it 'commit the files to the repository' do
subject
- blob = snippet.repository.blob_at('master', params[:file_name])
+ blob = snippet.repository.blob_at('master', file_path)
- expect(blob.data).to eq params[:content]
+ expect(blob.data).to eq file_content
end
end
@@ -184,63 +187,60 @@ RSpec.describe API::ProjectSnippets do
params['visibility'] = 'internal'
end
+ subject { post api("/projects/#{project.id}/snippets/", user), params: params }
+
it 'creates a new snippet' do
- post api("/projects/#{project.id}/snippets/", user), params: params
+ subject
expect(response).to have_gitlab_http_status(:created)
snippet = ProjectSnippet.find(json_response['id'])
- expect(snippet.content).to eq(params[:content])
+ expect(snippet.content).to eq(file_content)
expect(snippet.description).to eq(params[:description])
expect(snippet.title).to eq(params[:title])
- expect(snippet.file_name).to eq(params[:file_name])
+ expect(snippet.file_name).to eq(file_path)
expect(snippet.visibility_level).to eq(Snippet::INTERNAL)
end
- it_behaves_like 'project snippet repository actions' do
- subject { post api("/projects/#{project.id}/snippets/", user), params: params }
- end
+ it_behaves_like 'project snippet repository actions'
end
- it 'creates a new snippet' do
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ context 'with an admin' do
+ subject { post api("/projects/#{project.id}/snippets/", admin), params: params }
- expect(response).to have_gitlab_http_status(:created)
- snippet = ProjectSnippet.find(json_response['id'])
- expect(snippet.content).to eq(params[:content])
- expect(snippet.description).to eq(params[:description])
- expect(snippet.title).to eq(params[:title])
- expect(snippet.file_name).to eq(params[:file_name])
- expect(snippet.visibility_level).to eq(Snippet::PUBLIC)
- end
+ it 'creates a new snippet' do
+ subject
- it_behaves_like 'project snippet repository actions' do
- subject { post api("/projects/#{project.id}/snippets/", admin), params: params }
- end
+ expect(response).to have_gitlab_http_status(:created)
+ snippet = ProjectSnippet.find(json_response['id'])
+ expect(snippet.content).to eq(file_content)
+ expect(snippet.description).to eq(params[:description])
+ expect(snippet.title).to eq(params[:title])
+ expect(snippet.file_name).to eq(file_path)
+ expect(snippet.visibility_level).to eq(Snippet::PUBLIC)
+ end
- it 'returns 400 for missing parameters' do
- params.delete(:title)
+ it_behaves_like 'project snippet repository actions'
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ it 'returns 400 for missing parameters' do
+ params.delete(:title)
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ subject
- it 'returns 400 if content is blank' do
- params[:content] = ''
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ it_behaves_like 'snippet creation with files parameter'
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'content is empty'
- end
+ it_behaves_like 'snippet creation without files parameter'
- it 'returns 400 if title is blank' do
- params[:title] = ''
+ it 'returns 400 if title is blank' do
+ params[:title] = ''
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ subject
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'title is empty'
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'title is empty'
+ end
end
context 'when save fails because the repository could not be created' do
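
The snippet-creation changes above replace the legacy file_name/content parameters with a files array, each entry carrying a file_path and its content. The payload the reworked examples now build looks like this, using only values that appear in the hunks:

    params = {
      title: 'Test Title',
      description: 'test description',
      visibility: 'public',
      # One entry per file committed to the snippet repository.
      files: [{ file_path: 'file_1.rb', content: 'puts "hello world"' }]
    }

    post api("/projects/#{project.id}/snippets/", user), params: params
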
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 59b2b09f0bf..d242d49fc1b 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe API::ProjectTemplates do
- let_it_be(:public_project) { create(:project, :public, path: 'path.with.dot') }
- let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:public_project) { create(:project, :public, :repository, create_templates: :merge_request, path: 'path.with.dot') }
+ let_it_be(:private_project) { create(:project, :private, :repository, create_templates: :issue) }
let_it_be(:developer) { create(:user) }
let(:url_encoded_path) { "#{public_project.namespace.path}%2F#{public_project.path}" }
@@ -62,6 +62,33 @@ RSpec.describe API::ProjectTemplates do
expect(json_response).to satisfy_one { |template| template['key'] == 'mit' }
end
+ it 'returns metrics_dashboard_ymls' do
+ get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response).to satisfy_one { |template| template['key'] == 'Default' }
+ end
+
+ it 'returns issue templates' do
+ get api("/projects/#{private_project.id}/templates/issues", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response.map {|t| t['key']}).to match_array(%w(bug feature_proposal template_test))
+ end
+
+ it 'returns merge request templates' do
+ get api("/projects/#{public_project.id}/templates/merge_requests")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response.map {|t| t['key']}).to match_array(%w(bug feature_proposal template_test))
+ end
+
it 'returns 400 for an unknown template type' do
get api("/projects/#{public_project.id}/templates/unknown")
@@ -136,6 +163,14 @@ RSpec.describe API::ProjectTemplates do
expect(json_response['name']).to eq('Android')
end
+ it 'returns a specific metrics_dashboard_yml' do
+ get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls/Default")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('Default')
+ end
+
it 'returns a specific license' do
get api("/projects/#{public_project.id}/templates/licenses/mit")
@@ -143,12 +178,42 @@ RSpec.describe API::ProjectTemplates do
expect(response).to match_response_schema('public_api/v4/license')
end
+ it 'returns a specific issue template' do
+ get api("/projects/#{private_project.id}/templates/issues/bug", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('bug')
+ expect(json_response['content']).to eq('something valid')
+ end
+
+ it 'returns a specific merge request template' do
+ get api("/projects/#{public_project.id}/templates/merge_requests/feature_proposal")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('feature_proposal')
+ expect(json_response['content']).to eq('feature_proposal') # Content is identical to filename here
+ end
+
it 'returns 404 for an unknown specific template' do
get api("/projects/#{public_project.id}/templates/licenses/unknown")
expect(response).to have_gitlab_http_status(:not_found)
end
+ it 'returns 404 for an unknown issue template' do
+ get api("/projects/#{public_project.id}/templates/issues/unknown")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 404 for an unknown merge request template' do
+ get api("/projects/#{public_project.id}/templates/merge_requests/unknown")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
it 'denies access to an anonymous user on a private project' do
get api("/projects/#{private_project.id}/templates/licenses/mit")
@@ -166,6 +231,10 @@ RSpec.describe API::ProjectTemplates do
subject { get api("/projects/#{url_encoded_path}/templates/gitlab_ci_ymls/Android") }
end
+ it_behaves_like 'accepts project paths with dots' do
+ subject { get api("/projects/#{url_encoded_path}/templates/metrics_dashboard_ymls/Default") }
+ end
+
shared_examples 'path traversal attempt' do |template_type|
it 'rejects invalid filenames' do
get api("/projects/#{public_project.id}/templates/#{template_type}/%2e%2e%2fPython%2ea")
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 76b0c04e32d..46340f86f69 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -70,12 +70,14 @@ RSpec.describe API::Projects do
builds_enabled: false,
snippets_enabled: false)
end
+
let(:project_member2) do
create(:project_member,
user: user4,
project: project3,
access_level: ProjectMember::MAINTAINER)
end
+
let(:project4) do
create(:project,
name: 'third_project',
@@ -386,6 +388,14 @@ RSpec.describe API::Projects do
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3].select { |p| p.id > project2.id } }
end
+
+ context 'regression: empty string is ignored' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_after: '' } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3] }
+ end
+ end
end
context 'and using id_before' do
@@ -394,6 +404,14 @@ RSpec.describe API::Projects do
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3].select { |p| p.id < project2.id } }
end
+
+ context 'regression: empty string is ignored' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_before: '' } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3] }
+ end
+ end
end
context 'and using both id_after and id_before' do
@@ -1586,6 +1604,7 @@ RSpec.describe API::Projects do
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
expect(json_response['readme_url']).to eq(project.readme_url)
+ expect(json_response).to have_key 'packages_enabled'
end
it 'returns a group link with expiration date' do
@@ -2339,6 +2358,20 @@ RSpec.describe API::Projects do
expect(project_member).to be_persisted
end
+ describe 'updating packages_enabled attribute' do
+ it 'is enabled by default' do
+ expect(project.packages_enabled).to be true
+ end
+
+ it 'disables project packages feature' do
+ put(api("/projects/#{project.id}", user), params: { packages_enabled: false })
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project.reload.packages_enabled).to be false
+ expect(json_response['packages_enabled']).to eq(false)
+ end
+ end
+
it 'returns 400 when nothing sent' do
project_param = {}
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index b4e83c8caab..e2cfd87b507 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::PypiPackages do
include WorkhorseHelpers
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
@@ -17,49 +18,47 @@ RSpec.describe API::PypiPackages do
subject { get api(url) }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | true | true | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | true | false | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | true | false | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | false | true | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | false | true | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | false | false | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | false | false | 'PyPi package versions' | :success
- 'PUBLIC' | :anonymous | false | true | 'PyPi package versions' | :success
- 'PRIVATE' | :developer | true | true | 'PyPi package versions' | :success
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | true | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :developer | true | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | true | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :developer | false | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | false | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :developer | false | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | false | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :anonymous | false | true | 'PyPi package versions' | :success
+ 'PRIVATE' | :developer | true | true | 'PyPi package versions' | :success
+ 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ end
- subject { get api(url), headers: headers }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ subject { get api(url), headers: headers }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'deploy token for package GET requests'
- it_behaves_like 'rejects PyPI access with unknown project id'
- end
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
describe 'POST /api/v4/projects/:id/packages/pypi/authorize' do
@@ -70,48 +69,46 @@ RSpec.describe API::PypiPackages do
subject { post api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process PyPi api request' | :success
- 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :success
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_header) }
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process PyPi api request' | :success
+ 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :success
+ 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ end
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_header) }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package uploads'
+ it_behaves_like 'deploy token for package uploads'
- it_behaves_like 'rejects PyPI access with unknown project id'
- end
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
describe 'POST /api/v4/projects/:id/packages/pypi' do
@@ -135,61 +132,59 @@ RSpec.describe API::PypiPackages do
)
end
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package creation' | :created
- 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :created
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_header) }
-
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'PyPi package creation' | :created
+ 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :created
+ 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
end
- context 'with an invalid package' do
- let(:token) { personal_access_token.token }
- let(:user_headers) { build_basic_auth_header(user.username, token) }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_header) }
before do
- params[:name] = '.$/@!^*'
- project.add_developer(user)
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
- it_behaves_like 'returning response status', :bad_request
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package uploads'
+ context 'with an invalid package' do
+ let(:token) { personal_access_token.token }
+ let(:user_headers) { basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_header) }
+
+ before do
+ params[:name] = '.$/@!^*'
+ project.add_developer(user)
+ end
- it_behaves_like 'rejects PyPI access with unknown project id'
+ it_behaves_like 'returning response status', :bad_request
end
+
+ it_behaves_like 'deploy token for package uploads'
+
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
describe 'GET /api/v4/projects/:id/packages/pypi/files/:sha256/*file_identifier' do
@@ -200,60 +195,58 @@ RSpec.describe API::PypiPackages do
subject { get api(url) }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package download' | :success
- 'PUBLIC' | :guest | true | true | 'PyPi package download' | :success
- 'PUBLIC' | :developer | true | false | 'PyPi package download' | :success
- 'PUBLIC' | :guest | true | false | 'PyPi package download' | :success
- 'PUBLIC' | :developer | false | true | 'PyPi package download' | :success
- 'PUBLIC' | :guest | false | true | 'PyPi package download' | :success
- 'PUBLIC' | :developer | false | false | 'PyPi package download' | :success
- 'PUBLIC' | :guest | false | false | 'PyPi package download' | :success
- 'PUBLIC' | :anonymous | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | true | true | 'PyPi package download' | :success
- 'PRIVATE' | :guest | true | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | true | false | 'PyPi package download' | :success
- 'PRIVATE' | :guest | true | false | 'PyPi package download' | :success
- 'PRIVATE' | :developer | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :guest | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | false | false | 'PyPi package download' | :success
- 'PRIVATE' | :guest | false | false | 'PyPi package download' | :success
- 'PRIVATE' | :anonymous | false | true | 'PyPi package download' | :success
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | true | true | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | true | false | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | true | false | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | false | true | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | false | true | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | false | false | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | false | false | 'PyPi package download' | :success
+ 'PUBLIC' | :anonymous | false | true | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | true | true | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | true | true | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | true | false | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | true | false | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | false | true | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | false | true | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | false | false | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | false | false | 'PyPi package download' | :success
+ 'PRIVATE' | :anonymous | false | true | 'PyPi package download' | :success
+ end
- subject { get api(url), headers: headers }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ subject { get api(url), headers: headers }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- context 'with deploy token headers' do
- let(:headers) { build_basic_auth_header(deploy_token.username, deploy_token.token) }
+ context 'with deploy token headers' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) }
- context 'valid token' do
- it_behaves_like 'returning response status', :success
- end
+ context 'valid token' do
+ it_behaves_like 'returning response status', :success
+ end
- context 'invalid token' do
- let(:headers) { build_basic_auth_header('foo', 'bar') }
+ context 'invalid token' do
+ let(:headers) { basic_auth_header('foo', 'bar') }
- it_behaves_like 'returning response status', :success
- end
+ it_behaves_like 'returning response status', :success
end
-
- it_behaves_like 'rejects PyPI access with unknown project id'
end
+
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
end
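
Note on the table-driven permission matrix above: RSpec::Parameterized::TableSyntax turns every row of the where block into its own example, with the column names available inside with_them. A minimal, self-contained sketch of that mechanism (example names and values are illustrative, not taken from this spec):

# Minimal sketch of RSpec::Parameterized::TableSyntax, assuming the
# rspec-parameterized gem is available (as it is in the spec above).
require 'rspec/autorun'
require 'rspec-parameterized'

RSpec.describe 'visibility matrix (sketch)' do
  using RSpec::Parameterized::TableSyntax

  # Each row becomes one generated example; the column headers behave like
  # `let` definitions inside the `with_them` block.
  where(:visibility, :member, :expected_status) do
    'PUBLIC'  | true  | :success
    'PRIVATE' | false | :not_found
  end

  with_them do
    it 'exposes the row values to the example' do
      expect([visibility, member, expected_status].length).to eq(3)
    end
  end
end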
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 5e8353d74c3..a9a92a4d3cd 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -420,7 +420,17 @@ RSpec.describe API::Releases do
{
name: 'New release',
tag_name: 'v0.1',
- description: 'Super nice release'
+ description: 'Super nice release',
+ assets: {
+ links: [
+ {
+ name: 'An example runbook link',
+ url: 'https://example.com/runbook',
+ link_type: 'runbook',
+ filepath: '/permanent/path/to/runbook'
+ }
+ ]
+ }
}
end
@@ -435,9 +445,17 @@ RSpec.describe API::Releases do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Release.count }.by(1)
- expect(project.releases.last.name).to eq('New release')
- expect(project.releases.last.tag).to eq('v0.1')
- expect(project.releases.last.description).to eq('Super nice release')
+ release = project.releases.last
+
+ aggregate_failures do
+ expect(release.name).to eq('New release')
+ expect(release.tag).to eq('v0.1')
+ expect(release.description).to eq('Super nice release')
+ expect(release.links.last.name).to eq('An example runbook link')
+ expect(release.links.last.url).to eq('https://example.com/runbook')
+ expect(release.links.last.link_type).to eq('runbook')
+ expect(release.links.last.filepath).to eq('/permanent/path/to/runbook')
+ end
end
it 'creates a new release without description' do
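
For reference, the asset-link payload exercised by the new expectations above maps onto a plain REST call against the same POST /api/v4/projects/:id/releases endpoint. A hedged sketch follows; the host, project ID and token are placeholders, not values from the spec:

# Sketch: POST /api/v4/projects/:id/releases with an attached runbook link.
# gitlab.example.com, project ID 42 and the token are illustrative placeholders.
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/releases')
request = Net::HTTP::Post.new(uri)
request['Content-Type'] = 'application/json'
request['PRIVATE-TOKEN'] = '<access-token>'
request.body = {
  name: 'New release',
  tag_name: 'v0.1',
  description: 'Super nice release',
  assets: {
    links: [
      {
        name: 'An example runbook link',
        url: 'https://example.com/runbook',
        link_type: 'runbook',
        filepath: '/permanent/path/to/runbook'
      }
    ]
  }
}.to_json

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts "#{response.code} #{response.body}"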
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index e676eb94337..4e2f6e108eb 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -84,8 +84,8 @@ RSpec.describe API::Snippets do
public_snippet.id,
public_snippet_other.id)
expect(json_response.map { |snippet| snippet['web_url']} ).to contain_exactly(
- "http://localhost/snippets/#{public_snippet.id}",
- "http://localhost/snippets/#{public_snippet_other.id}")
+ "http://localhost/-/snippets/#{public_snippet.id}",
+ "http://localhost/-/snippets/#{public_snippet_other.id}")
expect(json_response[0]['files'].first).to eq snippet_blob_file(public_snippet_other.blobs.first)
expect(json_response[1]['files'].first).to eq snippet_blob_file(public_snippet.blobs.first)
end
@@ -229,13 +229,16 @@ RSpec.describe API::Snippets do
let(:base_params) do
{
title: 'Test Title',
- file_name: 'test.rb',
description: 'test description',
- content: 'puts "hello world"',
visibility: 'public'
}
end
- let(:params) { base_params.merge(extra_params) }
+
+ let(:file_path) { 'file_1.rb' }
+ let(:file_content) { 'puts "hello world"' }
+
+ let(:params) { base_params.merge(file_params, extra_params) }
+ let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
let(:extra_params) { {} }
subject { post api("/snippets/", user), params: params }
@@ -251,7 +254,7 @@ RSpec.describe API::Snippets do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(params[:title])
expect(json_response['description']).to eq(params[:description])
- expect(json_response['file_name']).to eq(params[:file_name])
+ expect(json_response['file_name']).to eq(file_path)
expect(json_response['files']).to eq(snippet.blobs.map { |blob| snippet_blob_file(blob) })
expect(json_response['visibility']).to eq(params[:visibility])
end
@@ -265,12 +268,31 @@ RSpec.describe API::Snippets do
it 'commit the files to the repository' do
subject
- blob = snippet.repository.blob_at('master', params[:file_name])
+ blob = snippet.repository.blob_at('master', file_path)
- expect(blob.data).to eq params[:content]
+ expect(blob.data).to eq file_content
end
end
+ context 'with files parameter' do
+ it_behaves_like 'snippet creation with files parameter'
+
+ context 'with multiple files' do
+ let(:file_params) do
+ {
+ files: [
+ { file_path: 'file_1.rb', content: 'puts "hello world"' },
+ { file_path: 'file_2.rb', content: 'puts "hello world 2"' }
+ ]
+ }
+ end
+
+ it_behaves_like 'snippet creation'
+ end
+ end
+
+ it_behaves_like 'snippet creation without files parameter'
+
context 'with restricted visibility settings' do
before do
stub_application_setting(restricted_visibility_levels:
@@ -305,15 +327,6 @@ RSpec.describe API::Snippets do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it 'returns 400 if content is blank' do
- params[:content] = ''
-
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'content is empty'
- end
-
it 'returns 400 if title is blank' do
params[:title] = ''
diff --git a/spec/requests/api/suggestions_spec.rb b/spec/requests/api/suggestions_spec.rb
index 34d3c54d700..78a2688ac5e 100644
--- a/spec/requests/api/suggestions_spec.rb
+++ b/spec/requests/api/suggestions_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe API::Suggestions do
put api(url, user)
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'message' => 'A suggestion is not applicable.' })
+ expect(json_response).to eq({ 'message' => "Can't apply as this line was changed in a more recent version." })
end
end
@@ -133,7 +133,7 @@ RSpec.describe API::Suggestions do
params: { ids: [suggestion.id, unappliable_suggestion.id] }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'message' => 'A suggestion is not applicable.' })
+ expect(json_response).to eq({ 'message' => "Can't apply as this line was changed in a more recent version." })
end
end
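
The updated message above is what the suggestion-apply endpoints return when a suggestion no longer applies. Assuming the documented PUT /api/v4/suggestions/:id/apply route (not shown in this hunk), a hedged sketch of a single apply call:

# Hedged sketch: applying one suggestion; assumes the documented
# PUT /api/v4/suggestions/:id/apply endpoint. Host, ID and token are placeholders.
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/suggestions/123/apply')
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = '<access-token>'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
# A 400 response carries the "changed in a more recent version" message asserted above.
puts response.code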
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 17f9112c1d5..6c6497a240b 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -64,6 +64,7 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
expect(json_response).to have_key('note')
expect(json_response['note']).to eq(user.note)
+ expect(json_response).to have_key('sign_in_count')
end
end
@@ -72,6 +73,7 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
get api("/users/#{user.id}", user)
expect(json_response).not_to have_key('note')
+ expect(json_response).not_to have_key('sign_in_count')
end
end
end