Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-02-24 15:09:00 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-02-24 15:09:00 +0300
commitae78b85a25cb0c19c3d6a2e4e6c7ca91ed50787d (patch)
treec53ad0fcdab26725814f1dc5267f6a04ebe4cf73 /spec
parent38149afcf95e7669a7a99828c579d185b70c04dc (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/finders/serverless_domain_finder_spec.rb26
-rw-r--r--spec/fixtures/api/schemas/internal/serverless/lookup_path.json28
-rw-r--r--spec/fixtures/api/schemas/internal/serverless/virtual_domain.json14
-rw-r--r--spec/frontend/create_cluster/gke_cluster/helpers.js64
-rw-r--r--spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js (renamed from spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js)7
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js8
-rw-r--r--spec/javascripts/create_cluster/gke_cluster/helpers.js49
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb265
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb4
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/members_mapper_spec.rb294
-rw-r--r--spec/lib/gitlab/kubernetes/helm/pod_spec.rb2
-rw-r--r--spec/lib/gitlab/reference_counter_spec.rb62
-rw-r--r--spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb38
-rw-r--r--spec/requests/api/internal/pages_spec.rb82
-rw-r--r--spec/services/ci/register_job_service_spec.rb26
16 files changed, 752 insertions, 219 deletions
diff --git a/spec/finders/serverless_domain_finder_spec.rb b/spec/finders/serverless_domain_finder_spec.rb
index 3fe82264cda..c41f09535d3 100644
--- a/spec/finders/serverless_domain_finder_spec.rb
+++ b/spec/finders/serverless_domain_finder_spec.rb
@@ -5,12 +5,34 @@ require 'spec_helper'
describe ServerlessDomainFinder do
let(:function_name) { 'test-function' }
let(:pages_domain_name) { 'serverless.gitlab.io' }
- let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: pages_domain_name) }
- let!(:serverless_domain_cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
let(:valid_cluster_uuid) { 'aba1cdef123456f278' }
let(:invalid_cluster_uuid) { 'aba1cdef123456f178' }
let!(:environment) { create(:environment, name: 'test') }
+ let(:pages_domain) do
+ create(
+ :pages_domain,
+ :instance_serverless,
+ domain: pages_domain_name
+ )
+ end
+
+ let(:knative_with_ingress) do
+ create(
+ :clusters_applications_knative,
+ external_ip: '10.0.0.1'
+ )
+ end
+
+ let!(:serverless_domain_cluster) do
+ create(
+ :serverless_domain_cluster,
+ uuid: 'abcdef12345678',
+ pages_domain: pages_domain,
+ knative: knative_with_ingress
+ )
+ end
+
let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
diff --git a/spec/fixtures/api/schemas/internal/serverless/lookup_path.json b/spec/fixtures/api/schemas/internal/serverless/lookup_path.json
new file mode 100644
index 00000000000..c20ea926587
--- /dev/null
+++ b/spec/fixtures/api/schemas/internal/serverless/lookup_path.json
@@ -0,0 +1,28 @@
+{
+ "type": "object",
+ "required": [
+ "source"
+ ],
+ "properties": {
+ "source": { "type": "object",
+ "required": ["type", "service", "cluster"],
+ "properties" : {
+ "type": { "type": "string", "enum": ["serverless"] },
+ "service": { "type": "string" },
+ "cluster": { "type": "object",
+ "required": ["hostname", "address", "port", "cert", "key"],
+ "properties": {
+ "hostname": { "type": "string" },
+ "address": { "type": "string" },
+ "port": { "type": "integer" },
+ "cert": { "type": "string" },
+ "key": { "type": "string" }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/internal/serverless/virtual_domain.json b/spec/fixtures/api/schemas/internal/serverless/virtual_domain.json
new file mode 100644
index 00000000000..50e899ef2f8
--- /dev/null
+++ b/spec/fixtures/api/schemas/internal/serverless/virtual_domain.json
@@ -0,0 +1,14 @@
+{
+ "type": "object",
+ "required": [
+ "lookup_paths",
+ "certificate",
+ "key"
+ ],
+ "properties": {
+ "certificate": { "type": ["string", "null"] },
+ "key": { "type": ["string", "null"] },
+ "lookup_paths": { "type": "array", "items": { "$ref": "lookup_path.json" } }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/frontend/create_cluster/gke_cluster/helpers.js b/spec/frontend/create_cluster/gke_cluster/helpers.js
new file mode 100644
index 00000000000..52b43b82698
--- /dev/null
+++ b/spec/frontend/create_cluster/gke_cluster/helpers.js
@@ -0,0 +1,64 @@
+import {
+ gapiProjectsResponseMock,
+ gapiZonesResponseMock,
+ gapiMachineTypesResponseMock,
+} from './mock_data';
+
+const cloudbilling = {
+ projects: {
+ getBillingInfo: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { billingEnabled: true },
+ });
+ }),
+ ),
+ },
+};
+
+const cloudresourcemanager = {
+ projects: {
+ list: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { ...gapiProjectsResponseMock },
+ });
+ }),
+ ),
+ },
+};
+
+const compute = {
+ zones: {
+ list: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { ...gapiZonesResponseMock },
+ });
+ }),
+ ),
+ },
+ machineTypes: {
+ list: jest.fn(
+ () =>
+ new Promise(resolve => {
+ resolve({
+ result: { ...gapiMachineTypesResponseMock },
+ });
+ }),
+ ),
+ },
+};
+
+const gapi = {
+ client: {
+ cloudbilling,
+ cloudresourcemanager,
+ compute,
+ },
+};
+
+export { gapi as default };
diff --git a/spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js b/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
index 7ceaeace82f..8c3525207d6 100644
--- a/spec/javascripts/create_cluster/gke_cluster/stores/actions_spec.js
+++ b/spec/frontend/create_cluster/gke_cluster/stores/actions_spec.js
@@ -1,7 +1,7 @@
-import testAction from 'spec/helpers/vuex_action_helper';
+import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/create_cluster/gke_cluster/store/actions';
import { createStore } from '~/create_cluster/gke_cluster/store';
-import { gapi } from '../helpers';
+import gapi from '../helpers';
import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data';
describe('GCP Cluster Dropdown Store Actions', () => {
@@ -65,9 +65,10 @@ describe('GCP Cluster Dropdown Store Actions', () => {
describe('async fetch methods', () => {
let originalGapi;
+
beforeAll(() => {
originalGapi = window.gapi;
- window.gapi = gapi();
+ window.gapi = gapi;
});
afterAll(() => {
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index 50b97ae914d..6532c6ed2c7 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -72,11 +72,17 @@ describe('self monitor component', () => {
selfMonitoringProjectExists: true,
createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete',
+ selfMonitoringProjectFullPath: 'instance-administrators-random/gitlab-self-monitoring',
});
wrapper = shallowMount(SelfMonitor, { store });
- expect(wrapper.vm.selfMonitoringFormText).toContain('<a href="http://localhost/">');
+ expect(
+ wrapper
+ .find({ ref: 'selfMonitoringFormText' })
+ .find('a')
+ .attributes('href'),
+ ).toEqual('http://localhost/instance-administrators-random/gitlab-self-monitoring');
});
});
});
diff --git a/spec/javascripts/create_cluster/gke_cluster/helpers.js b/spec/javascripts/create_cluster/gke_cluster/helpers.js
deleted file mode 100644
index 6df511e9157..00000000000
--- a/spec/javascripts/create_cluster/gke_cluster/helpers.js
+++ /dev/null
@@ -1,49 +0,0 @@
-import {
- gapiProjectsResponseMock,
- gapiZonesResponseMock,
- gapiMachineTypesResponseMock,
-} from './mock_data';
-
-// eslint-disable-next-line import/prefer-default-export
-export const gapi = () => ({
- client: {
- cloudbilling: {
- projects: {
- getBillingInfo: () =>
- new Promise(resolve => {
- resolve({
- result: { billingEnabled: true },
- });
- }),
- },
- },
- cloudresourcemanager: {
- projects: {
- list: () =>
- new Promise(resolve => {
- resolve({
- result: { ...gapiProjectsResponseMock },
- });
- }),
- },
- },
- compute: {
- zones: {
- list: () =>
- new Promise(resolve => {
- resolve({
- result: { ...gapiZonesResponseMock },
- });
- }),
- },
- machineTypes: {
- list: () =>
- new Promise(resolve => {
- resolve({
- result: { ...gapiMachineTypesResponseMock },
- });
- }),
- },
- },
- },
-});
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
new file mode 100644
index 00000000000..410aef1cd53
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -0,0 +1,265 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Config::Entry::Processable do
+ let(:node_class) do
+ Class.new(::Gitlab::Config::Entry::Node) do
+ include Gitlab::Ci::Config::Entry::Processable
+
+ def self.name
+ 'job'
+ end
+ end
+ end
+
+ let(:entry) { node_class.new(config, name: :rspec) }
+
+ describe 'validations' do
+ before do
+ entry.compose!
+ end
+
+ context 'when entry config value is correct' do
+ let(:config) { { stage: 'test' } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when job name is empty' do
+ let(:entry) { node_class.new(config, name: ''.to_sym) }
+
+ it 'reports error' do
+ expect(entry.errors).to include "job name can't be blank"
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ context 'incorrect config value type' do
+ let(:config) { ['incorrect'] }
+
+ describe '#errors' do
+ it 'reports error about a config type' do
+ expect(entry.errors)
+ .to include 'job config should be a hash'
+ end
+ end
+ end
+
+ context 'when config is empty' do
+ let(:config) { {} }
+
+ describe '#valid' do
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+
+ context 'when extends key is not a string' do
+ let(:config) { { extends: 123 } }
+
+ it 'returns error about wrong value type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include "job extends should be an array of strings or a string"
+ end
+ end
+
+ context 'when it uses both "when:" and "rules:"' do
+ let(:config) do
+ {
+ script: 'echo',
+ when: 'on_failure',
+ rules: [{ if: '$VARIABLE', when: 'on_success' }]
+ }
+ end
+
+ it 'returns an error about when: being combined with rules' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job config key may not be used with `rules`: when'
+ end
+ end
+
+ context 'when only: is used with rules:' do
+ let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+
+ context 'and only: is blank' do
+ let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+
+ context 'and rules: is blank' do
+ let(:config) { { only: ['merge_requests'], rules: nil } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+ end
+
+ context 'when except: is used with rules:' do
+ let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+
+ context 'and except: is blank' do
+ let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+
+ context 'and rules: is blank' do
+ let(:config) { { except: { refs: %w[master] }, rules: nil } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+ end
+
+ context 'when only: and except: are both used with rules:' do
+ let(:config) do
+ {
+ only: %w[merge_requests],
+ except: { refs: %w[master] },
+ rules: [{ if: '$THIS' }]
+ }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+
+ context 'when only: and except: as both blank' do
+ let(:config) do
+ { only: nil, except: nil, rules: [{ if: '$THIS' }] }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+
+ context 'when rules: is blank' do
+ let(:config) do
+ { only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`/
+ expect(entry.errors).to include /may not be used with `rules`/
+ end
+ end
+ end
+ end
+ end
+
+ describe '#relevant?' do
+ it 'is a relevant entry' do
+ entry = node_class.new({ stage: 'test' }, name: :rspec)
+
+ expect(entry).to be_relevant
+ end
+ end
+
+ describe '#compose!' do
+ let(:specified) do
+ double('specified', 'specified?' => true, value: 'specified')
+ end
+
+ let(:unspecified) { double('unspecified', 'specified?' => false) }
+ let(:default) { double('default', '[]' => unspecified) }
+ let(:workflow) { double('workflow', 'has_rules?' => false) }
+ let(:deps) { double('deps', 'default' => default, '[]' => unspecified, 'workflow' => workflow) }
+
+ context 'with workflow rules' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:name, :has_workflow_rules?, :only, :rules, :result) do
+ "uses default only" | false | nil | nil | { refs: %w[branches tags] }
+ "uses user only" | false | %w[branches] | nil | { refs: %w[branches] }
+ "does not define only" | false | nil | [] | nil
+ "does not define only" | true | nil | nil | nil
+ "uses user only" | true | %w[branches] | nil | { refs: %w[branches] }
+ "does not define only" | true | nil | [] | nil
+ end
+
+ with_them do
+ let(:config) { { script: 'ls', rules: rules, only: only }.compact }
+
+ it "#{name}" do
+ expect(workflow).to receive(:has_rules?) { has_workflow_rules? }
+
+ entry.compose!(deps)
+
+ expect(entry.only_value).to eq(result)
+ end
+ end
+ end
+
+ context 'when workflow rules is used' do
+ context 'when rules are used' do
+ let(:config) { { script: 'ls', cache: { key: 'test' }, rules: [] } }
+
+ it 'does not define only' do
+ expect(entry).not_to be_only_defined
+ end
+ end
+
+ context 'when rules are not used' do
+ let(:config) { { script: 'ls', cache: { key: 'test' }, only: [] } }
+
+ it 'does not define only' do
+ expect(entry).not_to be_only_defined
+ end
+ end
+ end
+ end
+
+ context 'when composed' do
+ before do
+ entry.compose!
+ end
+
+ describe '#value' do
+ context 'when entry is correct' do
+ let(:config) do
+ { stage: 'test' }
+ end
+
+ it 'returns correct value' do
+ expect(entry.value)
+ .to eq(name: :rspec,
+ stage: 'test',
+ only: { refs: %w[branches tags] })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index cfc3d852de0..e303557bd00 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -2419,7 +2419,9 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
- expect(subject.errors).to eq(['jobs:rspec config contains unknown keys: bad_tags', 'jobs:rspec rules should be an array of hashes'])
+ expect(subject.errors).to contain_exactly(
+ 'jobs:rspec config contains unknown keys: bad_tags',
+ 'jobs:rspec rules should be an array of hashes')
expect(subject.content).to be_blank
end
end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 2561e763429..4b378936965 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -218,6 +218,8 @@ describe Gitlab::Danger::Helper do
'scripts/foo' | :engineering_productivity
'lib/gitlab/danger/foo' | :engineering_productivity
'ee/lib/gitlab/danger/foo' | :engineering_productivity
+ '.overcommit.yml.example' | :engineering_productivity
+ 'tooling/overcommit/foo' | :engineering_productivity
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend
diff --git a/spec/lib/gitlab/import_export/members_mapper_spec.rb b/spec/lib/gitlab/import_export/members_mapper_spec.rb
index 01a7901062a..7e2b5ed534f 100644
--- a/spec/lib/gitlab/import_export/members_mapper_spec.rb
+++ b/spec/lib/gitlab/import_export/members_mapper_spec.rb
@@ -4,167 +4,191 @@ require 'spec_helper'
describe Gitlab::ImportExport::MembersMapper do
describe 'map members' do
- let(:user) { create(:admin) }
- let(:project) { create(:project, :public, name: 'searchable_project') }
- let(:user2) { create(:user) }
- let(:exported_user_id) { 99 }
- let(:exported_members) do
- [{
- "id" => 2,
- "access_level" => 40,
- "source_id" => 14,
- "source_type" => "Project",
- "notification_level" => 3,
- "created_at" => "2016-03-11T10:21:44.822Z",
- "updated_at" => "2016-03-11T10:21:44.822Z",
- "created_by_id" => nil,
- "invite_email" => nil,
- "invite_token" => nil,
- "invite_accepted_at" => nil,
- "user" =>
- {
- "id" => exported_user_id,
- "email" => user2.email,
- "username" => 'test'
- },
- "user_id" => 19
- },
- {
- "id" => 3,
- "access_level" => 40,
- "source_id" => 14,
- "source_type" => "Project",
- "user_id" => nil,
- "notification_level" => 3,
- "created_at" => "2016-03-11T10:21:44.822Z",
- "updated_at" => "2016-03-11T10:21:44.822Z",
- "created_by_id" => 1,
- "invite_email" => 'invite@test.com',
- "invite_token" => 'token',
- "invite_accepted_at" => nil
- }]
- end
-
- let(:members_mapper) do
- described_class.new(
- exported_members: exported_members, user: user, importable: project)
- end
-
- it 'includes the exported user ID in the map' do
- expect(members_mapper.map.keys).to include(exported_user_id)
- end
-
- it 'maps a project member' do
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
- end
-
- it 'defaults to importer project member if it does not exist' do
- expect(members_mapper.map[-1]).to eq(user.id)
- end
-
- it 'has invited members with no user' do
- members_mapper.map
-
- expect(ProjectMember.find_by_invite_email('invite@test.com')).not_to be_nil
- end
-
- it 'authorizes the users to the project' do
- members_mapper.map
-
- expect(user.authorized_project?(project)).to be true
- expect(user2.authorized_project?(project)).to be true
- end
-
- it 'maps an owner as a maintainer' do
- exported_members.first['access_level'] = ProjectMember::OWNER
-
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
- expect(ProjectMember.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
- end
-
- it 'removes old user_id from member_hash to avoid conflict with user key' do
- expect(ProjectMember)
- .to receive(:create)
- .twice
- .with(hash_excluding('user_id'))
- .and_call_original
-
- members_mapper.map
- end
-
- context 'user is not an admin' do
- let(:user) { create(:user) }
-
- it 'does not map a project member' do
- expect(members_mapper.map[exported_user_id]).to eq(user.id)
+ shared_examples 'imports exported members' do
+ let(:user) { create(:admin) }
+ let(:user2) { create(:user) }
+ let(:exported_user_id) { 99 }
+ let(:exported_members) do
+ [{
+ "id" => 2,
+ "access_level" => 40,
+ "source_id" => 14,
+ "source_type" => source_type,
+ "notification_level" => 3,
+ "created_at" => "2016-03-11T10:21:44.822Z",
+ "updated_at" => "2016-03-11T10:21:44.822Z",
+ "created_by_id" => nil,
+ "invite_email" => nil,
+ "invite_token" => nil,
+ "invite_accepted_at" => nil,
+ "user" =>
+ {
+ "id" => exported_user_id,
+ "email" => user2.email,
+ "username" => 'test'
+ },
+ "user_id" => 19
+ },
+ {
+ "id" => 3,
+ "access_level" => 40,
+ "source_id" => 14,
+ "source_type" => source_type,
+ "user_id" => nil,
+ "notification_level" => 3,
+ "created_at" => "2016-03-11T10:21:44.822Z",
+ "updated_at" => "2016-03-11T10:21:44.822Z",
+ "created_by_id" => 1,
+ "invite_email" => 'invite@test.com',
+ "invite_token" => 'token',
+ "invite_accepted_at" => nil
+ }]
end
- it 'defaults to importer project member if it does not exist' do
- expect(members_mapper.map[-1]).to eq(user.id)
+ let(:members_mapper) do
+ described_class.new(
+ exported_members: exported_members, user: user, importable: importable)
end
- end
- context 'chooses the one with an email first' do
- let(:user3) { create(:user, username: 'test') }
+ it 'includes the exported user ID in the map' do
+ expect(members_mapper.map.keys).to include(exported_user_id)
+ end
- it 'maps the project member that has a matching email first' do
+ it 'maps a member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
- end
- context 'importer same as group member' do
- let(:user2) { create(:admin) }
- let(:group) { create(:group) }
- let(:project) { create(:project, :public, name: 'searchable_project', namespace: group) }
- let(:members_mapper) do
- described_class.new(
- exported_members: exported_members, user: user2, importable: project)
+ it 'defaults to importer member if it does not exist' do
+ expect(members_mapper.map[-1]).to eq(user.id)
end
- before do
- group.add_users([user, user2], GroupMember::DEVELOPER)
- end
+ it 'has invited members with no user' do
+ members_mapper.map
- it 'maps the project member' do
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ expect(member_class.find_by_invite_email('invite@test.com')).not_to be_nil
end
- it 'maps the project member if it already exists' do
- project.add_maintainer(user2)
+ it 'removes old user_id from member_hash to avoid conflict with user key' do
+ expect(member_class)
+ .to receive(:create)
+ .twice
+ .with(hash_excluding('user_id'))
+ .and_call_original
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ members_mapper.map
end
- end
- context 'importing group members' do
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:members_mapper) do
- described_class.new(
- exported_members: exported_members, user: user, importable: project)
- end
+ context 'user is not an admin' do
+ let(:user) { create(:user) }
- before do
- group.add_users([user, user2], GroupMember::DEVELOPER)
- user.update(email: 'invite@test.com')
+ it 'does not map a member' do
+ expect(members_mapper.map[exported_user_id]).to eq(user.id)
+ end
+
+ it 'defaults to importer member if it does not exist' do
+ expect(members_mapper.map[-1]).to eq(user.id)
+ end
end
- it 'maps the importer' do
- expect(members_mapper.map[-1]).to eq(user.id)
+ context 'chooses the one with an email' do
+ let(:user3) { create(:user, username: 'test') }
+
+ it 'maps the member that has a matching email' do
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
end
+ end
- it 'maps the group member' do
- expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ context 'when importable is Project' do
+ include_examples 'imports exported members' do
+ let(:source_type) { 'Project' }
+ let(:member_class) { ProjectMember }
+ let(:importable) { create(:project, :public, name: 'searchable_project') }
+
+ it 'authorizes the users to the project' do
+ members_mapper.map
+
+ expect(user.authorized_project?(importable)).to be true
+ expect(user2.authorized_project?(importable)).to be true
+ end
+
+ it 'maps an owner as a maintainer' do
+ exported_members.first['access_level'] = ProjectMember::OWNER
+
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ expect(member_class.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
+ end
+
+ context 'importer same as group member' do
+ let(:user2) { create(:admin) }
+ let(:group) { create(:group) }
+ let(:importable) { create(:project, :public, name: 'searchable_project', namespace: group) }
+ let(:members_mapper) do
+ described_class.new(
+ exported_members: exported_members, user: user2, importable: importable)
+ end
+
+ before do
+ group.add_users([user, user2], GroupMember::DEVELOPER)
+ end
+
+ it 'maps the project member' do
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
+
+ it 'maps the project member if it already exists' do
+ importable.add_maintainer(user2)
+
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
+ end
+
+ context 'importing group members' do
+ let(:group) { create(:group) }
+ let(:importable) { create(:project, namespace: group) }
+ let(:members_mapper) do
+ described_class.new(
+ exported_members: exported_members, user: user, importable: importable)
+ end
+
+ before do
+ group.add_users([user, user2], GroupMember::DEVELOPER)
+ user.update(email: 'invite@test.com')
+ end
+
+ it 'maps the importer' do
+ expect(members_mapper.map[-1]).to eq(user.id)
+ end
+
+ it 'maps the group member' do
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ end
+ end
+
+ context 'when importer mapping fails' do
+ let(:exception_message) { 'Something went wrong' }
+
+ it 'includes importer specific error message' do
+ expect(member_class).to receive(:create!).and_raise(StandardError.new(exception_message))
+
+ expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
+ end
+ end
end
end
- context 'when importer mapping fails' do
- let(:exception_message) { 'Something went wrong' }
+ context 'when importable is Group' do
+ include_examples 'imports exported members' do
+ let(:source_type) { 'Namespace' }
+ let(:member_class) { GroupMember }
+ let(:importable) { create(:group) }
- it 'includes importer specific error message' do
- expect(ProjectMember).to receive(:create!).and_raise(StandardError.new(exception_message))
+ it 'does not lower owner access level' do
+ exported_members.first['access_level'] = member_class::OWNER
- expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
+ expect(members_mapper.map[exported_user_id]).to eq(user2.id)
+ expect(member_class.find_by_user_id(user2.id).access_level).to eq(member_class::OWNER)
+ end
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
index 24a734a2915..3c62219a9a5 100644
--- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
@@ -32,7 +32,7 @@ describe Gitlab::Kubernetes::Helm::Pod do
it 'generates the appropriate specifications for the container' do
container = subject.generate.spec.containers.first
expect(container.name).to eq('helm')
- expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.1-kube-1.13.12')
+ expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.3-kube-1.13.12')
expect(container.env.count).to eq(3)
expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT])
expect(container.command).to match_array(["/bin/sh"])
diff --git a/spec/lib/gitlab/reference_counter_spec.rb b/spec/lib/gitlab/reference_counter_spec.rb
index f9361d08faf..ae7b18ca007 100644
--- a/spec/lib/gitlab/reference_counter_spec.rb
+++ b/spec/lib/gitlab/reference_counter_spec.rb
@@ -2,38 +2,54 @@
require 'spec_helper'
-describe Gitlab::ReferenceCounter do
- let(:redis) { double('redis') }
- let(:reference_counter_key) { "git-receive-pack-reference-counter:project-1" }
+describe Gitlab::ReferenceCounter, :clean_gitlab_redis_shared_state do
let(:reference_counter) { described_class.new('project-1') }
- before do
- allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
+ describe '#increase' do
+ it 'increases and sets the expire time of a reference count for a path' do
+ expect { reference_counter.increase }.to change { reference_counter.value }.by(1)
+ expect(reference_counter.expires_in).to be_positive
+ expect(reference_counter.increase).to be(true)
+ end
end
- it 'increases and set the expire time of a reference count for a path' do
- expect(redis).to receive(:incr).with(reference_counter_key)
- expect(redis).to receive(:expire).with(reference_counter_key,
- described_class::REFERENCE_EXPIRE_TIME)
- expect(reference_counter.increase).to be(true)
+ describe '#decrease' do
+ it 'decreases the reference count for a path' do
+ reference_counter.increase
+
+ expect { reference_counter.decrease }.to change { reference_counter.value }.by(-1)
+ end
+
+ it 'warns if attempting to decrease a counter with a value of zero or less, and resets the counter' do
+ expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
+ " decreased when its value was less than 1. Resetting the counter.")
+ expect { reference_counter.decrease }.not_to change { reference_counter.value }
+ end
end
- it 'decreases the reference count for a path' do
- allow(redis).to receive(:decr).and_return(0)
- expect(redis).to receive(:decr).with(reference_counter_key)
- expect(reference_counter.decrease).to be(true)
+ describe '#value' do
+ it 'get the reference count for a path' do
+ expect(reference_counter.value).to eq(0)
+
+ reference_counter.increase
+
+ expect(reference_counter.value).to eq(1)
+ end
end
- it 'warns if attempting to decrease a counter with a value of one or less, and resets the counter' do
- expect(redis).to receive(:decr).and_return(-1)
- expect(redis).to receive(:del)
- expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
- " decreased when its value was less than 1. Reseting the counter.")
- expect(reference_counter.decrease).to be(true)
+ describe '#reset!' do
+ it 'resets reference count down to zero' do
+ 3.times { reference_counter.increase }
+
+ expect { reference_counter.reset! }.to change { reference_counter.value}.from(3).to(0)
+ end
end
- it 'get the reference count for a path' do
- allow(redis).to receive(:get).and_return(1)
- expect(reference_counter.value).to be(1)
+ describe '#expires_in' do
+ it 'displays the expiration time in seconds' do
+ reference_counter.increase
+
+ expect(reference_counter.expires_in).to be_between(500, 600)
+ end
end
end
diff --git a/spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb b/spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb
new file mode 100644
index 00000000000..dfe14b40c6e
--- /dev/null
+++ b/spec/migrations/update_application_setting_npm_package_requests_forwarding_default_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb')
+
+describe UpdateApplicationSettingNpmPackageRequestsForwardingDefault, :migration do
+  # Create test data - an application_settings row with forwarding disabled.
+ let(:application_settings) { table(:application_settings) }
+
+ before do
+ application_settings.create!(npm_package_requests_forwarding: false)
+ end
+
+ # Test just the up migration.
+ it 'correctly migrates the application setting' do
+ expect { migrate! }.to change { current_application_setting }.from(false).to(true)
+ end
+
+ # Test a reversible migration.
+ it 'correctly migrates up and down the application setting' do
+ reversible_migration do |migration|
+ # Expectations will run before the up migration,
+ # and then again after the down migration
+ migration.before -> {
+ expect(current_application_setting).to eq false
+ }
+
+ # Expectations will run after the up migration.
+ migration.after -> {
+ expect(current_application_setting).to eq true
+ }
+ end
+ end
+
+ def current_application_setting
+ ApplicationSetting.current_without_cache.npm_package_requests_forwarding
+ end
+end
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 9a8c1a0e03b..99cb2bfe221 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -56,6 +56,88 @@ describe API::Internal::Pages do
end
end
+ context 'serverless domain' do
+ let(:namespace) { create(:namespace, name: 'gitlab-org') }
+ let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
+ let(:environment) { create(:environment, project: project) }
+ let(:pages_domain) { create(:pages_domain, domain: 'serverless.gitlab.io') }
+ let(:knative_without_ingress) { create(:clusters_applications_knative) }
+ let(:knative_with_ingress) { create(:clusters_applications_knative, external_ip: '10.0.0.1') }
+
+ context 'without a knative ingress gateway IP' do
+ let!(:serverless_domain_cluster) do
+ create(
+ :serverless_domain_cluster,
+ uuid: 'abcdef12345678',
+ pages_domain: pages_domain,
+ knative: knative_without_ingress
+ )
+ end
+
+ let(:serverless_domain) do
+ create(
+ :serverless_domain,
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ it 'responds with 204 no content' do
+ query_host(serverless_domain.uri.host)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'with a knative ingress gateway IP' do
+ let!(:serverless_domain_cluster) do
+ create(
+ :serverless_domain_cluster,
+ uuid: 'abcdef12345678',
+ pages_domain: pages_domain,
+ knative: knative_with_ingress
+ )
+ end
+
+ let(:serverless_domain) do
+ create(
+ :serverless_domain,
+ serverless_domain_cluster: serverless_domain_cluster,
+ environment: environment
+ )
+ end
+
+ it 'responds with proxy configuration' do
+ query_host(serverless_domain.uri.host)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('internal/serverless/virtual_domain')
+
+ expect(json_response['certificate']).to eq(pages_domain.certificate)
+ expect(json_response['key']).to eq(pages_domain.key)
+
+ expect(json_response['lookup_paths']).to eq(
+ [
+ {
+ 'source' => {
+ 'type' => 'serverless',
+ 'service' => "test-function.#{project.name}-#{project.id}-#{environment.slug}.#{serverless_domain_cluster.knative.hostname}",
+ 'cluster' => {
+ 'hostname' => serverless_domain_cluster.knative.hostname,
+ 'address' => serverless_domain_cluster.knative.external_ip,
+ 'port' => 443,
+ 'cert' => serverless_domain_cluster.certificate,
+ 'key' => serverless_domain_cluster.key
+ }
+ }
+ }
+ ]
+ )
+ end
+ end
+ end
+
context 'custom domain' do
let(:namespace) { create(:namespace, name: 'gitlab-org') }
let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 0f2d994efd4..2da1350e2af 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -612,7 +612,8 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
- jobs_running_for_project: expected_jobs_running_for_project_first_job }, 1800)
+ jobs_running_for_project: expected_jobs_running_for_project_first_job,
+ shard: expected_shard }, 1800)
execute(runner)
end
@@ -625,7 +626,8 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
- jobs_running_for_project: expected_jobs_running_for_project_third_job }, 1800)
+ jobs_running_for_project: expected_jobs_running_for_project_third_job,
+ shard: expected_shard }, 1800)
execute(runner)
end
@@ -638,13 +640,28 @@ module Ci
end
context 'when shared runner is used' do
- let(:runner) { shared_runner }
+ let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
+ let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { 0 }
let(:expected_jobs_running_for_project_third_job) { 2 }
it_behaves_like 'metrics collector'
+ context 'when metrics_shard tag is defined' do
+ let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
+ let(:expected_shard) { 'shard_tag' }
+
+ it_behaves_like 'metrics collector'
+ end
+
+ context 'when multiple metrics_shard tag is defined' do
+ let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag metrics_shard::shard_tag_2 tag2)) }
+ let(:expected_shard) { 'shard_tag' }
+
+ it_behaves_like 'metrics collector'
+ end
+
context 'when pending job with queued_at=nil is used' do
before do
pending_job.update(queued_at: nil)
@@ -662,8 +679,9 @@ module Ci
end
context 'when specific runner is used' do
- let(:runner) { specific_runner }
+ let(:runner) { create(:ci_runner, :project, projects: [project], tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shared_runner) { false }
+ let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { '+Inf' }
let(:expected_jobs_running_for_project_third_job) { '+Inf' }