Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-03-11 18:09:37 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-03-11 18:09:37 +0300
commita210c43e0aca0311cc1d3d381763b25979ec72dc (patch)
tree0325d173da7a6e7bd6c2cdf450d0aa1c4e142d0f /spec
parentc9687bdf58e9d4a9c3942f587bd4841f42e3b5de (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/projects/logs_controller_spec.rb143
-rw-r--r--spec/features/projects/navbar_spec.rb1
-rw-r--r--spec/fixtures/api/schemas/environment.json3
-rw-r--r--spec/fixtures/lib/elasticsearch/logs_response.json73
-rw-r--r--spec/fixtures/lib/elasticsearch/query.json39
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_container.json46
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_end_time.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_search.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_start_time.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_times.json49
-rw-r--r--spec/frontend/blob/sketch/index_spec.js92
-rw-r--r--spec/frontend/clusters_list/mock_data.js5
-rw-r--r--spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js142
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js334
-rw-r--r--spec/frontend/logs/components/log_control_buttons_spec.js108
-rw-r--r--spec/frontend/logs/mock_data.js85
-rw-r--r--spec/frontend/logs/stores/actions_spec.js324
-rw-r--r--spec/frontend/logs/stores/getters_spec.js40
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js171
-rw-r--r--spec/frontend/logs/utils_spec.js38
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js (renamed from spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js)2
-rw-r--r--spec/javascripts/blob/sketch/index_spec.js120
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb154
-rw-r--r--spec/lib/gitlab/elasticsearch/logs_spec.rb80
-rw-r--r--spec/models/environment_spec.rb35
-rw-r--r--spec/models/snippet_repository_spec.rb38
-rw-r--r--spec/serializers/environment_entity_spec.rb20
-rw-r--r--spec/services/pod_logs/base_service_spec.rb229
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb174
-rw-r--r--spec/services/pod_logs/kubernetes_service_spec.rb166
-rw-r--r--spec/support/capybara.rb2
31 files changed, 2637 insertions, 220 deletions
diff --git a/spec/controllers/projects/logs_controller_spec.rb b/spec/controllers/projects/logs_controller_spec.rb
new file mode 100644
index 00000000000..ea71dbe45aa
--- /dev/null
+++ b/spec/controllers/projects/logs_controller_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::LogsController do
+ include KubernetesHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:environment) do
+ create(:environment, name: 'production', project: project)
+ end
+
+ let(:pod_name) { "foo" }
+ let(:container) { 'container-1' }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ describe 'GET #index' do
+ let(:empty_project) { create(:project) }
+
+ it 'renders empty logs page if no environment exists' do
+ empty_project.add_maintainer(user)
+ get :index, params: { namespace_id: empty_project.namespace, project_id: empty_project }
+
+ expect(response).to be_ok
+ expect(response).to render_template 'empty_logs'
+ end
+
+ it 'renders index template' do
+ get :index, params: environment_params
+
+ expect(response).to be_ok
+ expect(response).to render_template 'index'
+ end
+ end
+
+ shared_examples 'pod logs service' do |endpoint, service|
+ let(:service_result) do
+ {
+ status: :success,
+ logs: ['Log 1', 'Log 2', 'Log 3'],
+ pods: [pod_name],
+ pod_name: pod_name,
+ container_name: container
+ }
+ end
+ let(:service_result_json) { JSON.parse(service_result.to_json) }
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*', projects: [project]) }
+
+ before do
+ allow_next_instance_of(service) do |instance|
+ allow(instance).to receive(:execute).and_return(service_result)
+ end
+ end
+
+ it 'returns the service result' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(service_result_json)
+ end
+
+ it 'registers a usage of the endpoint' do
+ expect(::Gitlab::UsageCounters::PodLogs).to receive(:increment).with(project.id)
+
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'sets the polling header' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.headers['Poll-Interval']).to eq('3000')
+ end
+
+ context 'when service is processing' do
+ let(:service_result) { nil }
+
+ it 'returns a 202' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+
+ shared_examples 'unsuccessful execution response' do |message|
+ let(:service_result) do
+ {
+ status: :error,
+ message: message
+ }
+ end
+
+ it 'returns the error' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq(service_result_json)
+ end
+ end
+
+ context 'when service is failing' do
+ it_behaves_like 'unsuccessful execution response', 'some error'
+ end
+
+ context 'when cluster is nil' do
+ let!(:cluster) { nil }
+
+ it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
+ end
+
+ context 'when namespace is empty' do
+ before do
+ allow(environment).to receive(:deployment_namespace).and_return('')
+ end
+
+ it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
+ end
+ end
+
+ describe 'GET #k8s' do
+ it_behaves_like 'pod logs service', :k8s, PodLogs::KubernetesService
+ end
+
+ describe 'GET #elasticsearch' do
+ it_behaves_like 'pod logs service', :elasticsearch, PodLogs::ElasticsearchService
+ end
+
+ def environment_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace,
+ project_id: project,
+ environment_name: environment.name)
+ end
+end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index dabb2b2dbf2..10958db299b 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -70,6 +70,7 @@ describe 'Project navbar' do
_('Environments'),
_('Error Tracking'),
_('Serverless'),
+ _('Logs'),
_('Kubernetes')
]
},
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index 7e7e5ce37e3..84217a2a01c 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -26,6 +26,9 @@
"stop_path": { "type": "string" },
"cancel_auto_stop_path": { "type": "string" },
"folder_path": { "type": "string" },
+ "logs_path": { "type": "string" },
+ "logs_api_path": { "type": "string" },
+ "enable_advanced_logs_querying": { "type": "boolean" },
"created_at": { "type": "string", "format": "date-time" },
"updated_at": { "type": "string", "format": "date-time" },
"auto_stop_at": { "type": "string", "format": "date-time" },
diff --git a/spec/fixtures/lib/elasticsearch/logs_response.json b/spec/fixtures/lib/elasticsearch/logs_response.json
new file mode 100644
index 00000000000..7a733882089
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/logs_response.json
@@ -0,0 +1,73 @@
+{
+ "took": 7087,
+ "timed_out": false,
+ "_shards": {
+ "total": 151,
+ "successful": 151,
+ "skipped": 0,
+ "failed": 0,
+ "failures": []
+ },
+ "hits": {
+ "total": 486924,
+ "max_score": null,
+ "hits": [
+ {
+ "_index": "filebeat-6.7.0-2019.10.25",
+ "_type": "doc",
+ "_id": "SkbxAW4BWzhswgK-C5-R",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:34.034Z"
+ },
+ "sort": [
+ 9999998,
+ 1571990602947
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.10.27",
+ "_type": "doc",
+ "_id": "wEigD24BWzhswgK-WUU2",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:35.034Z"
+ },
+ "sort": [
+ 9999949,
+ 1572220194500
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.11.04",
+ "_type": "doc",
+ "_id": "gE6uOG4BWzhswgK-M0x2",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:36.034Z"
+ },
+ "sort": [
+ 9999944,
+ 1572908964497
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.10.30",
+ "_type": "doc",
+ "_id": "0klPHW4BWzhswgK-nfCF",
+ "_score": null,
+ "_source": {
+ "message": "- -\u003e /",
+ "@timestamp": "2019-12-13T14:35:37.034Z"
+ },
+ "sort": [
+ 9999934,
+ 1572449784442
+ ]
+ }
+ ]
+ }
+}
diff --git a/spec/fixtures/lib/elasticsearch/query.json b/spec/fixtures/lib/elasticsearch/query.json
new file mode 100644
index 00000000000..565c871b1c7
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query.json
@@ -0,0 +1,39 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_container.json b/spec/fixtures/lib/elasticsearch/query_with_container.json
new file mode 100644
index 00000000000..21eac5d7dbe
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_container.json
@@ -0,0 +1,46 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.container.name": {
+ "query": "auto-deploy-app"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_end_time.json b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
new file mode 100644
index 00000000000..2859e6427d4
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "lt": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_search.json b/spec/fixtures/lib/elasticsearch/query_with_search.json
new file mode 100644
index 00000000000..3c9bed047fa
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_search.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ },
+ {
+ "simple_query_string": {
+ "query": "foo +bar ",
+ "fields": [
+ "message"
+ ],
+ "default_operator": "and"
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_start_time.json b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
new file mode 100644
index 00000000000..0c5cfca42f7
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "gte": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_times.json b/spec/fixtures/lib/elasticsearch/query_with_times.json
new file mode 100644
index 00000000000..7108d42217e
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_times.json
@@ -0,0 +1,49 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "gte": "2019-12-13T14:35:34.034Z",
+ "lt": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js
new file mode 100644
index 00000000000..f5e9da21b2a
--- /dev/null
+++ b/spec/frontend/blob/sketch/index_spec.js
@@ -0,0 +1,92 @@
+import JSZip from 'jszip';
+import SketchLoader from '~/blob/sketch';
+
+jest.mock('jszip');
+
+describe('Sketch viewer', () => {
+ preloadFixtures('static/sketch_viewer.html');
+
+ beforeEach(() => {
+ loadFixtures('static/sketch_viewer.html');
+ window.URL = {
+ createObjectURL: jest.fn(() => 'http://foo/bar'),
+ };
+ });
+
+ afterEach(() => {
+ window.URL = {};
+ });
+
+ describe('with error message', () => {
+ beforeEach(done => {
+ jest.spyOn(SketchLoader.prototype, 'getZipFile').mockImplementation(
+ () =>
+ new Promise((resolve, reject) => {
+ reject();
+ done();
+ }),
+ );
+
+ return new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('renders error message', () => {
+ expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
+
+ expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
+ 'Cannot show preview.',
+ );
+ });
+
+ it('removes the loading icon', () => {
+ expect(document.querySelector('.js-loading-icon')).toBeNull();
+ });
+ });
+
+ describe('success', () => {
+ beforeEach(done => {
+ const loadAsyncMock = {
+ files: {
+ 'previews/preview.png': {
+ async: jest.fn(),
+ },
+ },
+ };
+
+ loadAsyncMock.files['previews/preview.png'].async.mockImplementation(
+ () =>
+ new Promise(resolve => {
+ resolve('foo');
+ done();
+ }),
+ );
+
+ jest.spyOn(SketchLoader.prototype, 'getZipFile').mockResolvedValue();
+ jest.spyOn(JSZip, 'loadAsync').mockResolvedValue(loadAsyncMock);
+ return new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('does not render error message', () => {
+ expect(document.querySelector('#js-sketch-viewer p')).toBeNull();
+ });
+
+ it('removes the loading icon', () => {
+ expect(document.querySelector('.js-loading-icon')).toBeNull();
+ });
+
+ it('renders preview img', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+
+ expect(img).not.toBeNull();
+ expect(img.classList.contains('img-fluid')).toBeTruthy();
+ });
+
+ it('renders link to image', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+ const link = document.querySelector('#js-sketch-viewer a');
+
+ expect(link.href).toBe(img.src);
+ expect(link.target).toBe('_blank');
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/mock_data.js b/spec/frontend/clusters_list/mock_data.js
index 1812bf9b03f..5398975d81c 100644
--- a/spec/frontend/clusters_list/mock_data.js
+++ b/spec/frontend/clusters_list/mock_data.js
@@ -5,6 +5,7 @@ export default [
size: '3',
clusterType: 'group_type',
status: 'disabled',
+ cpu: '6 (100% free)',
memory: '22.50 (30% free)',
},
{
@@ -13,6 +14,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'unreachable',
+ cpu: '3 (50% free)',
memory: '11 (60% free)',
},
{
@@ -21,6 +23,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'authentication_failure',
+ cpu: '1 (0% free)',
memory: '22 (33% free)',
},
{
@@ -29,6 +32,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'deleting',
+ cpu: '6 (100% free)',
memory: '45 (15% free)',
},
{
@@ -37,6 +41,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'connected',
+ cpu: '6 (100% free)',
memory: '20.12 (35% free)',
},
];
diff --git a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
index 292b8694fbc..14f2a527dfb 100644
--- a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
+++ b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
@@ -7,22 +7,22 @@ import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
describe('ClusterFormDropdown', () => {
- let vm;
+ let wrapper;
const firstItem = { name: 'item 1', value: 1 };
const secondItem = { name: 'item 2', value: 2 };
const items = [firstItem, secondItem, { name: 'item 3', value: 3 }];
beforeEach(() => {
- vm = shallowMount(ClusterFormDropdown);
+ wrapper = shallowMount(ClusterFormDropdown);
});
- afterEach(() => vm.destroy());
+ afterEach(() => wrapper.destroy());
describe('when initial value is provided', () => {
it('sets selectedItem to initial value', () => {
- vm.setProps({ items, value: secondItem.value });
+ wrapper.setProps({ items, value: secondItem.value });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
});
});
});
@@ -31,28 +31,29 @@ describe('ClusterFormDropdown', () => {
it('displays placeholder text', () => {
const placeholder = 'placeholder';
- vm.setProps({ placeholder });
+ wrapper.setProps({ placeholder });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(placeholder);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(placeholder);
});
});
});
describe('when an item is selected', () => {
beforeEach(() => {
- vm.setProps({ items });
+ wrapper.setProps({ items });
- return vm.vm.$nextTick().then(() => {
- vm.findAll('.js-dropdown-item')
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper
+ .findAll('.js-dropdown-item')
.at(1)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
});
it('emits input event with selected item', () => {
- expect(vm.emitted('input')[0]).toEqual([secondItem.value]);
+ expect(wrapper.emitted('input')[0]).toEqual([secondItem.value]);
});
});
@@ -60,37 +61,54 @@ describe('ClusterFormDropdown', () => {
const value = [1];
beforeEach(() => {
- vm.setProps({ items, multiple: true, value });
- return vm.vm
+ wrapper.setProps({ items, multiple: true, value });
+ return wrapper.vm
.$nextTick()
.then(() => {
- vm.findAll('.js-dropdown-item')
+ wrapper
+ .findAll('.js-dropdown-item')
.at(0)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
})
.then(() => {
- vm.findAll('.js-dropdown-item')
+ wrapper
+ .findAll('.js-dropdown-item')
.at(1)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
});
it('emits input event with an array of selected items', () => {
- expect(vm.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
+ expect(wrapper.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
});
});
describe('when multiple items can be selected', () => {
beforeEach(() => {
- vm.setProps({ items, multiple: true, value: firstItem.value });
- return vm.vm.$nextTick();
+ wrapper.setProps({ items, multiple: true, value: firstItem.value });
+ return wrapper.vm.$nextTick();
});
it('displays a checked GlIcon next to the item', () => {
- expect(vm.find(GlIcon).is('.invisible')).toBe(false);
- expect(vm.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ expect(wrapper.find(GlIcon).is('.invisible')).toBe(false);
+ expect(wrapper.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ });
+ });
+
+ describe('when multiple values can be selected and initial value is null', () => {
+ it('emits input event with an array of a single selected item', () => {
+ wrapper.setProps({ items, multiple: true, value: null });
+
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper
+ .findAll('.js-dropdown-item')
+ .at(0)
+ .trigger('click');
+
+ expect(wrapper.emitted('input')[0]).toEqual([[firstItem.value]]);
+ });
});
});
@@ -101,20 +119,20 @@ describe('ClusterFormDropdown', () => {
const currentValue = 1;
const customLabelItems = [{ [labelProperty]: label, value: currentValue }];
- vm.setProps({ labelProperty, items: customLabelItems, value: currentValue });
+ wrapper.setProps({ labelProperty, items: customLabelItems, value: currentValue });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(label);
});
});
});
describe('when loading', () => {
it('dropdown button isLoading', () => {
- vm.setProps({ loading: true });
+ wrapper.setProps({ loading: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('isLoading')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('isLoading')).toBe(true);
});
});
});
@@ -123,20 +141,20 @@ describe('ClusterFormDropdown', () => {
it('uses loading text as toggle button text', () => {
const loadingText = 'loading text';
- vm.setProps({ loading: true, loadingText });
+ wrapper.setProps({ loading: true, loadingText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(loadingText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(loadingText);
});
});
});
describe('when disabled', () => {
it('dropdown button isDisabled', () => {
- vm.setProps({ disabled: true });
+ wrapper.setProps({ disabled: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('isDisabled')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('isDisabled')).toBe(true);
});
});
});
@@ -145,20 +163,20 @@ describe('ClusterFormDropdown', () => {
it('uses disabled text as toggle button text', () => {
const disabledText = 'disabled text';
- vm.setProps({ disabled: true, disabledText });
+ wrapper.setProps({ disabled: true, disabledText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toBe(disabledText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toBe(disabledText);
});
});
});
describe('when has errors', () => {
it('sets border-danger class selector to dropdown toggle', () => {
- vm.setProps({ hasErrors: true });
+ wrapper.setProps({ hasErrors: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).classes('border-danger')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).classes('border-danger')).toBe(true);
});
});
});
@@ -167,10 +185,10 @@ describe('ClusterFormDropdown', () => {
it('displays error message', () => {
const errorMessage = 'error message';
- vm.setProps({ hasErrors: true, errorMessage });
+ wrapper.setProps({ hasErrors: true, errorMessage });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
});
});
});
@@ -179,10 +197,10 @@ describe('ClusterFormDropdown', () => {
it('displays empty text', () => {
const emptyText = 'error message';
- vm.setProps({ items: [], emptyText });
+ wrapper.setProps({ items: [], emptyText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.js-empty-text').text()).toEqual(emptyText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-empty-text').text()).toEqual(emptyText);
});
});
});
@@ -190,34 +208,36 @@ describe('ClusterFormDropdown', () => {
it('displays search field placeholder', () => {
const searchFieldPlaceholder = 'Placeholder';
- vm.setProps({ searchFieldPlaceholder });
+ wrapper.setProps({ searchFieldPlaceholder });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownSearchInput).props('placeholderText')).toEqual(searchFieldPlaceholder);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownSearchInput).props('placeholderText')).toEqual(
+ searchFieldPlaceholder,
+ );
});
});
it('it filters results by search query', () => {
const searchQuery = secondItem.name;
- vm.setProps({ items });
- vm.setData({ searchQuery });
+ wrapper.setProps({ items });
+ wrapper.setData({ searchQuery });
- return vm.vm.$nextTick().then(() => {
- expect(vm.findAll('.js-dropdown-item').length).toEqual(1);
- expect(vm.find('.js-dropdown-item').text()).toEqual(secondItem.name);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll('.js-dropdown-item').length).toEqual(1);
+ expect(wrapper.find('.js-dropdown-item').text()).toEqual(secondItem.name);
});
});
it('focuses dropdown search input when dropdown is displayed', () => {
- const dropdownEl = vm.find('.dropdown').element;
+ const dropdownEl = wrapper.find('.dropdown').element;
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(false);
+ expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(false);
$(dropdownEl).trigger('shown.bs.dropdown');
- return vm.vm.$nextTick(() => {
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(true);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(true);
});
});
});
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
new file mode 100644
index 00000000000..26542c3d046
--- /dev/null
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -0,0 +1,334 @@
+import Vue from 'vue';
+import { GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import EnvironmentLogs from '~/logs/components/environment_logs.vue';
+
+import { createStore } from '~/logs/stores';
+import { scrollDown } from '~/lib/utils/scroll_utils';
+import {
+ mockEnvName,
+ mockEnvironments,
+ mockPods,
+ mockLogsResult,
+ mockTrace,
+ mockPodName,
+ mockSearch,
+ mockEnvironmentsEndpoint,
+ mockDocumentationPath,
+} from '../mock_data';
+
+jest.mock('~/lib/utils/scroll_utils');
+
+describe('EnvironmentLogs', () => {
+ let EnvironmentLogsComponent;
+ let store;
+ let wrapper;
+ let state;
+
+ const propsData = {
+ environmentName: mockEnvName,
+ environmentsPath: mockEnvironmentsEndpoint,
+ clusterApplicationsDocumentationPath: mockDocumentationPath,
+ };
+
+ const actionMocks = {
+ setInitData: jest.fn(),
+ setSearch: jest.fn(),
+ showPodLogs: jest.fn(),
+ showEnvironment: jest.fn(),
+ fetchEnvironments: jest.fn(),
+ };
+
+ const updateControlBtnsMock = jest.fn();
+
+ const findEnvironmentsDropdown = () => wrapper.find('.js-environments-dropdown');
+ const findPodsDropdown = () => wrapper.find('.js-pods-dropdown');
+ const findSearchBar = () => wrapper.find('.js-logs-search');
+ const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
+ const findInfoAlert = () => wrapper.find('.js-elasticsearch-alert');
+
+ const findLogControlButtons = () => wrapper.find({ name: 'log-control-buttons-stub' });
+ const findLogTrace = () => wrapper.find('.js-log-trace');
+
+ const mockSetInitData = () => {
+ state.pods.options = mockPods;
+ state.environments.current = mockEnvName;
+ [state.pods.current] = state.pods.options;
+
+ state.logs.isComplete = false;
+ state.logs.lines = mockLogsResult;
+ };
+
+ const mockShowPodLogs = podName => {
+ state.pods.options = mockPods;
+ state.pods.current = podName;
+
+ state.logs.isComplete = false;
+ state.logs.lines = mockLogsResult;
+ };
+
+ const mockFetchEnvs = () => {
+ state.environments.options = mockEnvironments;
+ };
+
+ const initWrapper = () => {
+ wrapper = shallowMount(EnvironmentLogsComponent, {
+ propsData,
+ store,
+ stubs: {
+ LogControlButtons: {
+ name: 'log-control-buttons-stub',
+ template: '<div/>',
+ methods: {
+ update: updateControlBtnsMock,
+ },
+ },
+ },
+ methods: {
+ ...actionMocks,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ state = store.state.environmentLogs;
+ EnvironmentLogsComponent = Vue.extend(EnvironmentLogs);
+ });
+
+ afterEach(() => {
+ actionMocks.setInitData.mockReset();
+ actionMocks.showPodLogs.mockReset();
+ actionMocks.fetchEnvironments.mockReset();
+
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('displays UI elements', () => {
+ initWrapper();
+
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.isEmpty()).toBe(false);
+
+ // top bar
+ expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
+ expect(findPodsDropdown().is(GlDropdown)).toBe(true);
+ expect(findLogControlButtons().exists()).toBe(true);
+
+ expect(findSearchBar().exists()).toBe(true);
+ expect(findSearchBar().is(GlSearchBoxByClick)).toBe(true);
+ expect(findTimeRangePicker().exists()).toBe(true);
+ expect(findTimeRangePicker().is(DateTimePicker)).toBe(true);
+
+ // log trace
+ expect(findLogTrace().isEmpty()).toBe(false);
+ });
+
+ it('mounted inits data', () => {
+ initWrapper();
+
+ expect(actionMocks.setInitData).toHaveBeenCalledTimes(1);
+ expect(actionMocks.setInitData).toHaveBeenLastCalledWith({
+ timeRange: expect.objectContaining({
+ default: true,
+ }),
+ environmentName: mockEnvName,
+ podName: null,
+ });
+
+ expect(actionMocks.fetchEnvironments).toHaveBeenCalledTimes(1);
+ expect(actionMocks.fetchEnvironments).toHaveBeenLastCalledWith(mockEnvironmentsEndpoint);
+ });
+
+ describe('loading state', () => {
+ beforeEach(() => {
+ state.pods.options = [];
+
+ state.logs = {
+ lines: [],
+ isLoading: true,
+ };
+
+ state.environments = {
+ options: [],
+ isLoading: true,
+ };
+
+ initWrapper();
+ });
+
+ it('displays a disabled environments dropdown', () => {
+ expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
+ expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ });
+
+ it('displays a disabled pods dropdown', () => {
+ expect(findPodsDropdown().attributes('disabled')).toBe('true');
+ expect(findPodsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ });
+
+ it('displays a disabled search bar', () => {
+ expect(findSearchBar().exists()).toBe(true);
+ expect(findSearchBar().attributes('disabled')).toBe('true');
+ });
+
+ it('displays a disabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBe('true');
+ });
+
+ it('does not update buttons state', () => {
+ expect(updateControlBtnsMock).not.toHaveBeenCalled();
+ });
+
+ it('shows a logs trace', () => {
+ expect(findLogTrace().text()).toBe('');
+ expect(
+ findLogTrace()
+ .find('.js-build-loader-animation')
+ .isVisible(),
+ ).toBe(true);
+ });
+ });
+
+ describe('legacy environment', () => {
+ beforeEach(() => {
+ state.pods.options = [];
+
+ state.logs = {
+ lines: [],
+ isLoading: false,
+ };
+
+ state.environments = {
+ options: mockEnvironments,
+ current: 'staging',
+ isLoading: false,
+ };
+
+ initWrapper();
+ });
+
+ it('displays a disabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBe('true');
+ });
+
+ it('displays a disabled search bar', () => {
+ expect(findSearchBar().attributes('disabled')).toBe('true');
+ });
+
+ it('displays an alert to upgrade to ES', () => {
+ expect(findInfoAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('state with data', () => {
+ beforeEach(() => {
+ actionMocks.setInitData.mockImplementation(mockSetInitData);
+ actionMocks.showPodLogs.mockImplementation(mockShowPodLogs);
+ actionMocks.fetchEnvironments.mockImplementation(mockFetchEnvs);
+
+ initWrapper();
+ });
+
+ afterEach(() => {
+ scrollDown.mockReset();
+ updateControlBtnsMock.mockReset();
+
+ actionMocks.setInitData.mockReset();
+ actionMocks.showPodLogs.mockReset();
+ actionMocks.fetchEnvironments.mockReset();
+ });
+
+ it('displays an enabled search bar', () => {
+ expect(findSearchBar().attributes('disabled')).toBeFalsy();
+
+ // input a query and click `search`
+ findSearchBar().vm.$emit('input', mockSearch);
+ findSearchBar().vm.$emit('submit');
+
+ expect(actionMocks.setSearch).toHaveBeenCalledTimes(1);
+ expect(actionMocks.setSearch).toHaveBeenCalledWith(mockSearch);
+ });
+
+ it('displays an enabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
+ });
+
+ it('does not display an alert to upgrade to ES', () => {
+ expect(findInfoAlert().exists()).toBe(false);
+ });
+
+ it('populates environments dropdown', () => {
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
+ expect(items.length).toBe(mockEnvironments.length);
+ mockEnvironments.forEach((env, i) => {
+ const item = items.at(i);
+ expect(item.text()).toBe(env.name);
+ });
+ });
+
+ it('populates pods dropdown', () => {
+ const items = findPodsDropdown().findAll(GlDropdownItem);
+
+ expect(findPodsDropdown().props('text')).toBe(mockPodName);
+ expect(items.length).toBe(mockPods.length);
+ mockPods.forEach((pod, i) => {
+ const item = items.at(i);
+ expect(item.text()).toBe(pod);
+ });
+ });
+
+ it('populates logs trace', () => {
+ const trace = findLogTrace();
+ expect(trace.text().split('\n').length).toBe(mockTrace.length);
+ expect(trace.text().split('\n')).toEqual(mockTrace);
+ });
+
+ it('update control buttons state', () => {
+ expect(updateControlBtnsMock).toHaveBeenCalledTimes(1);
+ });
+
+ it('scrolls to bottom when loaded', () => {
+ expect(scrollDown).toHaveBeenCalledTimes(1);
+ });
+
+ describe('when user clicks', () => {
+ it('environment name, trace is refreshed', () => {
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const index = 1; // any env
+
+ expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(0);
+
+ items.at(index).vm.$emit('click');
+
+ expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(1);
+ expect(actionMocks.showEnvironment).toHaveBeenLastCalledWith(mockEnvironments[index].name);
+ });
+
+ it('pod name, trace is refreshed', () => {
+ const items = findPodsDropdown().findAll(GlDropdownItem);
+ const index = 2; // any pod
+
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
+
+ items.at(index).vm.$emit('click');
+
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
+ expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPods[index]);
+ });
+
+ it('refresh button, trace is refreshed', () => {
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
+
+ findLogControlButtons().vm.$emit('refresh');
+
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
+ expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPodName);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/components/log_control_buttons_spec.js b/spec/frontend/logs/components/log_control_buttons_spec.js
new file mode 100644
index 00000000000..f344e8189c3
--- /dev/null
+++ b/spec/frontend/logs/components/log_control_buttons_spec.js
@@ -0,0 +1,108 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import LogControlButtons from '~/logs/components/log_control_buttons.vue';
+import {
+ canScroll,
+ isScrolledToTop,
+ isScrolledToBottom,
+ scrollDown,
+ scrollUp,
+} from '~/lib/utils/scroll_utils';
+
+jest.mock('~/lib/utils/scroll_utils');
+
+describe('LogControlButtons', () => {
+ let wrapper;
+
+ const findScrollToTop = () => wrapper.find('.js-scroll-to-top');
+ const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
+ const findRefreshBtn = () => wrapper.find('.js-refresh-log');
+
+ const initWrapper = () => {
+ wrapper = shallowMount(LogControlButtons);
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('displays UI elements', () => {
+ initWrapper();
+
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.isEmpty()).toBe(false);
+
+ expect(findScrollToTop().is(GlButton)).toBe(true);
+ expect(findScrollToBottom().is(GlButton)).toBe(true);
+ expect(findRefreshBtn().is(GlButton)).toBe(true);
+ });
+
+ it('emits a `refresh` event on click on `refresh` button', () => {
+ initWrapper();
+
+ // An `undefined` value means no event was emitted
+ expect(wrapper.emitted('refresh')).toBe(undefined);
+
+ findRefreshBtn().vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('refresh')).toHaveLength(1);
+ });
+ });
+
+ describe('when scrolling actions are enabled', () => {
+ beforeEach(() => {
+ // mock scrolled to the middle of a long page
+ canScroll.mockReturnValue(true);
+ isScrolledToBottom.mockReturnValue(false);
+ isScrolledToTop.mockReturnValue(false);
+
+ initWrapper();
+ wrapper.vm.update();
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ canScroll.mockReset();
+ isScrolledToTop.mockReset();
+ isScrolledToBottom.mockReset();
+ });
+
+ it('click on "scroll to top" scrolls up', () => {
+ expect(findScrollToTop().is('[disabled]')).toBe(false);
+
+ findScrollToTop().vm.$emit('click');
+
+ expect(scrollUp).toHaveBeenCalledTimes(1);
+ });
+
+ it('click on "scroll to bottom" scrolls down', () => {
+ expect(findScrollToBottom().is('[disabled]')).toBe(false);
+
+ findScrollToBottom().vm.$emit('click');
+
+ expect(scrollDown).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when scrolling actions are disabled', () => {
+ beforeEach(() => {
+ // mock a short page without a scrollbar
+ canScroll.mockReturnValue(false);
+ isScrolledToBottom.mockReturnValue(true);
+ isScrolledToTop.mockReturnValue(true);
+
+ initWrapper();
+ });
+
+ it('buttons are disabled', () => {
+ wrapper.vm.update();
+ return wrapper.vm.$nextTick(() => {
+ expect(findScrollToTop().is('[disabled]')).toBe(true);
+ expect(findScrollToBottom().is('[disabled]')).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
new file mode 100644
index 00000000000..4c092a84b36
--- /dev/null
+++ b/spec/frontend/logs/mock_data.js
@@ -0,0 +1,85 @@
+export const mockProjectPath = 'root/autodevops-deploy';
+export const mockEnvName = 'production';
+export const mockEnvironmentsEndpoint = `${mockProjectPath}/environments.json`;
+export const mockEnvId = '99';
+export const mockDocumentationPath = '/documentation.md';
+
+const makeMockEnvironment = (id, name, advancedQuerying) => ({
+ id,
+ project_path: mockProjectPath,
+ name,
+ logs_api_path: '/dummy_logs_path.json',
+ enable_advanced_logs_querying: advancedQuerying,
+});
+
+export const mockEnvironment = makeMockEnvironment(mockEnvId, mockEnvName, true);
+export const mockEnvironments = [
+ mockEnvironment,
+ makeMockEnvironment(101, 'staging', false),
+ makeMockEnvironment(102, 'review/a-feature', false),
+];
+
+export const mockPodName = 'production-764c58d697-aaaaa';
+export const mockPods = [
+ mockPodName,
+ 'production-764c58d697-bbbbb',
+ 'production-764c58d697-ccccc',
+ 'production-764c58d697-ddddd',
+];
+
+export const mockLogsResult = [
+ {
+ timestamp: '2019-12-13T13:43:18.2760123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:18.2760123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:26.8420123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:26.8420123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:28.3710123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:28.3710123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:36.8860123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:36.8860123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:38.4000123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:38.4000123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:46.8420123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:46.8430123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:48.3240123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:48.3250123Z', message: '- -> /' },
+];
+
+export const mockTrace = [
+ 'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:18.276Z | - -> /',
+ 'Dec 13 13:43:26.842Z | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:26.842Z | - -> /',
+ 'Dec 13 13:43:28.371Z | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:28.371Z | - -> /',
+ 'Dec 13 13:43:36.886Z | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:36.886Z | - -> /',
+ 'Dec 13 13:43:38.400Z | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:38.400Z | - -> /',
+ 'Dec 13 13:43:46.842Z | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:46.843Z | - -> /',
+ 'Dec 13 13:43:48.324Z | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:48.325Z | - -> /',
+];
+
+export const mockSearch = 'foo +bar';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
new file mode 100644
index 00000000000..6309126159e
--- /dev/null
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -0,0 +1,324 @@
+import MockAdapter from 'axios-mock-adapter';
+
+import testAction from 'helpers/vuex_action_helper';
+import * as types from '~/logs/stores/mutation_types';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+import logsPageState from '~/logs/stores/state';
+import {
+ setInitData,
+ setSearch,
+ showPodLogs,
+ fetchEnvironments,
+ fetchLogs,
+} from '~/logs/stores/actions';
+
+import { defaultTimeRange } from '~/monitoring/constants';
+
+import axios from '~/lib/utils/axios_utils';
+import flash from '~/flash';
+
+import {
+ mockProjectPath,
+ mockPodName,
+ mockEnvironmentsEndpoint,
+ mockEnvironments,
+ mockPods,
+ mockLogsResult,
+ mockEnvName,
+ mockSearch,
+} from '../mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/datetime_range');
+jest.mock('~/logs/utils');
+
+const mockDefaultRange = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T10:00:00.000Z',
+};
+const mockFixedRange = {
+ start: '2020-01-09T18:06:20.000Z',
+ end: '2020-01-09T18:36:20.000Z',
+};
+const mockRollingRange = {
+ duration: 120,
+};
+const mockRollingRangeAsFixed = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T17:58:00.000Z',
+};
+
+describe('Logs Store actions', () => {
+ let state;
+ let mock;
+
+ convertToFixedRange.mockImplementation(range => {
+ if (range === defaultTimeRange) {
+ return { ...mockDefaultRange };
+ }
+ if (range === mockFixedRange) {
+ return { ...mockFixedRange };
+ }
+ if (range === mockRollingRange) {
+ return { ...mockRollingRangeAsFixed };
+ }
+ throw new Error('Invalid time range');
+ });
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ afterEach(() => {
+ flash.mockClear();
+ });
+
+ describe('setInitData', () => {
+ it('should commit environment and pod name mutation', () =>
+ testAction(setInitData, { environmentName: mockEnvName, podName: mockPodName }, state, [
+ { type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ ]));
+ });
+
+ describe('setSearch', () => {
+ it('should commit search mutation', () =>
+ testAction(
+ setSearch,
+ mockSearch,
+ state,
+ [{ type: types.SET_SEARCH, payload: mockSearch }],
+ [{ type: 'fetchLogs' }],
+ ));
+ });
+
+ describe('showPodLogs', () => {
+ it('should commit pod name', () =>
+ testAction(
+ showPodLogs,
+ mockPodName,
+ state,
+ [{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName }],
+ [{ type: 'fetchLogs' }],
+ ));
+ });
+
+ describe('fetchEnvironments', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ it('should commit RECEIVE_ENVIRONMENTS_DATA_SUCCESS mutation on correct data', () => {
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, { environments: mockEnvironments });
+ return testAction(
+ fetchEnvironments,
+ mockEnvironmentsEndpoint,
+ state,
+ [
+ { type: types.REQUEST_ENVIRONMENTS_DATA },
+ { type: types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, payload: mockEnvironments },
+ ],
+ [{ type: 'fetchLogs' }],
+ );
+ });
+
+ it('should commit RECEIVE_ENVIRONMENTS_DATA_ERROR on wrong data', () => {
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(500);
+ return testAction(
+ fetchEnvironments,
+ mockEnvironmentsEndpoint,
+ state,
+ [
+ { type: types.REQUEST_ENVIRONMENTS_DATA },
+ { type: types.RECEIVE_ENVIRONMENTS_DATA_ERROR },
+ ],
+ [],
+ () => {
+ expect(flash).toHaveBeenCalledTimes(1);
+ },
+ );
+ });
+ });
+
+ describe('fetchLogs', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ it('should commit logs and pod data when there is pod name defined', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ state.pods.current = mockPodName;
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock
+ .onGet(endpoint, {
+ params: {
+ pod_name: mockPodName,
+ ...mockDefaultRange,
+ },
+ })
+ .reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+
+ mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ );
+ });
+
+ it('should commit logs and pod data when there is pod name defined and a non-default date range', () => {
+ state.projectPath = mockProjectPath;
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ state.pods.current = mockPodName;
+ state.timeRange.current = mockFixedRange;
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock
+ .onGet(endpoint, {
+ params: {
+ pod_name: mockPodName,
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ },
+ })
+ .reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ );
+ });
+
+ it('should commit logs and pod data when there is pod name and search and a faulty date range', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ state.pods.current = mockPodName;
+ state.search = mockSearch;
+ state.timeRange.current = 'INVALID_TIME_RANGE';
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock
+ .onGet(endpoint, {
+ params: {
+ pod_name: mockPodName,
+ search: mockSearch,
+ },
+ })
+ .reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+
+ mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ () => {
+ // Warning about time ranges was issued
+ expect(flash).toHaveBeenCalledTimes(1);
+ expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
+ },
+ );
+ });
+
+ it('should commit logs and pod data when no pod name defined', done => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock.onGet(endpoint, { params: { ...mockDefaultRange } }).reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+ mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+
+ testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('should commit logs and pod errors when backend fails', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ const endpoint = `/${mockProjectPath}/-/logs/elasticsearch.json?environment_name=${mockEnvName}`;
+ mock.onGet(endpoint).replyOnce(500);
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.RECEIVE_PODS_DATA_ERROR },
+ { type: types.RECEIVE_LOGS_DATA_ERROR },
+ ],
+ [],
+ () => {
+ expect(flash).toHaveBeenCalledTimes(1);
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/getters_spec.js b/spec/frontend/logs/stores/getters_spec.js
new file mode 100644
index 00000000000..fdce575fa97
--- /dev/null
+++ b/spec/frontend/logs/stores/getters_spec.js
@@ -0,0 +1,40 @@
+import * as getters from '~/logs/stores/getters';
+import logsPageState from '~/logs/stores/state';
+
+import { mockLogsResult, mockTrace } from '../mock_data';
+
+describe('Logs Store getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ describe('trace', () => {
+ describe('when state is initialized', () => {
+ it('returns an empty string', () => {
+ expect(getters.trace(state)).toEqual('');
+ });
+ });
+
+ describe('when state logs are empty', () => {
+ beforeEach(() => {
+ state.logs.lines = [];
+ });
+
+ it('returns an empty string', () => {
+ expect(getters.trace(state)).toEqual('');
+ });
+ });
+
+ describe('when state logs are set', () => {
+ beforeEach(() => {
+ state.logs.lines = mockLogsResult;
+ });
+
+ it('returns the logs trace joined by newlines', () => {
+ expect(getters.trace(state)).toEqual(mockTrace.join('\n'));
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
new file mode 100644
index 00000000000..dcb358c7d5b
--- /dev/null
+++ b/spec/frontend/logs/stores/mutations_spec.js
@@ -0,0 +1,171 @@
+import mutations from '~/logs/stores/mutations';
+import * as types from '~/logs/stores/mutation_types';
+
+import logsPageState from '~/logs/stores/state';
+import {
+ mockEnvName,
+ mockEnvironments,
+ mockPods,
+ mockPodName,
+ mockLogsResult,
+ mockSearch,
+} from '../mock_data';
+
+describe('Logs Store Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ it('ensures mutation types are correctly named', () => {
+ Object.keys(types).forEach(k => {
+ expect(k).toEqual(types[k]);
+ });
+ });
+
+ describe('SET_PROJECT_ENVIRONMENT', () => {
+ it('sets the environment', () => {
+ mutations[types.SET_PROJECT_ENVIRONMENT](state, mockEnvName);
+ expect(state.environments.current).toEqual(mockEnvName);
+ });
+ });
+
+ describe('SET_SEARCH', () => {
+ it('sets the search', () => {
+ mutations[types.SET_SEARCH](state, mockSearch);
+ expect(state.search).toEqual(mockSearch);
+ });
+ });
+
+ describe('REQUEST_ENVIRONMENTS_DATA', () => {
+ it('inits data', () => {
+ mutations[types.REQUEST_ENVIRONMENTS_DATA](state);
+ expect(state.environments.options).toEqual([]);
+ expect(state.environments.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_ENVIRONMENTS_DATA_SUCCESS', () => {
+ it('receives environments data and stores it as options', () => {
+ expect(state.environments.options).toEqual([]);
+
+ mutations[types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, mockEnvironments);
+
+ expect(state.environments.options).toEqual(mockEnvironments);
+ expect(state.environments.isLoading).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_ENVIRONMENTS_DATA_ERROR', () => {
+ it('captures an error loading environments', () => {
+ mutations[types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state);
+
+ expect(state.environments).toEqual({
+ options: [],
+ isLoading: false,
+ current: null,
+ });
+ });
+ });
+
+ describe('REQUEST_LOGS_DATA', () => {
+ it('starts loading for logs', () => {
+ mutations[types.REQUEST_LOGS_DATA](state);
+
+ expect(state.logs).toEqual(
+ expect.objectContaining({
+ lines: [],
+ isLoading: true,
+ isComplete: false,
+ }),
+ );
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
+ it('receives logs lines', () => {
+ mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLogsResult);
+
+ expect(state.logs).toEqual(
+ expect.objectContaining({
+ lines: mockLogsResult,
+ isLoading: false,
+ isComplete: true,
+ }),
+ );
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_ERROR', () => {
+ it('receives log data error and stops loading', () => {
+ mutations[types.RECEIVE_LOGS_DATA_ERROR](state);
+
+ expect(state.logs).toEqual(
+ expect.objectContaining({
+ lines: [],
+ isLoading: false,
+ isComplete: true,
+ }),
+ );
+ });
+ });
+
+ describe('SET_CURRENT_POD_NAME', () => {
+ it('set current pod name', () => {
+ mutations[types.SET_CURRENT_POD_NAME](state, mockPodName);
+
+ expect(state.pods.current).toEqual(mockPodName);
+ });
+ });
+
+ describe('SET_TIME_RANGE', () => {
+ it('sets a default range', () => {
+ expect(state.timeRange.current).toEqual(expect.any(Object));
+ });
+
+ it('sets a time range', () => {
+ const mockRange = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T10:00:00.000Z',
+ };
+ mutations[types.SET_TIME_RANGE](state, mockRange);
+
+ expect(state.timeRange.current).toEqual(mockRange);
+ });
+ });
+
+ describe('REQUEST_PODS_DATA', () => {
+ it('resets pod options and starts loading', () => {
+ mutations[types.REQUEST_PODS_DATA](state);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: [],
+ }),
+ );
+ });
+ });
+ describe('RECEIVE_PODS_DATA_SUCCESS', () => {
+ it('receives pods data success', () => {
+ mutations[types.RECEIVE_PODS_DATA_SUCCESS](state, mockPods);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: mockPods,
+ }),
+ );
+ });
+ });
+ describe('RECEIVE_PODS_DATA_ERROR', () => {
+ it('receives pods data error', () => {
+ mutations[types.RECEIVE_PODS_DATA_ERROR](state);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: [],
+ }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/logs/utils_spec.js b/spec/frontend/logs/utils_spec.js
new file mode 100644
index 00000000000..986fe320363
--- /dev/null
+++ b/spec/frontend/logs/utils_spec.js
@@ -0,0 +1,38 @@
+import { getTimeRange } from '~/logs/utils';
+
+describe('logs/utils', () => {
+ describe('getTimeRange', () => {
+ const nowTimestamp = 1577836800000;
+ const nowString = '2020-01-01T00:00:00.000Z';
+
+ beforeEach(() => {
+ jest.spyOn(Date, 'now').mockImplementation(() => nowTimestamp);
+ });
+
+ afterEach(() => {
+ Date.now.mockRestore();
+ });
+
+ it('returns the right values', () => {
+ expect(getTimeRange(0)).toEqual({
+ start: '2020-01-01T00:00:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 30)).toEqual({
+ start: '2019-12-31T23:30:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 60 * 24 * 7 * 1)).toEqual({
+ start: '2019-12-25T00:00:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 60 * 24 * 7 * 4)).toEqual({
+ start: '2019-12-04T00:00:00.000Z',
+ end: nowString,
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js
index 0c2b7b7392d..4e06e5c12fc 100644
--- a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js
+++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js
@@ -1,3 +1,5 @@
+// this file can't be migrated to jest because it relies on the browser to perform integration tests:
+// see: https://gitlab.com/gitlab-org/gitlab/-/issues/194207#note_301878738
import { FIXTURES_PATH } from 'spec/test_constants';
import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
diff --git a/spec/javascripts/blob/sketch/index_spec.js b/spec/javascripts/blob/sketch/index_spec.js
deleted file mode 100644
index 3d3129e10da..00000000000
--- a/spec/javascripts/blob/sketch/index_spec.js
+++ /dev/null
@@ -1,120 +0,0 @@
-/* eslint-disable no-new, promise/catch-or-return */
-import JSZip from 'jszip';
-import SketchLoader from '~/blob/sketch';
-
-describe('Sketch viewer', () => {
- const generateZipFileArrayBuffer = (zipFile, resolve, done) => {
- zipFile.generateAsync({ type: 'arrayBuffer' }).then(content => {
- resolve(content);
-
- setTimeout(() => {
- done();
- }, 100);
- });
- };
-
- preloadFixtures('static/sketch_viewer.html');
-
- beforeEach(() => {
- loadFixtures('static/sketch_viewer.html');
- });
-
- describe('with error message', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise((resolve, reject) => {
- reject();
-
- setTimeout(() => {
- done();
- });
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('renders error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
-
- expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
- 'Cannot show preview.',
- );
- });
-
- it('removes render the loading icon', () => {
- expect(document.querySelector('.js-loading-icon')).toBeNull();
- });
- });
-
- describe('success', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise(resolve => {
- const zipFile = new JSZip();
- zipFile
- .folder('previews')
- .file(
- 'preview.png',
- 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAMAAAAoyzS7AAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAA1JREFUeNoBAgD9/wAAAAIAAVMrnDAAAAAASUVORK5CYII=',
- {
- base64: true,
- },
- );
-
- generateZipFileArrayBuffer(zipFile, resolve, done);
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('does not render error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).toBeNull();
- });
-
- it('removes render the loading icon', () => {
- expect(document.querySelector('.js-loading-icon')).toBeNull();
- });
-
- it('renders preview img', () => {
- const img = document.querySelector('#js-sketch-viewer img');
-
- expect(img).not.toBeNull();
- expect(img.classList.contains('img-fluid')).toBeTruthy();
- });
-
- it('renders link to image', () => {
- const img = document.querySelector('#js-sketch-viewer img');
- const link = document.querySelector('#js-sketch-viewer a');
-
- expect(link.href).toBe(img.src);
- expect(link.target).toBe('_blank');
- });
- });
-
- describe('incorrect file', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise(resolve => {
- const zipFile = new JSZip();
-
- generateZipFileArrayBuffer(zipFile, resolve, done);
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('renders error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
-
- expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
- 'Cannot show preview.',
- );
- });
- });
-});
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
new file mode 100644
index 00000000000..08d3b7bec6a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_02_26_162723 do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:users) { table(:users) }
+ let(:snippets) { table(:snippets) }
+ let(:snippet_repositories) { table(:snippet_repositories) }
+
+ let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test') }
+ let!(:snippet_with_repo) { snippets.create(id: 1, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+ let!(:snippet_with_empty_repo) { snippets.create(id: 2, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+ let!(:snippet_without_repo) { snippets.create(id: 3, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+
+ let(:file_name) { 'file_name.rb' }
+ let(:content) { 'content' }
+ let(:ids) { snippets.pluck('MIN(id)', 'MAX(id)').first }
+ let(:service) { described_class.new }
+
+ subject { service.perform(*ids) }
+
+ before do
+ allow(snippet_with_repo).to receive(:disk_path).and_return(disk_path(snippet_with_repo))
+
+ TestEnv.copy_repo(snippet_with_repo,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
+
+ raw_repository(snippet_with_empty_repo).create_repository
+ end
+
+ after do
+ raw_repository(snippet_with_repo).remove
+ raw_repository(snippet_without_repo).remove
+ raw_repository(snippet_with_empty_repo).remove
+ end
+
+ describe '#perform' do
+ it 'logs successful migrated snippets' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:info).exactly(3).times
+ end
+
+ subject
+ end
+
+ context 'when snippet has a non empty repository' do
+ it 'does not perform any action' do
+ expect(service).not_to receive(:create_repository_and_files).with(snippet_with_repo)
+
+ subject
+ end
+ end
+
+ shared_examples 'commits the file to the repository' do
+ it do
+ subject
+
+ blob = blob_at(snippet, file_name)
+
+ aggregate_failures do
+ expect(blob).to be
+ expect(blob.data).to eq content
+ end
+ end
+ end
+
+ context 'when snippet has an empty repo' do
+ before do
+ expect(repository_exists?(snippet_with_empty_repo)).to be_truthy
+ end
+
+ it_behaves_like 'commits the file to the repository' do
+ let(:snippet) { snippet_with_empty_repo }
+ end
+ end
+
+ context 'when snippet does not have a repository' do
+ it 'creates the repository' do
+ expect { subject }.to change { repository_exists?(snippet_without_repo) }.from(false).to(true)
+ end
+
+ it_behaves_like 'commits the file to the repository' do
+ let(:snippet) { snippet_without_repo }
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(service).to receive(:create_commit).and_raise(StandardError)
+ end
+
+ it 'logs errors' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:error).exactly(3).times
+ end
+
+ subject
+ end
+
+ it "retries #{described_class::MAX_RETRIES} times the operation if it fails" do
+ expect(service).to receive(:create_commit).exactly(snippets.count * described_class::MAX_RETRIES).times
+
+ subject
+ end
+
+ it 'destroys the snippet repository' do
+ expect(service).to receive(:destroy_snippet_repository).exactly(3).times.and_call_original
+
+ subject
+
+ expect(snippet_repositories.count).to eq 0
+ end
+
+ it 'deletes the repository on disk' do
+ subject
+
+ aggregate_failures do
+ expect(repository_exists?(snippet_with_repo)).to be_falsey
+ expect(repository_exists?(snippet_without_repo)).to be_falsey
+ expect(repository_exists?(snippet_with_empty_repo)).to be_falsey
+ end
+ end
+ end
+ end
+
+ def blob_at(snippet, path)
+ raw_repository(snippet).blob_at('master', path)
+ end
+
+ def repository_exists?(snippet)
+ gitlab_shell.repository_exists?('default', "#{disk_path(snippet)}.git")
+ end
+
+ def raw_repository(snippet)
+ Gitlab::Git::Repository.new('default',
+ "#{disk_path(snippet)}.git",
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
+ "@snippets/#{snippet.id}")
+ end
+
+ def hashed_repository(snippet)
+ Storage::Hashed.new(snippet, prefix: '@snippets')
+ end
+
+ def disk_path(snippet)
+ hashed_repository(snippet).disk_path
+ end
+
+ def ls_files(snippet)
+ raw_repository(snippet).ls_files(nil)
+ end
+end
diff --git a/spec/lib/gitlab/elasticsearch/logs_spec.rb b/spec/lib/gitlab/elasticsearch/logs_spec.rb
new file mode 100644
index 00000000000..b2f23e30465
--- /dev/null
+++ b/spec/lib/gitlab/elasticsearch/logs_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Elasticsearch::Logs do
+ let(:client) { Elasticsearch::Transport::Client }
+
+ let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", message: "- -\u003e /" } }
+
+ let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
+
+ subject { described_class.new(client) }
+
+ let(:namespace) { "autodevops-deploy-9-production" }
+ let(:pod_name) { "production-6866bc8974-m4sk4" }
+ let(:container_name) { "auto-deploy-app" }
+ let(:search) { "foo +bar "}
+ let(:start_time) { "2019-12-13T14:35:34.034Z" }
+ let(:end_time) { "2019-12-13T14:35:34.034Z" }
+
+ let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
+ let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
+ let(:body_with_search) { JSON.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
+ let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
+ let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
+ let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
+
+ RSpec::Matchers.define :a_hash_equal_to_json do |expected|
+ match do |actual|
+ actual.as_json == expected
+ end
+ end
+
+ describe '#pod_logs' do
+ it 'returns the logs as an array' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by container name' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, container_name)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by search' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, search)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by start_time and end_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, nil, start_time, end_time)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by only start_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, nil, start_time)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by only end_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, nil, nil, end_time)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+ end
+end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 03aef7aea5c..6020db09ccf 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -1266,4 +1266,39 @@ describe Environment, :use_clean_rails_memory_store_caching do
expect(env).to be_persisted
end
end
+
+ describe '#elastic_stack_available?' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
+
+ context 'when app does not exist' do
+ it 'returns false' do
+ expect(environment.elastic_stack_available?).to be(false)
+ end
+ end
+
+ context 'when app exists' do
+ let!(:application) { create(:clusters_applications_elastic_stack, cluster: cluster) }
+
+ it 'returns false' do
+ expect(environment.elastic_stack_available?).to be(false)
+ end
+ end
+
+ context 'when app is installed' do
+ let!(:application) { create(:clusters_applications_elastic_stack, :installed, cluster: cluster) }
+
+ it 'returns true' do
+ expect(environment.elastic_stack_available?).to be(true)
+ end
+ end
+
+ context 'when app is updated' do
+ let!(:application) { create(:clusters_applications_elastic_stack, :updated, cluster: cluster) }
+
+ it 'returns true' do
+ expect(environment.elastic_stack_available?).to be(true)
+ end
+ end
+ end
end
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 088d37725aa..6861e03282a 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -26,44 +26,6 @@ describe SnippetRepository do
end
end
- describe '#create_file' do
- let(:snippet) { create(:personal_snippet, :empty_repo, author: user) }
-
- it 'creates the file' do
- snippet_repository.create_file(user, 'foo', 'bar', commit_opts)
- blob = first_blob(snippet)
-
- aggregate_failures do
- expect(blob).not_to be_nil
- expect(blob.path).to eq 'foo'
- expect(blob.data).to eq 'bar'
- end
- end
-
- it 'fills the file path if empty' do
- snippet_repository.create_file(user, nil, 'bar', commit_opts)
- blob = first_blob(snippet)
-
- aggregate_failures do
- expect(blob).not_to be_nil
- expect(blob.path).to eq 'snippetfile1.txt'
- expect(blob.data).to eq 'bar'
- end
- end
-
- context 'when the file exists' do
- let(:snippet) { create(:personal_snippet, :repository, author: user) }
-
- it 'captures the git exception and raises a SnippetRepository::CommitError' do
- existing_blob = first_blob(snippet)
-
- expect do
- snippet_repository.create_file(user, existing_blob.path, existing_blob.data, commit_opts)
- end.to raise_error described_class::CommitError
- end
- end
- end
-
describe '#multi_files_action' do
let(:new_file) { { file_path: 'new_file_test', content: 'bar' } }
let(:move_file) { { previous_path: 'CHANGELOG', file_path: 'CHANGELOG_new', content: 'bar' } }
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index f392ecea959..b4ea90d2141 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe EnvironmentEntity do
+ include Gitlab::Routing.url_helpers
+
let(:request) { double('request') }
let(:entity) do
described_class.new(environment, request: spy('request'))
@@ -71,4 +73,22 @@ describe EnvironmentEntity do
expect(subject).to include(:cancel_auto_stop_path, :auto_stop_at)
end
end
+
+ context 'pod_logs' do
+ it 'exposes logs keys' do
+ expect(subject).to include(:logs_path)
+ expect(subject).to include(:logs_api_path)
+ expect(subject).to include(:enable_advanced_logs_querying)
+ end
+
+ it 'uses k8s api when ES is not available' do
+ expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json))
+ end
+
+ it 'uses ES api when ES is available' do
+ allow(environment).to receive(:elastic_stack_available?).and_return(true)
+
+ expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json))
+ end
+ end
end
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
new file mode 100644
index 00000000000..a18fda544df
--- /dev/null
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -0,0 +1,229 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::BaseService do
+ include KubernetesHelpers
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-0' }
+ let(:params) { {} }
+ let(:raw_pods) do
+ JSON.parse([
+ kube_pod(name: pod_name)
+ ].to_json, object_class: OpenStruct)
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#initialize' do
+ let(:params) do
+ {
+ 'container_name' => container_name,
+ 'another_param' => 'foo'
+ }
+ end
+
+ it 'filters the parameters' do
+ expect(subject.cluster).to eq(cluster)
+ expect(subject.namespace).to eq(namespace)
+ expect(subject.params).to eq({
+ 'container_name' => container_name
+ })
+ expect(subject.params.equal?(params)).to be(false)
+ end
+ end
+
+ describe '#check_arguments' do
+ context 'when cluster and namespace are provided' do
+ it 'returns success' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'when cluster is nil' do
+ let(:cluster) { nil }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Cluster does not exist')
+ end
+ end
+
+ context 'when namespace is nil' do
+ let(:namespace) { nil }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Namespace is empty')
+ end
+ end
+
+ context 'when namespace is empty' do
+ let(:namespace) { '' }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Namespace is empty')
+ end
+ end
+ end
+
+ describe '#check_param_lengths' do
+ context 'when pod_name and container_name are provided' do
+ let(:params) do
+ {
+ 'pod_name' => pod_name,
+ 'container_name' => container_name
+ }
+ end
+
+ it 'returns success' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ expect(result[:container_name]).to eq(container_name)
+ end
+ end
+
+ context 'when pod_name is too long' do
+ let(:params) do
+ {
+ 'pod_name' => "a very long string." * 15
+ }
+ end
+
+ it 'returns an error' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('pod_name cannot be larger than 253 chars')
+ end
+ end
+
+ context 'when container_name is too long' do
+ let(:params) do
+ {
+ 'container_name' => "a very long string." * 15
+ }
+ end
+
+ it 'returns an error' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('container_name cannot be larger than 253 chars')
+ end
+ end
+ end
+
+ describe '#get_raw_pods' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns success with passthrough k8s response' do
+ stub_kubeclient_pods(namespace)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
+ end
+ end
+
+ describe '#get_pod_names' do
+ it 'returns success with a list of pods' do
+ result = subject.send(:get_pod_names, raw_pods: raw_pods)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pods]).to eq([pod_name])
+ end
+ end
+
+ describe '#check_pod_name' do
+ it 'returns success if pod_name was specified' do
+ result = subject.send(:check_pod_name, pod_name: pod_name, pods: [pod_name])
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ end
+
+ it 'returns success if pod_name was not specified but there are pods' do
+ result = subject.send(:check_pod_name, pod_name: nil, pods: [pod_name])
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ end
+
+ it 'returns error if pod_name was not specified and there are no pods' do
+ result = subject.send(:check_pod_name, pod_name: nil, pods: [])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No pods available')
+ end
+
+ it 'returns error if pod_name was specified but does not exist' do
+ result = subject.send(:check_pod_name, pod_name: 'another_pod', pods: [pod_name])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Pod does not exist')
+ end
+ end
+
+ describe '#check_container_name' do
+ it 'returns success if container_name was specified' do
+ result = subject.send(:check_container_name,
+ container_name: container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:container_name]).to eq(container_name)
+ end
+
+ it 'returns success if container_name was not specified and there are containers' do
+ result = subject.send(:check_container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:container_name]).to eq(container_name)
+ end
+
+ it 'returns error if container_name was not specified and there are no containers on the pod' do
+ raw_pods.first.spec.containers = []
+
+ result = subject.send(:check_container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No containers available')
+ end
+
+ it 'returns error if container_name was specified but does not exist' do
+ result = subject.send(:check_container_name,
+ container_name: 'foo',
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Container does not exist')
+ end
+ end
+end
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
new file mode 100644
index 00000000000..0f0c36da56a
--- /dev/null
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -0,0 +1,174 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::ElasticsearchService do
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-1' }
+ let(:search) { 'foo -bar' }
+ let(:start_time) { '2019-01-02T12:13:14+02:00' }
+ let(:end_time) { '2019-01-03T12:13:14+02:00' }
+ let(:params) { {} }
+ let(:expected_logs) do
+ [
+ { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+ { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+ { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+ ]
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#check_times' do
+ context 'with start and end provided and valid' do
+ let(:params) do
+ {
+ 'start' => start_time,
+ 'end' => end_time
+ }
+ end
+
+ it 'returns success with times' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:start]).to eq(start_time)
+ expect(result[:end]).to eq(end_time)
+ end
+ end
+
+ context 'with start and end not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_times, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'with start valid and end invalid' do
+ let(:params) do
+ {
+ 'start' => start_time,
+ 'end' => 'invalid date'
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid start or end time format')
+ end
+ end
+
+ context 'with start invalid and end valid' do
+ let(:params) do
+ {
+ 'start' => 'invalid date',
+ 'end' => end_time
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid start or end time format')
+ end
+ end
+ end
+
+ describe '#check_search' do
+ context 'with search provided and valid' do
+ let(:params) do
+ {
+ 'search' => search
+ }
+ end
+
+ it 'returns success with search' do
+ result = subject.send(:check_search, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:search]).to eq(search)
+ end
+ end
+
+ context 'with search not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_search, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+ end
+
+ describe '#pod_logs' do
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ search: search,
+ start: start_time,
+ end: end_time
+ }
+ end
+
+ before do
+ create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ end
+
+ it 'returns the logs' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .with(namespace, pod_name, container_name, search, start_time, end_time)
+ .and_return(expected_logs)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'returns an error when ES is unreachable' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(nil)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to connect to Elasticsearch')
+ end
+
+ it 'handles server errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
+ end
+ end
+end
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
new file mode 100644
index 00000000000..9fab88a14f6
--- /dev/null
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::KubernetesService do
+ include KubernetesHelpers
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-1' }
+ let(:params) { {} }
+
+ let(:raw_logs) do
+ "2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n" \
+ "2019-12-13T14:04:24.123456Z Log 3"
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#pod_logs' do
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name
+ }
+ end
+
+ let(:expected_logs) { raw_logs }
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns the logs' do
+ stub_kubeclient_logs(pod_name, namespace, container: container_name)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'handles Not Found errors from k8s' do
+ allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
+ .to receive(:get_pod_log)
+ .with(any_args)
+ .and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not Found', {}))
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Pod not found')
+ end
+
+ it 'handles HTTP errors from k8s' do
+ allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
+ .to receive(:get_pod_log)
+ .with(any_args)
+ .and_raise(Kubeclient::HttpError.new(500, 'Error', {}))
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Kubernetes API returned status code: 500')
+ end
+ end
+
+ describe '#encode_logs_to_utf8', :aggregate_failures do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:expected_logs) { '2019-12-13T14:04:22.123456Z ✔ Started logging errors to Sentry' }
+ let(:raw_logs) { expected_logs.dup.force_encoding(Encoding::ASCII_8BIT) }
+ let(:result) { subject.send(:encode_logs_to_utf8, result_arg) }
+
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ logs: raw_logs
+ }
+ end
+
+ it 'converts logs to utf-8' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'returns error if output of encoding helper is blank' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return('')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ it 'returns error if output of encoding helper is nil' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return(nil)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ it 'returns error if output of encoding helper is not UTF-8' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8)
+ .and_return(expected_logs.encode(Encoding::UTF_16BE))
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ context 'when logs are nil' do
+ let(:raw_logs) { nil }
+ let(:expected_logs) { nil }
+
+ it 'returns nil' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+
+ context 'when logs are blank' do
+ let(:raw_logs) { (+'').force_encoding(Encoding::ASCII_8BIT) }
+ let(:expected_logs) { '' }
+
+ it 'returns blank string' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+
+ context 'when logs are already in utf-8' do
+ let(:raw_logs) { expected_logs }
+
+ it 'does not fail' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+ end
+
+ describe '#split_logs' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ let(:expected_logs) do
+ [
+ { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+ { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+ { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+ ]
+ end
+
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ logs: raw_logs
+ }
+ end
+
+ it 'returns the logs' do
+ result = subject.send(:split_logs, result_arg)
+
+ aggregate_failures do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+ end
+end
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 9ac7d0df737..5d8779ec782 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -82,7 +82,7 @@ Capybara.enable_aria_label = true
Capybara::Screenshot.append_timestamp = false
Capybara::Screenshot.register_filename_prefix_formatter(:rspec) do |example|
- ::File.join(QA::Runtime::Namespace.name, example.full_description.downcase.parameterize(separator: "_")[0..99])
+ example.full_description.downcase.parameterize(separator: "_")[0..99]
end
# Keep only the screenshots generated from the last failing test suite
Capybara::Screenshot.prune_strategy = :keep_last_run