author    | GitLab Bot <gitlab-bot@gitlab.com> | 2020-05-07 18:09:29 +0300
committer | GitLab Bot <gitlab-bot@gitlab.com> | 2020-05-07 18:09:29 +0300
commit    | f35a7a3b8e97d7af2ec1505d3fbcd6ffdd869fd2 (patch)
tree      | 3a31002cc98598aed02c21606b21a5a123afaad2 /spec/services
parent    | 896b68514b43b9646d763e67f63fbe8f9ef2f723 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec/services')
6 files changed, 238 insertions, 13 deletions
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
new file mode 100644
index 00000000000..73f9f103902
--- /dev/null
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AlertManagement::ProcessPrometheusAlertService do
+  let_it_be(:project) { create(:project) }
+
+  describe '#execute' do
+    subject { described_class.new(project, nil, payload).execute }
+
+    context 'when alert payload is valid' do
+      let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) }
+      let(:payload) do
+        {
+          'status' => status,
+          'labels' => {
+            'alertname' => 'GitalyFileServerDown',
+            'channel' => 'gitaly',
+            'pager' => 'pagerduty',
+            'severity' => 's1'
+          },
+          'annotations' => {
+            'description' => 'Alert description',
+            'runbook' => 'troubleshooting/gitaly-down.md',
+            'title' => 'Alert title'
+          },
+          'startsAt' => '2020-04-27T10:10:22.265949279Z',
+          'endsAt' => '2020-04-27T10:20:22.265949279Z',
+          'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1',
+          'fingerprint' => 'b6ac4d42057c43c1'
+        }
+      end
+
+      context 'when Prometheus alert status is firing' do
+        let(:status) { 'firing' }
+
+        context 'when alert with the same fingerprint already exists' do
+          let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: parsed_alert.gitlab_fingerprint) }
+
+          context 'when status can be changed' do
+            it 'changes status to triggered' do
+              expect { subject }.to change { alert.reload.triggered? }.to(true)
+            end
+          end
+
+          context 'when status change did not succeed' do
+            before do
+              allow(AlertManagement::Alert).to receive(:for_fingerprint).and_return([alert])
+              allow(alert).to receive(:trigger).and_return(false)
+            end
+
+            it 'writes a warning to the log' do
+              expect(Gitlab::AppLogger).to receive(:warn).with(
+                message: 'Unable to update AlertManagement::Alert status to triggered',
+                project_id: project.id,
+                alert_id: alert.id
+              )
+
+              subject
+            end
+          end
+
+          it { is_expected.to be_success }
+        end
+
+        context 'when alert does not exist' do
+          context 'when alert can be created' do
+            it 'creates a new alert' do
+              expect { subject }.to change { AlertManagement::Alert.where(project: project).count }.by(1)
+            end
+          end
+
+          context 'when alert cannot be created' do
+            let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] }) }
+            let(:am_alert) { instance_double(AlertManagement::Alert, save: false, errors: errors) }
+
+            before do
+              allow(AlertManagement::Alert).to receive(:new).and_return(am_alert)
+            end
+
+            it 'writes a warning to the log' do
+              expect(Gitlab::AppLogger).to receive(:warn).with(
+                message: 'Unable to create AlertManagement::Alert',
+                project_id: project.id,
+                alert_errors: { hosts: ['hosts array is over 255 chars'] }
+              )
+
+              subject
+            end
+          end
+
+          it { is_expected.to be_success }
+        end
+      end
+
+      context 'when Prometheus alert status is resolved' do
+        let(:status) { 'resolved' }
+        let!(:alert) { create(:alert_management_alert, project: project, fingerprint: parsed_alert.gitlab_fingerprint) }
+
+        context 'when status can be changed' do
+          it 'resolves an existing alert' do
+            expect { subject }.to change { alert.reload.resolved? }.to(true)
+          end
+        end
+
+        context 'when status change did not succeed' do
+          before do
+            allow(AlertManagement::Alert).to receive(:for_fingerprint).and_return([alert])
+            allow(alert).to receive(:resolve).and_return(false)
+          end
+
+          it 'writes a warning to the log' do
+            expect(Gitlab::AppLogger).to receive(:warn).with(
+              message: 'Unable to update AlertManagement::Alert status to resolved',
+              project_id: project.id,
+              alert_id: alert.id
+            )
+
+            subject
+          end
+        end
+
+        it { is_expected.to be_success }
+      end
+    end
+
+    context 'when alert payload is invalid' do
+      let(:payload) { {} }
+
+      it 'responds with bad_request' do
+        expect(subject).to be_error
+        expect(subject.http_status).to eq(:bad_request)
+      end
+    end
+  end
+end
diff --git a/spec/services/alert_management/update_alert_status_service_spec.rb b/spec/services/alert_management/update_alert_status_service_spec.rb
index 325b03840d3..5bdad7a8e19 100644
--- a/spec/services/alert_management/update_alert_status_service_spec.rb
+++ b/spec/services/alert_management/update_alert_status_service_spec.rb
@@ -11,7 +11,7 @@ describe AlertManagement::UpdateAlertStatusService do
     let(:new_status) { 'acknowledged' }
 
     it 'updates the status' do
-      expect { execute }.to change { alert.status }.to(new_status)
+      expect { execute }.to change { alert.acknowledged? }.to(true)
     end
 
     context 'with unknown status' do
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 22df3b84243..69d555f838d 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -72,12 +72,15 @@ describe MergeRequests::RebaseService do
     it_behaves_like 'sequence of failure and success'
 
     context 'when unexpected error occurs' do
+      let(:exception) { RuntimeError.new('Something went wrong') }
+      let(:merge_request_ref) { merge_request.to_reference(full: true) }
+
       before do
-        allow(repository).to receive(:gitaly_operation_client).and_raise('Something went wrong')
+        allow(repository).to receive(:gitaly_operation_client).and_raise(exception)
      end
 
       it 'saves a generic error message' do
-        subject.execute(merge_request)
+        service.execute(merge_request)
 
         expect(merge_request.reload.merge_error).to eq(described_class::REBASE_ERROR)
       end
@@ -86,6 +89,18 @@ describe MergeRequests::RebaseService do
         expect(service.execute(merge_request)).to match(status: :error,
                                                         message: described_class::REBASE_ERROR)
       end
+
+      it 'logs the error' do
+        expect(service).to receive(:log_error).with(exception: exception, message: described_class::REBASE_ERROR, save_message_on_model: true).and_call_original
+        expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception,
+          class: described_class.to_s,
+          merge_request: merge_request_ref,
+          merge_request_id: merge_request.id,
+          message: described_class::REBASE_ERROR,
+          save_message_on_model: true).and_call_original
+
+        service.execute(merge_request)
+      end
     end
 
     context 'with git command failure' do
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index cb278eec692..a53314ed737 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -141,15 +141,14 @@ describe MergeRequests::SquashService do
     let(:merge_request) { merge_request_with_only_new_files }
     let(:error) { 'A test error' }
 
-    context 'with gitaly enabled' do
+    context 'with an error in Gitaly UserSquash RPC' do
       before do
         allow(repository.gitaly_operation_client).to receive(:user_squash)
           .and_raise(Gitlab::Git::Repository::GitError, error)
       end
 
-      it 'logs the stage and output' do
-        expect(service).to receive(:log_error).with(log_error)
-        expect(service).to receive(:log_error).with(error)
+      it 'logs the error' do
+        expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to squash merge request')
 
         service.execute
       end
@@ -158,19 +157,42 @@ describe MergeRequests::SquashService do
         expect(service.execute).to match(status: :error, message: a_string_including('squash'))
       end
     end
+
+    context 'with an error in squash in progress check' do
+      before do
+        allow(repository).to receive(:squash_in_progress?)
+          .and_raise(Gitlab::Git::Repository::GitError, error)
+      end
+
+      it 'logs the stage and output' do
+        expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to check squash in progress')
+
+        service.execute
+      end
+
+      it 'returns an error' do
+        expect(service.execute).to match(status: :error, message: 'An error occurred while checking whether another squash is in progress.')
+      end
+    end
   end
 
   context 'when any other exception is thrown' do
     let(:merge_request) { merge_request_with_only_new_files }
-    let(:error) { 'A test error' }
+    let(:merge_request_ref) { merge_request.to_reference(full: true) }
+    let(:exception) { RuntimeError.new('A test error') }
 
     before do
-      allow(merge_request.target_project.repository).to receive(:squash).and_raise(error)
+      allow(merge_request.target_project.repository).to receive(:squash).and_raise(exception)
     end
 
-    it 'logs the MR reference and exception' do
-      expect(service).to receive(:log_error).with(a_string_including("#{project.full_path}#{merge_request.to_reference}"))
-      expect(service).to receive(:log_error).with(error)
+    it 'logs the error' do
+      expect(service).to receive(:log_error).with(exception: exception, message: 'Failed to squash merge request').and_call_original
+      expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception,
+        class: described_class.to_s,
+        merge_request: merge_request_ref,
+        merge_request_id: merge_request.id,
+        message: 'Failed to squash merge request',
+        save_message_on_model: false).and_call_original
 
       service.execute
     end
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 8315d2292a0..bfd51874549 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -121,7 +121,7 @@ describe Projects::Alerting::NotifyService do
             'hosts' => [],
             'payload' => payload_raw,
             'severity' => 'critical',
-            'status' => 'triggered',
+            'status' => AlertManagement::Alert::STATUSES[:triggered],
             'events' => 1,
             'started_at' => alert.started_at,
             'ended_at' => nil
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index dce96dda1e3..bfa784cd212 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -217,6 +217,51 @@ describe Projects::Prometheus::Alerts::NotifyService do
       end
     end
 
+    context 'process Alert Management alerts' do
+      let(:process_service) { instance_double(AlertManagement::ProcessPrometheusAlertService) }
+
+      before do
+        create(:prometheus_service, project: project)
+        create(:project_alerting_setting, project: project, token: token)
+      end
+
+      context 'when alert_management_minimal feature enabled' do
+        before do
+          stub_feature_flags(alert_management_minimal: true)
+        end
+
+        context 'with multiple firing alerts and resolving alerts' do
+          let(:payload_raw) do
+            payload_for(firing: [alert_firing, alert_firing], resolved: [alert_resolved])
+          end
+
+          it 'processes Prometheus alerts' do
+            expect(AlertManagement::ProcessPrometheusAlertService)
+              .to receive(:new)
+              .with(project, nil, kind_of(Hash))
+              .exactly(3).times
+              .and_return(process_service)
+            expect(process_service).to receive(:execute).exactly(3).times
+
+            subject
+          end
+        end
+      end
+
+      context 'when alert_management_minimal feature disabled' do
+        before do
+          stub_feature_flags(alert_management_minimal: false)
+        end
+
+        it 'does not process Prometheus alerts' do
+          expect(AlertManagement::ProcessPrometheusAlertService)
+            .not_to receive(:new)
+
+          subject
+        end
+      end
+    end
+
     context 'process incident issues' do
       before do
         create(:prometheus_service, project: project)
@@ -286,6 +331,13 @@ describe Projects::Prometheus::Alerts::NotifyService do
 
       it_behaves_like 'no notifications', http_status: :bad_request
 
+      it 'does not process Prometheus alerts' do
+        expect(AlertManagement::ProcessPrometheusAlertService)
+          .not_to receive(:new)
+
+        subject
+      end
+
       it 'does not process issues' do
         expect(IncidentManagement::ProcessPrometheusAlertWorker)
           .not_to receive(:perform_async)