
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-04-28 00:10:10 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-04-28 00:10:10 +0300
commit    579e85eb029c4ee66e8b8cd537a94b9e6cb0e58b (patch)
tree      26c28e966b7f2e4fabf06bfaa1d650ac4579f9e4 /spec
parent    f569792df8a25caa1bed9c448c8c4c3f837f5164 (diff)

Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js |  1
-rw-r--r--  spec/frontend/diffs/components/app_spec.js | 67
-rw-r--r--  spec/frontend/ide/components/preview/clientside_spec.js |  8
-rw-r--r--  spec/frontend/monitoring/store/actions_spec.js |  3
-rw-r--r--  spec/frontend/notes/old_notes_spec.js |  1
-rw-r--r--  spec/frontend/notes/stores/actions_spec.js |  2
-rw-r--r--  spec/frontend/smart_interval_spec.js |  2
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 29
-rw-r--r--  spec/services/ci/create_job_artifacts_service_spec.rb | 20
-rw-r--r--  spec/services/ci/destroy_expired_job_artifacts_service_spec.rb | 26
-rw-r--r--  spec/services/git/branch_push_service_spec.rb |  8
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 24
12 files changed, 150 insertions, 41 deletions
diff --git a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
index 61cbef0c557..79c37293fe5 100644
--- a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
+++ b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js
@@ -152,7 +152,6 @@ describe('custom metrics form fields component', () => {
describe('when query validation is in flight', () => {
beforeEach(() => {
- jest.useFakeTimers();
mountComponent(
{ metricPersisted: true, ...makeFormData({ query: 'validQuery' }) },
{
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 3a0354205f8..1a95f586344 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -14,10 +14,13 @@ import TreeList from '~/diffs/components/tree_list.vue';
import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
import createDiffsStore from '../create_diffs_store';
import axios from '~/lib/utils/axios_utils';
+import * as urlUtils from '~/lib/utils/url_utility';
import diffsMockData from '../mock_data/merge_request_diffs';
const mergeRequestDiff = { version_index: 1 };
const TEST_ENDPOINT = `${TEST_HOST}/diff/endpoint`;
+const COMMIT_URL = '[BASE URL]/OLD';
+const UPDATED_COMMIT_URL = '[BASE URL]/NEW';
describe('diffs/components/app', () => {
const oldMrTabs = window.mrTabs;
@@ -602,6 +605,70 @@ describe('diffs/components/app', () => {
});
});
+ describe('commit watcher', () => {
+ const spy = () => {
+ jest.spyOn(wrapper.vm, 'refetchDiffData').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'adjustView').mockImplementation(() => {});
+ };
+ let location;
+
+ beforeAll(() => {
+ location = window.location;
+ delete window.location;
+ window.location = COMMIT_URL;
+ document.title = 'My Title';
+ });
+
+ beforeEach(() => {
+ jest.spyOn(urlUtils, 'updateHistory');
+ });
+
+ afterAll(() => {
+ window.location = location;
+ });
+
+ it('when the commit changes and the app is not loading it should update the history, refetch the diff data, and update the view', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.commit = { ...state.diffs.commit, id: 'OLD' };
+ });
+ spy();
+
+ store.state.diffs.commit = { id: 'NEW' };
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(urlUtils.updateHistory).toHaveBeenCalledWith({
+ title: document.title,
+ url: UPDATED_COMMIT_URL,
+ });
+ expect(wrapper.vm.refetchDiffData).toHaveBeenCalled();
+ expect(wrapper.vm.adjustView).toHaveBeenCalled();
+ });
+ });
+
+ it.each`
+ isLoading | oldSha | newSha
+ ${true} | ${'OLD'} | ${'NEW'}
+ ${false} | ${'NEW'} | ${'NEW'}
+ `(
+ 'given `{ "isLoading": $isLoading, "oldSha": "$oldSha", "newSha": "$newSha" }`, nothing should happen',
+ ({ isLoading, oldSha, newSha }) => {
+ createComponent({}, ({ state }) => {
+ state.diffs.isLoading = isLoading;
+ state.diffs.commit = { ...state.diffs.commit, id: oldSha };
+ });
+ spy();
+
+ store.state.diffs.commit = { id: newSha };
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(urlUtils.updateHistory).not.toHaveBeenCalled();
+ expect(wrapper.vm.refetchDiffData).not.toHaveBeenCalled();
+ expect(wrapper.vm.adjustView).not.toHaveBeenCalled();
+ });
+ },
+ );
+ });
+
describe('diffs', () => {
it('should render compare versions component', () => {
createComponent({}, ({ state }) => {
diff --git a/spec/frontend/ide/components/preview/clientside_spec.js b/spec/frontend/ide/components/preview/clientside_spec.js
index 0cde6fb6060..7b2025f5e9f 100644
--- a/spec/frontend/ide/components/preview/clientside_spec.js
+++ b/spec/frontend/ide/components/preview/clientside_spec.js
@@ -70,14 +70,6 @@ describe('IDE clientside preview', () => {
});
};
- beforeAll(() => {
- jest.useFakeTimers();
- });
-
- afterAll(() => {
- jest.useRealTimers();
- });
-
afterEach(() => {
wrapper.destroy();
});
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index f312aa1fd34..37bd270a1c0 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -62,9 +62,6 @@ describe('Monitoring store actions', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- // Mock `backOff` function to remove exponential algorithm delay.
- jest.useFakeTimers();
-
jest.spyOn(commonUtils, 'backOff').mockImplementation(callback => {
const q = new Promise((resolve, reject) => {
const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
diff --git a/spec/frontend/notes/old_notes_spec.js b/spec/frontend/notes/old_notes_spec.js
index 49b887b21b4..3f7c94c3a2b 100644
--- a/spec/frontend/notes/old_notes_spec.js
+++ b/spec/frontend/notes/old_notes_spec.js
@@ -33,7 +33,6 @@ gl.utils.disableButtonIfEmptyField = () => {};
// eslint-disable-next-line jest/no-disabled-tests
describe.skip('Old Notes (~/notes.js)', () => {
beforeEach(() => {
- jest.useFakeTimers();
loadFixtures(fixture);
// Re-declare this here so that test_setup.js#beforeEach() doesn't
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index e0c5441b9d3..cbfb9597159 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -272,8 +272,6 @@ describe('Actions Notes Store', () => {
});
describe('poll', () => {
- jest.useFakeTimers();
-
beforeEach(done => {
jest.spyOn(axios, 'get');
diff --git a/spec/frontend/smart_interval_spec.js b/spec/frontend/smart_interval_spec.js
index b32ac99e4e4..1a2fd7ff8f1 100644
--- a/spec/frontend/smart_interval_spec.js
+++ b/spec/frontend/smart_interval_spec.js
@@ -3,8 +3,6 @@ import { assignIn } from 'lodash';
import waitForPromises from 'helpers/wait_for_promises';
import SmartInterval from '~/smart_interval';
-jest.useFakeTimers();
-
let interval;
describe('SmartInterval', () => {
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 0701f088837..dd33d55d954 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -160,15 +160,26 @@ describe Ci::JobArtifact do
end
describe '.for_sha' do
+ let(:first_pipeline) { create(:ci_pipeline) }
+ let(:second_pipeline) { create(:ci_pipeline, project: first_pipeline.project, sha: Digest::SHA1.hexdigest(SecureRandom.hex)) }
+ let!(:first_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline)) }
+ let!(:second_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline)) }
+
it 'returns job artifacts for a given pipeline sha' do
- project = create(:project)
- first_pipeline = create(:ci_pipeline, project: project)
- second_pipeline = create(:ci_pipeline, project: project, sha: Digest::SHA1.hexdigest(SecureRandom.hex))
- first_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline))
- second_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline))
+ expect(described_class.for_sha(first_pipeline.sha, first_pipeline.project.id)).to eq([first_artifact])
+ expect(described_class.for_sha(second_pipeline.sha, first_pipeline.project.id)).to eq([second_artifact])
+ end
+ end
+
+ describe '.for_ref' do
+ let(:first_pipeline) { create(:ci_pipeline, ref: 'first_ref') }
+ let(:second_pipeline) { create(:ci_pipeline, ref: 'second_ref', project: first_pipeline.project) }
+ let!(:first_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline)) }
+ let!(:second_artifact) { create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline)) }
- expect(described_class.for_sha(first_pipeline.sha, project.id)).to eq([first_artifact])
- expect(described_class.for_sha(second_pipeline.sha, project.id)).to eq([second_artifact])
+ it 'returns job artifacts for a given pipeline ref' do
+ expect(described_class.for_ref(first_pipeline.ref, first_pipeline.project.id)).to eq([first_artifact])
+ expect(described_class.for_ref(second_pipeline.ref, first_pipeline.project.id)).to eq([second_artifact])
end
end
@@ -185,9 +196,9 @@ describe Ci::JobArtifact do
end
describe 'callbacks' do
- subject { create(:ci_job_artifact, :archive) }
-
describe '#schedule_background_upload' do
+ subject { create(:ci_job_artifact, :archive) }
+
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
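
For orientation, the .for_sha and .for_ref examples above exercise scopes on Ci::JobArtifact that find artifacts through their job's pipeline. A minimal Ruby sketch of what such scopes could look like, as an illustration only and not the code introduced by this commit:

class Ci::JobArtifact < ApplicationRecord
  belongs_to :job, class_name: 'Ci::Build'

  # Artifacts whose pipeline matches the given SHA within a project.
  scope :for_sha, ->(sha, project_id) do
    joins(job: :pipeline).where(ci_pipelines: { sha: sha, project_id: project_id })
  end

  # Artifacts whose pipeline ran for the given ref within a project.
  scope :for_ref, ->(ref, project_id) do
    joins(job: :pipeline).where(ci_pipelines: { ref: ref, project_id: project_id })
  end
end
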
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
index fe64a66f322..2d869575d2a 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -30,6 +30,26 @@ describe Ci::CreateJobArtifactsService do
describe '#execute' do
subject { service.execute(job, artifacts_file, params, metadata_file: metadata_file) }
+ context 'locking' do
+ let(:old_job) { create(:ci_build, pipeline: create(:ci_pipeline, project: job.project, ref: job.ref)) }
+ let!(:latest_artifact) { create(:ci_job_artifact, job: old_job, locked: true) }
+ let!(:other_artifact) { create(:ci_job_artifact, locked: true) }
+
+ it 'locks the new artifact' do
+ subject
+
+ expect(Ci::JobArtifact.last).to have_attributes(locked: true)
+ end
+
+ it 'unlocks all other artifacts for the same ref' do
+ expect { subject }.to change { latest_artifact.reload.locked }.from(true).to(false)
+ end
+
+ it 'does not unlock artifacts for other refs' do
+ expect { subject }.not_to change { other_artifact.reload.locked }.from(true)
+ end
+ end
+
context 'when artifacts file is uploaded' do
it 'saves artifact for the given type' do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
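
The new 'locking' examples above pin down behaviour only: the artifact created by the service ends up locked, the previously locked artifact for the same project and ref is released, and artifacts for other refs stay untouched. A rough sketch of service logic that would satisfy those expectations, where the method name and structure are assumptions rather than the actual implementation:

# Assumed sketch: lock the new artifact and unlock the previously locked
# artifacts that belong to the same project and ref.
def refresh_artifact_locks!(new_artifact)
  Ci::JobArtifact.transaction do
    Ci::JobArtifact
      .joins(job: :pipeline)
      .where(ci_pipelines: { project_id: new_artifact.project_id, ref: new_artifact.job.ref })
      .where(locked: true)
      .where.not(id: new_artifact.id)
      .update_all(locked: false)

    new_artifact.update!(locked: true)
  end
end
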
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
index fc5450ab33d..4b9f12d8fdf 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
@@ -11,8 +11,26 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
let(:service) { described_class.new }
let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
- it 'destroys expired job artifacts' do
- expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ context 'when artifact is expired' do
+ context 'when artifact is not locked' do
+ before do
+ artifact.update!(locked: false)
+ end
+
+ it 'destroys job artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+ end
+
+ context 'when artifact is locked' do
+ before do
+ artifact.update!(locked: true)
+ end
+
+ it 'does not destroy job artifact' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
end
context 'when artifact is not expired' do
@@ -72,7 +90,7 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
end
- let!(:artifact) { create_list(:ci_job_artifact, 2, expire_at: 1.day.ago) }
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
it 'raises an error and does not continue destroying' do
is_expected.to be_falsy
@@ -96,7 +114,7 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
end
- let!(:artifact) { create_list(:ci_job_artifact, 2, expire_at: 1.day.ago) }
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
it 'destroys all expired artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
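
These examples tighten the destruction contract: expired artifacts are removed only when they are not locked. One plausible shape for the batch query the service would run, shown as an illustrative scope chain rather than the code from this commit:

# Assumed sketch: pick a batch of expired, unlocked artifacts and destroy them.
expired_unlocked = Ci::JobArtifact
  .where('expire_at < ?', Time.current)
  .where(locked: false)
  .limit(BATCH_SIZE)

expired_unlocked.each(&:destroy!)
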
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index acd14005c69..d5ffe4d4536 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -291,7 +291,7 @@ describe Git::BranchPushService, services: true do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
- it "defaults to the pushing user if the commit's author is not known", :sidekiq_might_not_need_inline do
+ it "defaults to the pushing user if the commit's author is not known", :sidekiq_inline, :use_clean_rails_redis_caching do
allow(commit).to receive_messages(
author_name: 'unknown name',
author_email: 'unknown@email.com'
@@ -336,7 +336,7 @@ describe Git::BranchPushService, services: true do
end
context "while saving the 'first_mentioned_in_commit_at' metric for an issue" do
- it 'sets the metric for referenced issues', :sidekiq_might_not_need_inline do
+ it 'sets the metric for referenced issues', :sidekiq_inline, :use_clean_rails_redis_caching do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
expect(issue.reload.metrics.first_mentioned_in_commit_at).to be_like_time(commit_time)
@@ -397,7 +397,7 @@ describe Git::BranchPushService, services: true do
allow(project).to receive(:default_branch).and_return('not-master')
end
- it "creates cross-reference notes", :sidekiq_might_not_need_inline do
+ it "creates cross-reference notes", :sidekiq_inline, :use_clean_rails_redis_caching do
expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author)
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
end
@@ -438,7 +438,7 @@ describe Git::BranchPushService, services: true do
context "mentioning an issue" do
let(:message) { "this is some work.\n\nrelated to JIRA-1" }
- it "initiates one api call to jira server to mention the issue", :sidekiq_might_not_need_inline do
+ it "initiates one api call to jira server to mention the issue", :sidekiq_inline, :use_clean_rails_redis_caching do
execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with(
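
The tag swap above changes only example metadata: :sidekiq_might_not_need_inline becomes :sidekiq_inline plus :use_clean_rails_redis_caching, so background jobs run synchronously and, going by its name, the second tag opts the examples into a real Rails cache that is cleaned between tests. Tags like these are typically consumed by RSpec hooks; a simplified, assumed illustration (not GitLab's actual configuration):

# Illustrative only: run Sidekiq jobs inline for examples tagged :sidekiq_inline.
require 'sidekiq/testing'

RSpec.configure do |config|
  config.around(:each, :sidekiq_inline) do |example|
    Sidekiq::Testing.inline! { example.run }
  end
end
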
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 21c300af7ac..d247668ac76 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -22,16 +22,26 @@ describe ProcessCommitWorker do
worker.perform(project.id, -1, commit.to_hash)
end
- it 'processes the commit message' do
- expect(worker).to receive(:process_commit_message).and_call_original
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
+ end
- worker.perform(project.id, user.id, commit.to_hash)
- end
+ it 'processes the commit message' do
+ expect(worker).to receive(:process_commit_message)
+ .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
+ .and_call_original
- it 'updates the issue metrics' do
- expect(worker).to receive(:update_issue_metrics).and_call_original
+ subject
+ end
- worker.perform(project.id, user.id, commit.to_hash)
+ it 'updates the issue metrics' do
+ expect(worker).to receive(:update_issue_metrics)
+ .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
+ .and_call_original
+
+ subject
+ end
end
end
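
The rewritten examples lean on GitLab's idempotency helpers: include_examples 'an idempotent worker' together with perform_multiple runs the job several times in a row and asserts the combined effect matches a single run. A simplified sketch of what such a helper might look like, with an assumed shape and constant value rather than the real implementation:

# Assumed sketch: run the worker repeatedly so the examples can assert
# that executing it more than once has no additional side effects.
module IdempotentWorkerHelper
  WORKER_EXEC_TIMES = 2 # assumed value

  def perform_multiple(args, worker:, exec_times: WORKER_EXEC_TIMES)
    exec_times.times { worker.perform(*args) }
  end
end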