
gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'spec/lib/gitlab/github_import/parallel_scheduling_spec.rb')
-rw-r--r--  spec/lib/gitlab/github_import/parallel_scheduling_spec.rb | 89
1 file changed, 32 insertions(+), 57 deletions(-)
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index c351ead91eb..9de39a3ff7e 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -289,77 +289,52 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
         .and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' })
     end
 
-    context 'with multiple objects' do
-      before do
-        stub_feature_flags(improved_spread_parallel_import: false)
-
-        expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
-      end
-
-      it 'imports data in parallel batches with delays' do
-        expect(worker_class).to receive(:bulk_perform_in)
-          .with(1.second, [
-            [project.id, { title: 'One' }, an_instance_of(String)],
-            [project.id, { title: 'Two' }, an_instance_of(String)],
-            [project.id, { title: 'Three' }, an_instance_of(String)]
-          ], batch_size: batch_size, batch_delay: batch_delay)
-
-        importer.parallel_import
-      end
+    it 'imports data in parallel with delays respecting parallel_import_batch definition and return job waiter' do
+      allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
+      allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
+
+      expect(importer).to receive(:each_object_to_import)
+        .and_yield(object).and_yield(object).and_yield(object)
+      expect(worker_class).to receive(:perform_in)
+        .with(1.second, project.id, { title: 'One' }, 'waiter-key').ordered
+      expect(worker_class).to receive(:perform_in)
+        .with(1.second, project.id, { title: 'Two' }, 'waiter-key').ordered
+      expect(worker_class).to receive(:perform_in)
+        .with(1.minute + 1.second, project.id, { title: 'Three' }, 'waiter-key').ordered
+
+      job_waiter = importer.parallel_import
+
+      expect(job_waiter.key).to eq('waiter-key')
+      expect(job_waiter.jobs_remaining).to eq(3)
     end
 
-    context 'when the feature flag `improved_spread_parallel_import` is enabled' do
+    context 'when job restarts due to API rate limit or Sidekiq interruption' do
       before do
-        stub_feature_flags(improved_spread_parallel_import: true)
+        cache_key = format(described_class::JOB_WAITER_CACHE_KEY,
+          project: project.id, collection: importer.collection_method)
+        Gitlab::Cache::Import::Caching.write(cache_key, 'waiter-key')
+
+        cache_key = format(described_class::JOB_WAITER_REMAINING_CACHE_KEY,
+          project: project.id, collection: importer.collection_method)
+        Gitlab::Cache::Import::Caching.write(cache_key, 3)
       end
 
-      it 'imports data in parallel with delays respecting parallel_import_batch definition and return job waiter' do
-        allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
-        allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
+      it "restores job waiter's key and jobs_remaining" do
+        allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute })
+
+        expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
-        expect(importer).to receive(:each_object_to_import)
-          .and_yield(object).and_yield(object).and_yield(object)
 
         expect(worker_class).to receive(:perform_in)
           .with(1.second, project.id, { title: 'One' }, 'waiter-key').ordered
         expect(worker_class).to receive(:perform_in)
-          .with(1.second, project.id, { title: 'Two' }, 'waiter-key').ordered
+          .with(1.minute + 1.second, project.id, { title: 'Two' }, 'waiter-key').ordered
         expect(worker_class).to receive(:perform_in)
-          .with(1.minute + 1.second, project.id, { title: 'Three' }, 'waiter-key').ordered
+          .with(2.minutes + 1.second, project.id, { title: 'Three' }, 'waiter-key').ordered
 
         job_waiter = importer.parallel_import
 
         expect(job_waiter.key).to eq('waiter-key')
-        expect(job_waiter.jobs_remaining).to eq(3)
-      end
-
-      context 'when job restarts due to API rate limit or Sidekiq interruption' do
-        before do
-          cache_key = format(described_class::JOB_WAITER_CACHE_KEY,
-            project: project.id, collection: importer.collection_method)
-          Gitlab::Cache::Import::Caching.write(cache_key, 'waiter-key')
-
-          cache_key = format(described_class::JOB_WAITER_REMAINING_CACHE_KEY,
-            project: project.id, collection: importer.collection_method)
-          Gitlab::Cache::Import::Caching.write(cache_key, 3)
-        end
-
-        it "restores job waiter's key and jobs_remaining" do
-          allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute })
-
-          expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
-
-          expect(worker_class).to receive(:perform_in)
-            .with(1.second, project.id, { title: 'One' }, 'waiter-key').ordered
-          expect(worker_class).to receive(:perform_in)
-            .with(1.minute + 1.second, project.id, { title: 'Two' }, 'waiter-key').ordered
-          expect(worker_class).to receive(:perform_in)
-            .with(2.minutes + 1.second, project.id, { title: 'Three' }, 'waiter-key').ordered
-
-          job_waiter = importer.parallel_import
-
-          expect(job_waiter.key).to eq('waiter-key')
-          expect(job_waiter.jobs_remaining).to eq(6)
-        end
+        expect(job_waiter.jobs_remaining).to eq(6)
       end
     end
   end
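
The delays asserted in this spec follow a simple pattern: parallel_import_batch returns a hash with size and delay, every group of size objects is pushed one delay further into the future on top of a 1-second base offset, and a Gitlab::JobWaiter keyed by generate_key counts the enqueued jobs. The following is a minimal sketch of such a scheduling loop, inferred only from the expectations above; the method signature and local names are illustrative, not the actual ParallelScheduling implementation.

# Illustrative sketch only -- not the production ParallelScheduling code.
# Assumes ActiveSupport duration helpers (1.second, 1.minute) and that
# Gitlab::JobWaiter exposes #key and a #jobs_remaining accessor, as the spec uses.
def parallel_import(objects, worker_class, project_id, batch)
  # Assumed JobWaiter constructor arguments: (jobs_remaining, key).
  job_waiter = ::Gitlab::JobWaiter.new(0, ::Gitlab::JobWaiter.generate_key)

  objects.each_with_index do |object, index|
    # Group objects into batches of batch[:size]; each batch is scheduled
    # batch[:delay] later than the previous one, plus a 1-second base offset.
    delay = (index / batch[:size]) * batch[:delay] + 1.second

    worker_class.perform_in(delay, project_id, object, job_waiter.key)
    job_waiter.jobs_remaining += 1
  end

  job_waiter
end

With { size: 2, delay: 1.minute } and three objects this yields delays of 1.second, 1.second and 1.minute + 1.second, as the first example asserts; with { size: 1, delay: 1.minute } it yields 1.second, 1.minute + 1.second and 2.minutes + 1.second, as the restart example asserts. The restart example additionally expects the waiter key and the remaining-jobs count to be reloaded from the JOB_WAITER_CACHE_KEY and JOB_WAITER_REMAINING_CACHE_KEY cache entries rather than reset, which is why jobs_remaining ends at 6.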