gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2022-06-03 18:09:26 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2022-06-03 18:09:26 +0300
commit    f5f6cb45c73c8aa059c3006a3696014522a41a4b
tree      bde1e1c22c83276f49858e827909a1e13ef0f0c2
parent    c74f702c747d1b14c3ddea951ceb7970941dc8f5

Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
 spec/frontend/__helpers__/dl_locator_helper.js | 28
 spec/frontend/members/components/table/members_table_spec.js | 22
 spec/frontend/runner/components/runner_details_spec.js | 22
 spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js | 9
 spec/graphql/resolvers/user_resolver_spec.rb | 49
 spec/graphql/resolvers/users_resolver_spec.rb | 30
 spec/graphql/types/project_type_spec.rb | 28
 spec/graphql/types/user_type_spec.rb | 8
 spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb | 2
 spec/lib/gitlab/ci/reports/coverage_report_spec.rb (renamed from spec/lib/gitlab/ci/reports/coverage_reports_spec.rb) | 2
 spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb | 27
 spec/lib/gitlab/redis/duplicate_jobs_spec.rb | 68
 spec/lib/gitlab/redis/multi_store_spec.rb | 901
 spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 19
 spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb | 649
 spec/models/ci/build_spec.rb | 2
 spec/models/concerns/pg_full_text_searchable_spec.rb | 11
 spec/requests/api/graphql/user/starred_projects_query_spec.rb | 21
 spec/support/helpers/test_env.rb | 2
 spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb | 15
 spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb | 51
 spec/tooling/danger/project_helper_spec.rb | 10
22 files changed, 1590 insertions(+), 386 deletions(-)
diff --git a/spec/frontend/__helpers__/dl_locator_helper.js b/spec/frontend/__helpers__/dl_locator_helper.js
new file mode 100644
index 00000000000..b507dcd599d
--- /dev/null
+++ b/spec/frontend/__helpers__/dl_locator_helper.js
@@ -0,0 +1,28 @@
+import { createWrapper, ErrorWrapper } from '@vue/test-utils';
+
+/**
+ * Find the definition (<dd>) that corresponds to this term (<dt>)
+ *
+ * Given html in the `wrapper`:
+ *
+ * <dl>
+ * <dt>My label</dt>
+ * <dd>Value</dd>
+ * </dl>
+ *
+ * findDd('My label', wrapper)
+ *
+ * Returns `<dd>Value</dd>`
+ *
+ * @param {object} wrapper - Parent wrapper
+ * @param {string} dtLabel - Label for this value
+ * @returns Wrapper
+ */
+export const findDd = (dtLabel, wrapper) => {
+ const dt = wrapper.findByText(dtLabel).element;
+ const dd = dt.nextElementSibling;
+ if (dt.tagName === 'DT' && dd.tagName === 'DD') {
+ return createWrapper(dd, {});
+ }
+ return ErrorWrapper(dtLabel);
+};
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 7d58b822916..08baa663bf0 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -16,9 +16,9 @@ import {
MEMBER_STATE_CREATED,
MEMBER_STATE_AWAITING,
MEMBER_STATE_ACTIVE,
- USER_STATE_BLOCKED_PENDING_APPROVAL,
- BADGE_LABELS_AWAITING_USER_SIGNUP,
- BADGE_LABELS_PENDING_OWNER_ACTION,
+ USER_STATE_BLOCKED,
+ BADGE_LABELS_AWAITING_SIGNUP,
+ BADGE_LABELS_PENDING,
TAB_QUERY_PARAM_VALUES,
} from '~/members/constants';
import {
@@ -133,14 +133,14 @@ describe('MembersTable', () => {
describe('Invited column', () => {
describe.each`
- state | userState | expectedBadgeLabel
- ${MEMBER_STATE_CREATED} | ${null} | ${BADGE_LABELS_AWAITING_USER_SIGNUP}
- ${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_ACTION}
- ${MEMBER_STATE_AWAITING} | ${''} | ${BADGE_LABELS_AWAITING_USER_SIGNUP}
- ${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_ACTION}
- ${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING_OWNER_ACTION}
- ${MEMBER_STATE_ACTIVE} | ${null} | ${''}
- ${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
+ state | userState | expectedBadgeLabel
+ ${MEMBER_STATE_CREATED} | ${null} | ${BADGE_LABELS_AWAITING_SIGNUP}
+ ${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED} | ${BADGE_LABELS_PENDING}
+ ${MEMBER_STATE_AWAITING} | ${''} | ${BADGE_LABELS_AWAITING_SIGNUP}
+ ${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED} | ${BADGE_LABELS_PENDING}
+ ${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING}
+ ${MEMBER_STATE_ACTIVE} | ${null} | ${''}
+ ${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
`('Invited Badge', ({ state, userState, expectedBadgeLabel }) => {
it(`${
expectedBadgeLabel ? 'shows' : 'hides'
diff --git a/spec/frontend/runner/components/runner_details_spec.js b/spec/frontend/runner/components/runner_details_spec.js
index 162d21febfd..47847cead1d 100644
--- a/spec/frontend/runner/components/runner_details_spec.js
+++ b/spec/frontend/runner/components/runner_details_spec.js
@@ -1,8 +1,8 @@
import { GlSprintf, GlIntersperse, GlTab } from '@gitlab/ui';
-import { createWrapper, ErrorWrapper } from '@vue/test-utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { useFakeDate } from 'helpers/fake_date';
+import { findDd } from 'helpers/dl_locator_helper';
import { ACCESS_LEVEL_REF_PROTECTED, ACCESS_LEVEL_NOT_PROTECTED } from '~/runner/constants';
import RunnerDetails from '~/runner/components/runner_details.vue';
@@ -24,20 +24,6 @@ describe('RunnerDetails', () => {
useFakeDate(mockNow);
- /**
- * Find the definition (<dd>) that corresponds to this term (<dt>)
- * @param {string} dtLabel - Label for this value
- * @returns Wrapper
- */
- const findDd = (dtLabel) => {
- const dt = wrapper.findByText(dtLabel).element;
- const dd = dt.nextElementSibling;
- if (dt.tagName === 'DT' && dd.tagName === 'DD') {
- return createWrapper(dd, {});
- }
- return ErrorWrapper(dtLabel);
- };
-
const findDetailGroups = () => wrapper.findComponent(RunnerGroups);
const findRunnersJobs = () => wrapper.findComponent(RunnersJobs);
const findJobCountBadge = () => wrapper.findByTestId('job-count-badge');
@@ -108,7 +94,7 @@ describe('RunnerDetails', () => {
});
it(`displays expected value "${expectedValue}"`, () => {
- expect(findDd(field).text()).toBe(expectedValue);
+ expect(findDd(field, wrapper).text()).toBe(expectedValue);
});
});
@@ -123,7 +109,7 @@ describe('RunnerDetails', () => {
stubs,
});
- expect(findDd('Tags').text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
+ expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
});
it('displays "None" when runner has no tags', () => {
@@ -134,7 +120,7 @@ describe('RunnerDetails', () => {
stubs,
});
- expect(findDd('Tags').text().replace(/\s+/g, ' ')).toBe('None');
+ expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('None');
});
});
diff --git a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
index 6d176e1bf6a..11abde04361 100644
--- a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
+++ b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
@@ -77,6 +77,15 @@ describe('InputCopyToggleVisibility', () => {
expect(event.preventDefault).toHaveBeenCalled();
});
+ it('emits `copy` event when manually copied the token', () => {
+ expect(wrapper.emitted('copy')).toBeUndefined();
+
+ findFormInput().element.dispatchEvent(createCopyEvent());
+
+ expect(wrapper.emitted('copy')).toHaveLength(1);
+ expect(wrapper.emitted('copy')[0]).toEqual([]);
+ });
+
describe('visibility toggle button', () => {
it('renders a reveal button', () => {
const revealButton = findRevealButton();
diff --git a/spec/graphql/resolvers/user_resolver_spec.rb b/spec/graphql/resolvers/user_resolver_spec.rb
index 446d765d3ee..32a9b177629 100644
--- a/spec/graphql/resolvers/user_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_resolver_spec.rb
@@ -6,8 +6,41 @@ RSpec.describe Resolvers::UserResolver do
include GraphqlHelpers
describe '#resolve' do
+ let_it_be(:current_user) { nil }
let_it_be(:user) { create(:user) }
+ shared_examples 'queries user' do
+ context 'authenticated access' do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'returns the correct user' do
+ expect(
+ resolve_user(args)
+ ).to eq(user)
+ end
+ end
+
+ context 'unauthenticated access' do
+ it 'forbids search' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_user(args)
+ end
+ end
+
+ context 'require_auth_for_graphql_user_resolver feature flag is disabled' do
+ before do
+ stub_feature_flags(require_auth_for_graphql_user_resolver: false)
+ end
+
+ it 'returns the correct user' do
+ expect(
+ resolve_user(args)
+ ).to eq(user)
+ end
+ end
+ end
+ end
+
context 'when neither an ID or a username is provided' do
it 'generates an ArgumentError' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
@@ -23,25 +56,21 @@ RSpec.describe Resolvers::UserResolver do
end
context 'by username' do
- it 'returns the correct user' do
- expect(
- resolve_user(username: user.username)
- ).to eq(user)
+ include_examples "queries user" do
+ let(:args) { { username: user.username } }
end
end
context 'by ID' do
- it 'returns the correct user' do
- expect(
- resolve_user(id: user.to_global_id)
- ).to eq(user)
+ include_examples "queries user" do
+ let(:args) { { id: user.to_global_id } }
end
end
end
private
- def resolve_user(args = {})
- sync(resolve(described_class, args: args))
+ def resolve_user(args = {}, context = { current_user: current_user })
+ sync(resolve(described_class, args: args, ctx: context))
end
end
diff --git a/spec/graphql/resolvers/users_resolver_spec.rb b/spec/graphql/resolvers/users_resolver_spec.rb
index 1ba296912a3..5f7a096a14b 100644
--- a/spec/graphql/resolvers/users_resolver_spec.rb
+++ b/spec/graphql/resolvers/users_resolver_spec.rb
@@ -14,14 +14,6 @@ RSpec.describe Resolvers::UsersResolver do
end
describe '#resolve' do
- it 'generates an error when read_users_list is not authorized' do
- expect(Ability).to receive(:allowed?).with(current_user, :read_users_list).and_return(false)
-
- expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
- resolve_users
- end
- end
-
context 'when no arguments are passed' do
it 'returns all users' do
expect(resolve_users).to contain_exactly(user1, user2, current_user)
@@ -79,8 +71,26 @@ RSpec.describe Resolvers::UsersResolver do
end
end
- it 'allows to search by username' do
- expect(resolve_users(args: { usernames: [user1.username] })).to contain_exactly(user1)
+ it 'prohibits search by username' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_users(args: { usernames: [user1.username] })
+ end
+ end
+
+ context 'require_auth_for_graphql_user_resolver feature flag is disabled' do
+ before do
+ stub_feature_flags(require_auth_for_graphql_user_resolver: false)
+ end
+
+ it 'prohibits search without usernames passed' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_users
+ end
+ end
+
+ it 'allows to search by username' do
+ expect(resolve_users(args: { usernames: [user1.username] })).to contain_exactly(user1)
+ end
end
end
end
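The two resolver spec files above encode the same rule: while the require_auth_for_graphql_user_resolver feature flag is enabled, anonymous GraphQL callers can no longer look up users (by ID, username, or a usernames filter), and disabling the flag restores the previous behaviour. The following plain-Ruby sketch is illustrative only, not the actual resolver code, and exists just to restate that guard:

    # Illustrative sketch, not GitLab's resolver implementation.
    NotAvailable = Class.new(StandardError)

    def resolve_user(current_user:, flag_enabled: true)
      raise NotAvailable, 'resource not available' if flag_enabled && current_user.nil?

      yield # perform the actual user lookup
    end

    resolve_user(current_user: :admin) { :user }                    # => :user
    resolve_user(current_user: nil, flag_enabled: false) { :user }  # => :user
    resolve_user(current_user: nil) { :user }                       # raises NotAvailable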
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index a08bd717c72..23deef73734 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -42,6 +42,34 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(described_class).to include_graphql_fields(*expected_fields)
end
+ describe 'count' do
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ projects {
+ count
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ it 'returns valid projects count' do
+ create(:project, namespace: user.namespace)
+ create(:project, namespace: user.namespace)
+
+ expect(subject.dig('data', 'projects', 'count')).to eq(2)
+ end
+ end
+
describe 'container_registry_enabled' do
let_it_be(:project, reload: true) { create(:project, :public) }
let_it_be(:user) { create(:user) }
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index c913a4c3662..fec6a771640 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -91,8 +91,8 @@ RSpec.describe GitlabSchema.types['User'] do
context 'when requester is nil' do
let(:current_user) { nil }
- it 'returns `****`' do
- expect(user_name).to eq('****')
+ it 'returns nothing' do
+ expect(user_name).to be_nil
end
end
@@ -134,8 +134,8 @@ RSpec.describe GitlabSchema.types['User'] do
context 'when requester is nil' do
let(:current_user) { nil }
- it 'returns `****`' do
- expect(user_name).to eq('****')
+ it 'returns nothing' do
+ expect(user_name).to be_nil
end
end
diff --git a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
index 0580cb9922b..a9851d78f48 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
subject(:parse_report) { Nokogiri::XML::SAX::Parser.new(described_class.new(coverage_report, project_path, paths)).parse(cobertura) }
describe '#parse!' do
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
let(:project_path) { 'foo/bar' }
let(:paths) { ['app/user.rb'] }
diff --git a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb b/spec/lib/gitlab/ci/reports/coverage_report_spec.rb
index 41ebae863ee..383bc5434ee 100644
--- a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/coverage_report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::CoverageReports do
+RSpec.describe Gitlab::Ci::Reports::CoverageReport do
let(:coverage_report) { described_class.new }
it { expect(coverage_report.files).to eq({}) }
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
index 1b74e24bf81..b617da6b157 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
context 'assigns the correct position' do
- it 'computes de first line where the remove would appear' do
+ it 'computes the first line where the remove would appear' do
expect(nb_file.highlighted_diff_lines[0].old_pos).to eq(3)
expect(nb_file.highlighted_diff_lines[0].new_pos).to eq(3)
@@ -142,8 +142,29 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
end
- it 'computes de first line where the remove would appear' do
- expect(nb_file.highlighted_diff_lines.map(&:text).join('')).to include('[Hidden Image Output]')
+ context 'has image' do
+ it 'replaces rich text with img to the embedded image' do
+ expect(nb_file.highlighted_diff_lines[58].rich_text).to include('<img')
+ end
+
+ it 'adds image to src' do
+ img = 'data:image/png;base64,some_image_here'
+ allow(diff).to receive(:diff).and_return("@@ -1,76 +1,74 @@\n ![](#{img})")
+
+ expect(nb_file.highlighted_diff_lines[0].rich_text).to include("<img src=\"#{img}\"")
+ end
+ end
+
+ context 'when embedded image has injected html' do
+ let(:commit) { project.commit("4963fefc990451a8ad34289ce266b757456fc88c") }
+
+ it 'prevents injected html to be rendered as html' do
+ expect(nb_file.highlighted_diff_lines[45].rich_text).not_to include('<div>Hello')
+ end
+
+ it 'keeps the injected html as part of the string' do
+ expect(nb_file.highlighted_diff_lines[45].rich_text).to end_with('/div&gt;">')
+ end
end
end
end
diff --git a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
new file mode 100644
index 00000000000..33f7391a836
--- /dev/null
+++ b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::DuplicateJobs do
+ # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
+ # to move away from `Sidekiq.redis` for duplicate job data. Thus, we use the
+ # same store configuration as the former.
+ let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
+ let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }
+
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ subject { described_class.pool }
+
+ before do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ after do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
+
+ expect(redis_instance.instance_name).to eq('DuplicateJobs')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_duplicate_jobs,
+ :use_primary_store_as_default_for_duplicate_jobs
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config) { false }
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
+ end
+ end
+
+ describe '#store_name' do
+ it 'returns the name of the SharedState store' do
+ expect(described_class.store_name).to eq('SharedState')
+ end
+ end
+end
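As the inline comment in this new spec notes, DuplicateJobs is a pseudo-store layered in front of SharedState, and its pool yields Gitlab::Redis::MultiStore instances. Based only on the #pool examples above, checking out a connection would look roughly like this sketch (the key and value are hypothetical):

    # Rough usage implied by the #pool examples above; key and value are made up.
    Gitlab::Redis::DuplicateJobs.pool.with do |redis|
      # `redis` is a Gitlab::Redis::MultiStore, so the write reaches both the
      # primary and secondary stores while the
      # use_primary_and_secondary_stores_for_duplicate_jobs flag is enabled.
      redis.set('duplicate_jobs:example-idempotency-key', 'jid-123')
    end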
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
new file mode 100644
index 00000000000..70f28b38082
--- /dev/null
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -0,0 +1,901 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::MultiStore do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:redis_store_class) do
+ Class.new(Gitlab::Redis::Wrapper) do
+ def config_file_name
+ config_file_name = "spec/fixtures/config/redis_new_format_host.yml"
+ Rails.root.join(config_file_name).to_s
+ end
+
+ def self.name
+ 'Sessions'
+ end
+ end
+ end
+
+ let_it_be(:primary_db) { 1 }
+ let_it_be(:secondary_db) { 2 }
+ let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
+ let_it_be(:instance_name) { 'TestStore' }
+ let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ subject { multi_store.send(name, *args) }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ after(:all) do
+ primary_store.flushdb
+ secondary_store.flushdb
+ end
+
+ context 'when primary_store is nil' do
+ let(:multi_store) { described_class.new(nil, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
+ end
+ end
+
+ context 'when secondary_store is nil' do
+ let(:multi_store) { described_class.new(primary_store, nil, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
+ end
+ end
+
+ context 'when instance_name is nil' do
+ let(:instance_name) { nil }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
+ end
+ end
+
+ context 'when primary_store is not a ::Redis instance' do
+ before do
+ allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }
+ .to raise_error(ArgumentError, /invalid primary_store/)
+ end
+ end
+
+ context 'when secondary_store is not a ::Redis instance' do
+ before do
+ allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }
+ .to raise_error(ArgumentError, /invalid secondary_store/)
+ end
+ end
+
+ context 'with READ redis commands' do
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:keys) { [key1, key2] }
+ let_it_be(:values) { [value1, value2] }
+ let_it_be(:svalues) { [value2, value1] }
+
+ where(:case_name, :name, :args, :value, :block) do
+ 'execute :get command' | :get | ref(:key1) | ref(:value1) | nil
+ 'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil
+ 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value }
+ 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil
+ 'execute :scard command' | :scard | ref(:skey) | 2 | nil
+ end
+
+ before(:all) do
+ primary_store.multi do |multi|
+ multi.set(key1, value1)
+ multi.set(key2, value2)
+ multi.sadd(skey, value1)
+ multi.sadd(skey, value2)
+ end
+
+ secondary_store.multi do |multi|
+ multi.set(key1, value1)
+ multi.set(key2, value2)
+ multi.sadd(skey, value1)
+ multi.sadd(skey, value2)
+ end
+ end
+
+ RSpec.shared_examples_for 'reads correct value' do
+ it 'returns the correct value' do
+ if value.is_a?(Array)
+ # :smembers does not guarantee the order it will return the values (unsorted set)
+ is_expected.to match_array(value)
+ else
+ is_expected.to eq(value)
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'fallback read from the secondary store' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+ it 'fallback and execute on secondary instance' do
+ expect(secondary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ it 'logs the ReadFromPrimaryError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError),
+ hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
+ )
+
+ subject
+ end
+
+ it 'increment read fallback count metrics' do
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ context 'when fallback read from the secondary instance raises an exception' do
+ before do
+ allow(secondary_store).to receive(name).with(*args).and_raise(StandardError)
+ end
+
+ it 'fails with exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'secondary store' do
+ it 'execute on the secondary instance' do
+ expect(secondary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the primary store' do
+ expect(primary_store).not_to receive(name)
+
+ subject
+ end
+ end
+
+ with_them do
+ describe "#{name}" do
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when reading from the primary is successful' do
+ it 'returns the correct value' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+ end
+
+ context 'when reading from primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(extra: hash_including(:multi_store_error_message, instance_name: instance_name),
+ command_name: name))
+
+ subject
+ end
+
+ include_examples 'fallback read from the secondary store'
+ end
+
+ context 'when reading from primary instance return no value' do
+ before do
+ allow(primary_store).to receive(name).and_return(nil)
+ end
+
+ include_examples 'fallback read from the secondary store'
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.send(name, *args)
+ end
+ end
+
+ it 'is executed only 1 time on primary instance' do
+ expect(primary_store).to receive(name).with(*args).once
+
+ subject
+ end
+ end
+
+ if params[:block]
+ subject do
+ multi_store.send(name, *args, &block)
+ end
+
+ context 'when block is provided' do
+ it 'yields to the block' do
+ expect(primary_store).to receive(name).and_yield(value)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+ end
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'secondary store'
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'execute on the primary instance' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+ end
+ end
+
+ context 'with both primary and secondary store using same redis instance' do
+ let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it_behaves_like 'secondary store'
+ end
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'verify that store contains values' do |store|
+ it "#{store} redis store contains correct values", :aggregate_errors do
+ subject
+
+ redis_store = multi_store.send(store)
+
+ if expected_value.is_a?(Array)
+ # :smembers does not guarantee the order it will return the values
+ expect(redis_store.send(verification_name, *verification_args)).to match_array(expected_value)
+ else
+ expect(redis_store.send(verification_name, *verification_args)).to eq(expected_value)
+ end
+ end
+ end
+
+ context 'with WRITE redis commands' do
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:key1_value1) { [key1, value1] }
+ let_it_be(:key1_value2) { [key1, value2] }
+ let_it_be(:ttl) { 10 }
+ let_it_be(:key1_ttl_value1) { [key1, ttl, value1] }
+ let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:svalues1) { [value2, value1] }
+ let_it_be(:svalues2) { [value1] }
+ let_it_be(:skey_value1) { [skey, value1] }
+ let_it_be(:skey_value2) { [skey, value2] }
+
+ where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
+ 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
+ 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
+ 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
+ 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
+ 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
+ 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
+ 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
+ end
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+
+ primary_store.multi do |multi|
+ multi.set(key2, value1)
+ multi.sadd(skey, value1)
+ end
+
+ secondary_store.multi do |multi|
+ multi.set(key2, value1)
+ multi.sadd(skey, value1)
+ end
+ end
+
+ with_them do
+ describe "#{name}" do
+ let(:expected_args) {args || no_args }
+
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when executing on the primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception and execute on secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.send(name, *args)
+ end
+ end
+
+ it 'is executed only 1 time on each instance', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).once
+ expect(secondary_store).to receive(name).with(*expected_args).once
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name).with(*expected_args)
+ expect(primary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'executes only on the primary_redis redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args)
+ expect(secondary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
+ end
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'pipelined command' do |name|
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:expected_value) { value1 }
+ let_it_be(:verification_name) { :get }
+ let_it_be(:verification_args) { key1 }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ end
+
+ describe "command execution in a transaction" do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).with(
+ :gitlab_redis_multi_store_pipelined_diff_error_total,
+ 'Redis MultiStore pipelined command diff between stores'
+ ).and_return(counter)
+ end
+
+ subject do
+ multi_store.send(name) do |redis|
+ redis.set(key1, value1)
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).and_call_original
+ expect(secondary_store).to receive(name).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when executing on the primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception and execute on secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
+ expect(secondary_store).to receive(name).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ describe 'return values from a transaction' do
+ subject do
+ multi_store.send(name) do |redis|
+ redis.get(key1)
+ end
+ end
+
+ context 'when the value exists on both and are equal' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value1)
+ end
+
+ it 'returns the value' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect(subject).to eq([value1])
+ end
+ end
+
+ context 'when the value exists on both but differ' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value2)
+ end
+
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
+ ).and_call_original
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
+ end
+ end
+
+ context 'when the value does not exist on the primary but it does on the secondary' do
+ before do
+ secondary_store.set(key1, value2)
+ end
+
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ hash_including(command_name: name, extra: hash_including(instance_name: instance_name))
+ )
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
+ end
+ end
+
+ context 'when the value does not exist in either' do
+ it 'returns nil without logging an error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(counter).not_to receive(:increment)
+
+ expect(subject).to eq([nil])
+ end
+ end
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name)
+ expect(primary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'executes only on the primary_redis redis store', :aggregate_errors do
+ expect(primary_store).to receive(name)
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
+ end
+ end
+ end
+
+ describe '#multi' do
+ include_examples 'pipelined command', :multi
+ end
+
+ describe '#pipelined' do
+ include_examples 'pipelined command', :pipelined
+ end
+
+ context 'with unsupported command' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+ let_it_be(:key) { "redis:counter" }
+
+ subject { multi_store.incr(key) }
+
+ it 'responds to missing method' do
+ expect(multi_store).to receive(:respond_to_missing?).and_call_original
+
+ expect(multi_store.respond_to?(:incr)).to be(true)
+ end
+
+ it 'executes method missing' do
+ expect(multi_store).to receive(:method_missing)
+
+ subject
+ end
+
+ context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ it 'logs MethodMissingError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
+ hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name))
+ )
+
+ subject
+ end
+
+ it 'increments method missing counter' do
+ expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
+
+ subject
+ end
+ end
+
+ context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ subject { multi_store.info }
+
+ it 'does not log MethodMissingError' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ subject
+ end
+
+ it 'does not increment method missing counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'fallback and executes only on the secondary store', :aggregate_errors do
+ expect(primary_store).to receive(:incr).with(key).and_call_original
+ expect(secondary_store).not_to receive(:incr)
+
+ subject
+ end
+
+ it 'correct value is stored on the secondary store', :aggregate_errors do
+ subject
+
+ expect(secondary_store.get(key)).to be_nil
+ expect(primary_store.get(key)).to eq('1')
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'fallback and executes only on the secondary store', :aggregate_errors do
+ expect(secondary_store).to receive(:incr).with(key).and_call_original
+ expect(primary_store).not_to receive(:incr)
+
+ subject
+ end
+
+ it 'correct value is stored on the secondary store', :aggregate_errors do
+ subject
+
+ expect(primary_store.get(key)).to be_nil
+ expect(secondary_store.get(key)).to eq('1')
+ end
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.incr(key)
+ end
+ end
+
+ it 'is executed only 1 time on each instance', :aggregate_errors do
+ expect(primary_store).to receive(:incr).with(key).once
+ expect(secondary_store).to receive(:incr).with(key).once
+
+ subject
+ end
+
+ it "both redis stores are containing correct values", :aggregate_errors do
+ subject
+
+ expect(primary_store.get(key)).to eq('1')
+ expect(secondary_store.get(key)).to eq('1')
+ end
+ end
+ end
+
+ describe '#to_s' do
+ subject { multi_store.to_s }
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(secondary_store.to_s)
+ end
+ end
+ end
+ end
+
+ describe '#is_a?' do
+ it 'returns true for ::Redis::Store' do
+ expect(multi_store.is_a?(::Redis::Store)).to be true
+ end
+ end
+
+ describe '#use_primary_and_secondary_stores?' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'with empty DB' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'when FF table guard raises' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ describe '#use_primary_store_as_default?' do
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_store_as_default?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_store_as_default?).to be false
+ end
+ end
+
+ context 'with empty DB' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'when FF table guard raises' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ def create_redis_store(options, extras = {})
+ ::Redis::Store.new(options.merge(extras))
+ end
+end
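Most of this new spec exercises MultiStore's read path: a read goes to the primary store first and falls back to the secondary store when the primary raises or returns nothing, logging a ReadFromPrimaryError and incrementing a fallback counter along the way. A self-contained plain-Ruby sketch of that fallback shape, illustrative only and not the GitLab implementation:

    # Illustrative only: the read-with-fallback behaviour the spec asserts,
    # reduced to plain Ruby objects instead of GitLab's Redis wrappers.
    class ReadFallbackSketch
      def initialize(primary, secondary, logger: nil)
        @primary = primary
        @secondary = secondary
        @logger = logger
      end

      def read(command, *args)
        value = @primary.public_send(command, *args)
        return value unless value.nil?

        fallback(command, *args) # primary had no value
      rescue StandardError => e
        @logger&.call("read from primary failed: #{e.class}")
        fallback(command, *args) # primary raised
      end

      private

      def fallback(command, *args)
        @secondary.public_send(command, *args)
      end
    end

    ReadFallbackSketch.new({}, { 'k' => 'v' }).read(:[], 'k') # => "v" via fallback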
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 00ae55237e9..8e8e1f2f072 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -59,6 +59,21 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
+ it 'logs the normalized SQL query for statement timeouts' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(start_payload)
+ expect(logger).to receive(:warn).with(
+ include('exception.sql' => 'SELECT "users".* FROM "users" WHERE "users"."id" = $1 AND "users"."foo" = $2')
+ )
+
+ expect do
+ call_subject(job, 'test_queue') do
+ raise ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = 2')
+ end
+ end.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+
it 'logs the root cause of an Sidekiq::JobRetry::Skip exception in the job' do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
@@ -100,8 +115,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
include(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
'job_status' => 'fail',
- 'error_class' => 'Sidekiq::JobRetry::Skip',
- 'error_message' => 'Sidekiq::JobRetry::Skip'
+ 'exception.class' => 'Sidekiq::JobRetry::Skip',
+ 'exception.message' => 'Sidekiq::JobRetry::Skip'
)
)
expect(subject).to receive(:log_job_start).and_call_original
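The new example added above asserts that when a statement times out, the warning payload carries the query under exception.sql with its literal values replaced by numbered bind placeholders ($1, $2, ...). The snippet below is only a toy illustration of that expected shape, not the mechanism GitLab uses to normalize the query:

    # Toy normalization: swap integer literals after "=" for $1, $2, ...
    sql = 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = 2'
    n = 0
    normalized = sql.gsub(/(?<== )\d+/) { "$#{n += 1}" }
    # => 'SELECT "users".* FROM "users" WHERE "users"."id" = $1 AND "users"."foo" = $2'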
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 8d46845548a..d240bf51e67 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues do
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
using RSpec::Parameterized::TableSyntax
subject(:duplicate_job) do
@@ -81,135 +81,99 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- describe '#check!' do
- context 'when there was no job in the queue yet' do
- it { expect(duplicate_job.check!).to eq('123') }
+ shared_examples 'tracking duplicates in redis' do
+ describe '#check!' do
+ context 'when there was no job in the queue yet' do
+ it { expect(duplicate_job.check!).to eq('123') }
- shared_examples 'sets Redis keys with correct TTL' do
- it "adds an idempotency key with correct ttl" do
- expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from([nil, -2])
- .to(['123', be_within(1).of(expected_ttl)])
- end
-
- context 'when wal locations is not empty' do
- it "adds an existing wal locations key with correct ttl" do
+ shared_examples 'sets Redis keys with correct TTL' do
+ it "adds an idempotency key with correct ttl" do
expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([nil, -2])
- .to([wal_locations[:main], be_within(1).of(expected_ttl)])
- .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .to change { read_idempotency_key_with_ttl(idempotency_key) }
.from([nil, -2])
- .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
+ .to(['123', be_within(1).of(expected_ttl)])
end
- end
- end
- context 'with TTL option is not set' do
- let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
-
- it_behaves_like 'sets Redis keys with correct TTL'
- end
-
- context 'when TTL option is set' do
- let(:expected_ttl) { 5.minutes }
-
- before do
- allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
+ context 'when wal locations is not empty' do
+ it "adds an existing wal locations key with correct ttl" do
+ expect { duplicate_job.check! }
+ .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([nil, -2])
+ .to([wal_locations[:main], be_within(1).of(expected_ttl)])
+ .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([nil, -2])
+ .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
+ end
+ end
end
- it_behaves_like 'sets Redis keys with correct TTL'
- end
+ context 'when TTL option is not set' do
+ let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
- it "adds the idempotency key to the jobs payload" do
- expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
- end
- end
-
- context 'when there was already a job with same arguments in the same queue' do
- before do
- set_idempotency_key(idempotency_key, 'existing-key')
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ it_behaves_like 'sets Redis keys with correct TTL'
end
- end
- it { expect(duplicate_job.check!).to eq('existing-key') }
+ context 'when TTL option is set' do
+ let(:expected_ttl) { 5.minutes }
- it "does not change the existing key's TTL" do
- expect { duplicate_job.check! }
- .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from(['existing-key', -1])
- end
-
- it "does not change the existing wal locations key's TTL" do
- expect { duplicate_job.check! }
- .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([wal_locations[:main], -1])
- .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([wal_locations[:ci], -1])
- end
+ before do
+ allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
+ end
- it 'sets the existing jid' do
- duplicate_job.check!
+ it_behaves_like 'sets Redis keys with correct TTL'
+ end
- expect(duplicate_job.existing_jid).to eq('existing-key')
+ it "adds the idempotency key to the jobs payload" do
+ expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
+ end
end
- end
- end
-
- describe '#update_latest_wal_location!' do
- before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(
- { main: ::ActiveRecord::Base,
- ci: ::ActiveRecord::Base })
- set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
- set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
+ context 'when there was already a job with same arguments in the same queue' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-key')
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ end
+ end
- # read existing_wal_locations
- duplicate_job.check!
- end
+ it { expect(duplicate_job.check!).to eq('existing-key') }
- context "when the key doesn't exists in redis" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
- end
+ it "does not change the existing key's TTL" do
+ expect { duplicate_job.check! }
+ .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
+ .from(['existing-key', -1])
+ end
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ it "does not change the existing wal locations key's TTL" do
+ expect { duplicate_job.check! }
+ .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([wal_locations[:main], -1])
+ .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([wal_locations[:ci], -1])
+ end
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ it 'sets the existing jid' do
+ duplicate_job.check!
- it 'stores a wal location to redis with an offset relative to existing wal location' do
- expect { duplicate_job.update_latest_wal_location! }
- .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from([])
- .to(new_wal_location_with_offset[:main])
- .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from([])
- .to(new_wal_location_with_offset[:ci])
+ expect(duplicate_job.existing_jid).to eq('existing-key')
+ end
end
end
- context "when the key exists in redis" do
+ describe '#update_latest_wal_location!' do
before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
- end
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(
+ { main: ::ActiveRecord::Base,
+ ci: ::ActiveRecord::Base })
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
+ set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
- context "when the new offset is bigger then the existing one" do
+ # read existing_wal_locations
+ duplicate_job.check!
+ end
+
+ context "when the key doesn't exists in redis" do
let(:existing_wal) do
{
main: '0/D525E3A0',
@@ -217,14 +181,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A3', '3'],
- ci: ['AB/12342', '2']
- }
- end
-
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
@@ -233,154 +189,335 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
- it 'updates a wal location to redis with an offset' do
+ let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+
+ it 'stores a wal location to redis with an offset relative to existing wal location' do
expect { duplicate_job.update_latest_wal_location! }
.to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
+ .from([])
.to(new_wal_location_with_offset[:main])
.and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+ .from([])
.to(new_wal_location_with_offset[:ci])
end
end
- context "when the old offset is not bigger then the existing one" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
+ context "when the key exists in redis" do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
end
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A2', '2'],
- ci: ['AB/12342', '2']
- }
+ context "when the new offset is bigger then the existing one" do
+ let(:existing_wal) do
+ {
+ main: '0/D525E3A0',
+ ci: 'AB/12340'
+ }
+ end
+
+ let(:stored_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A3', '3'],
+ ci: ['AB/12342', '2']
+ }
+ end
+
+ let(:new_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A8', '8'],
+ ci: ['AB/12345', '5']
+ }
+ end
+
+ it 'updates a wal location to redis with an offset' do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(stored_wal_location_with_offset[:main])
+ .to(new_wal_location_with_offset[:main])
+ .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(stored_wal_location_with_offset[:ci])
+ .to(new_wal_location_with_offset[:ci])
+ end
end
- it "does not update a wal location to redis with an offset" do
- expect { duplicate_job.update_latest_wal_location! }
- .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
- .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+ context "when the old offset is not bigger then the existing one" do
+ let(:existing_wal) do
+ {
+ main: '0/D525E3A0',
+ ci: 'AB/12340'
+ }
+ end
+
+ let(:stored_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A8', '8'],
+ ci: ['AB/12345', '5']
+ }
+ end
+
+ let(:new_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A2', '2'],
+ ci: ['AB/12342', '2']
+ }
+ end
+
+ it "does not update a wal location to redis with an offset" do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(stored_wal_location_with_offset[:main])
+ .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(stored_wal_location_with_offset[:ci])
+ end
end
end
end
- end
- describe '#latest_wal_locations' do
- context 'when job was deduplicated and wal locations were already persisted' do
- before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
- end
+ describe '#latest_wal_locations' do
+ context 'when job was deduplicated and wal locations were already persisted' do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
+ end
- it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
- end
+ it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
+ end
- context 'when job is not deduplication and wal locations were not persisted' do
- it { expect(duplicate_job.latest_wal_locations).to be_empty }
+ context 'when job was not deduplicated and wal locations were not persisted' do
+ it { expect(duplicate_job.latest_wal_locations).to be_empty }
+ end
end
- end
- describe '#delete!' do
- context "when we didn't track the definition" do
- it { expect { duplicate_job.delete! }.not_to raise_error }
- end
+ describe '#delete!' do
+ context "when we didn't track the definition" do
+ it { expect { duplicate_job.delete! }.not_to raise_error }
+ end
- context 'when the key exists in redis' do
- before do
- set_idempotency_key(idempotency_key, 'existing-jid')
- set_idempotency_key(deduplicated_flag_key, 1)
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
- set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ context 'when the key exists in redis' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-jid')
+ set_idempotency_key(deduplicated_flag_key, 1)
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ end
end
- end
- shared_examples 'deleting the duplicate job' do
- shared_examples 'deleting keys from redis' do |key_name|
- it "removes the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
- .to([nil, -2])
+ shared_examples 'deleting the duplicate job' do
+ shared_examples 'deleting keys from redis' do |key_name|
+ it "removes the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ .to([nil, -2])
+ end
+ end
+
+ shared_examples 'does not delete key from redis' do |key_name|
+ it "does not remove the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to not_change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ end
+ end
+
+ it_behaves_like 'deleting keys from redis', 'idempotent key' do
+ let(:key) { idempotency_key }
+ let(:from_value) { 'existing-jid' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
+ let(:key) { deduplicated_flag_key }
+ let(:from_value) { '1' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
+ let(:key) { wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
+ let(:key) { wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
end
end
- shared_examples 'does not delete key from redis' do |key_name|
- it "does not remove the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to not_change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
+ context 'when the idempotency key is not part of the job' do
+ it_behaves_like 'deleting the duplicate job'
+
+ it 'recalculates the idempotency hash' do
+ expect(duplicate_job).to receive(:idempotency_hash).and_call_original
+
+ duplicate_job.delete!
end
end
- it_behaves_like 'deleting keys from redis', 'idempotent key' do
- let(:key) { idempotency_key }
- let(:from_value) { 'existing-jid' }
+ context 'when the idempotency key is part of the job' do
+ let(:idempotency_key) { 'not the same as what we calculate' }
+ let(:job) { super().merge('idempotency_key' => idempotency_key) }
+
+ it_behaves_like 'deleting the duplicate job'
+
+ it 'does not recalculate the idempotency hash' do
+ expect(duplicate_job).not_to receive(:idempotency_hash)
+
+ duplicate_job.delete!
+ end
end
+ end
+ end
- it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
- let(:key) { deduplicated_flag_key }
- let(:from_value) { '1' }
+ describe '#set_deduplicated_flag!' do
+ context 'when the job is reschedulable' do
+ before do
+ allow(duplicate_job).to receive(:reschedulable?) { true }
end
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
- let(:key) { existing_wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
+ it 'sets the key in Redis' do
+ duplicate_job.set_deduplicated_flag!
+
+ flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
+
+ expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
end
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
- let(:key) { existing_wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it 'sets, gets and cleans up the deduplicated flag' do
+ expect(duplicate_job.should_reschedule?).to eq(false)
+
+ duplicate_job.set_deduplicated_flag!
+ expect(duplicate_job.should_reschedule?).to eq(true)
+
+ duplicate_job.delete!
+ expect(duplicate_job.should_reschedule?).to eq(false)
end
+ end
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
- let(:key) { wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
+ context 'when the job is not reschedulable' do
+ before do
+ allow(duplicate_job).to receive(:reschedulable?) { false }
end
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
- let(:key) { wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it 'does not set the key in Redis' do
+ duplicate_job.set_deduplicated_flag!
+
+ flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
+
+ expect(flag).to be_nil
end
- end
- context 'when the idempotency key is not part of the job' do
- it_behaves_like 'deleting the duplicate job'
+ it 'does not set the deduplicated flag' do
+ expect(duplicate_job.should_reschedule?).to eq(false)
- it 'recalculates the idempotency hash' do
- expect(duplicate_job).to receive(:idempotency_hash).and_call_original
+ duplicate_job.set_deduplicated_flag!
+ expect(duplicate_job.should_reschedule?).to eq(false)
duplicate_job.delete!
+ expect(duplicate_job.should_reschedule?).to eq(false)
end
end
+ end
+
+ describe '#duplicate?' do
+ it "raises an error if the check wasn't performed" do
+ expect { duplicate_job.duplicate? }.to raise_error(/Call `#check!` first/)
+ end
- context 'when the idempotency key is part of the job' do
- let(:idempotency_key) { 'not the same as what we calculate' }
- let(:job) { super().merge('idempotency_key' => idempotency_key) }
+ it 'returns false if the existing jid equals the job jid' do
+ duplicate_job.check!
- it_behaves_like 'deleting the duplicate job'
+ expect(duplicate_job.duplicate?).to be(false)
+ end
- it 'does not recalculate the idempotency hash' do
- expect(duplicate_job).not_to receive(:idempotency_hash)
+ it 'returns true if the existing jid is different from the job jid' do
+ set_idempotency_key(idempotency_key, 'a different jid')
+ duplicate_job.check!
- duplicate_job.delete!
+ expect(duplicate_job.duplicate?).to be(true)
+ end
+ end
+
+ def existing_wal_location_key(idempotency_key, connection_name)
+ "#{idempotency_key}:#{connection_name}:existing_wal_location"
+ end
+
+ def wal_location_key(idempotency_key, connection_name)
+ "#{idempotency_key}:#{connection_name}:wal_location"
+ end
+
+ def set_idempotency_key(key, value = '1')
+ with_redis { |r| r.set(key, value) }
+ end
+
+ def rpush_to_redis_key(key, wal, offset)
+ with_redis { |r| r.rpush(key, [wal, offset]) }
+ end
+
+ def read_idempotency_key_with_ttl(key)
+ with_redis do |redis|
+ redis.pipelined do |p|
+ p.get(key)
+ p.ttl(key)
end
end
end
+
+ def read_range_from_redis(key)
+ with_redis do |redis|
+ redis.lrange(key, 0, -1)
+ end
+ end
+ end
+
+ context 'with multi-store feature flags turned on' do
+ def with_redis(&block)
+ Gitlab::Redis::DuplicateJobs.with(&block)
+ end
+
+ it 'uses Gitlab::Redis::DuplicateJobs.with' do
+ expect(Gitlab::Redis::DuplicateJobs).to receive(:with).and_call_original
+ expect(Sidekiq).not_to receive(:redis)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'tracking duplicates in redis'
+ end
+
+ context 'when both multi-store feature flags are off' do
+ def with_redis(&block)
+ Sidekiq.redis(&block)
+ end
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_duplicate_jobs: false)
+ stub_feature_flags(use_primary_store_as_default_for_duplicate_jobs: false)
+ end
+
+ it 'uses Sidekiq.redis' do
+ expect(Sidekiq).to receive(:redis).and_call_original
+ expect(Gitlab::Redis::DuplicateJobs).not_to receive(:with)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'tracking duplicates in redis'
end
describe '#scheduled?' do
@@ -449,75 +586,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- describe '#set_deduplicated_flag!' do
- context 'when the job is reschedulable' do
- before do
- allow(duplicate_job).to receive(:reschedulable?) { true }
- end
-
- it 'sets the key in Redis' do
- duplicate_job.set_deduplicated_flag!
-
- flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
- end
-
- it 'sets, gets and cleans up the deduplicated flag' do
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.set_deduplicated_flag!
- expect(duplicate_job.should_reschedule?).to eq(true)
-
- duplicate_job.delete!
- expect(duplicate_job.should_reschedule?).to eq(false)
- end
- end
-
- context 'when the job is not reschedulable' do
- before do
- allow(duplicate_job).to receive(:reschedulable?) { false }
- end
-
- it 'does not set the key in Redis' do
- duplicate_job.set_deduplicated_flag!
-
- flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to be_nil
- end
-
- it 'does not set the deduplicated flag' do
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.set_deduplicated_flag!
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.delete!
- expect(duplicate_job.should_reschedule?).to eq(false)
- end
- end
- end
-
- describe '#duplicate?' do
- it "raises an error if the check wasn't performed" do
- expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
- end
-
- it 'returns false if the existing jid equals the job jid' do
- duplicate_job.check!
-
- expect(duplicate_job.duplicate?).to be(false)
- end
-
- it 'returns false if the existing jid is different from the job jid' do
- set_idempotency_key(idempotency_key, 'a different jid')
- duplicate_job.check!
-
- expect(duplicate_job.duplicate?).to be(true)
- end
- end
-
describe '#scheduled_at' do
let(:scheduled_at) { 42 }
let(:job) do
@@ -592,35 +660,4 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
end
-
- def existing_wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:existing_wal_location"
- end
-
- def wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:wal_location"
- end
-
- def set_idempotency_key(key, value = '1')
- Sidekiq.redis { |r| r.set(key, value) }
- end
-
- def rpush_to_redis_key(key, wal, offset)
- Sidekiq.redis { |r| r.rpush(key, [wal, offset]) }
- end
-
- def read_idempotency_key_with_ttl(key)
- Sidekiq.redis do |redis|
- redis.pipelined do |p|
- p.get(key)
- p.ttl(key)
- end
- end
- end
-
- def read_range_from_redis(key)
- Sidekiq.redis do |redis|
- redis.lrange(key, 0, -1)
- end
- end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 9beeb9e8737..6d2827b1877 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -4481,7 +4481,7 @@ RSpec.describe Ci::Build do
describe '#collect_coverage_reports!' do
subject { build.collect_coverage_reports!(coverage_report) }
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
it { expect(coverage_report.files).to eq({}) }
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
index b6da481024a..84209999ab2 100644
--- a/spec/models/concerns/pg_full_text_searchable_spec.rb
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -99,6 +99,17 @@ RSpec.describe PgFullTextSearchable do
it 'does not support searching by non-Latin characters' do
expect(model_class.pg_full_text_search('日本')).to be_empty
end
+
+ context 'when search term has a URL' do
+ let(:with_url) { model_class.create!(project: project, title: 'issue with url', description: 'sample url,https://gitlab.com/gitlab-org/gitlab') }
+
+ it 'allows searching by full URL, ignoring the scheme' do
+ with_url.update_search_data!
+
+ expect(model_class.pg_full_text_search('https://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
+ expect(model_class.pg_full_text_search('gopher://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
+ end
+ end
end
describe '#update_search_data!' do
diff --git a/spec/requests/api/graphql/user/starred_projects_query_spec.rb b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
index 37a85b98e5f..75a17ed34c4 100644
--- a/spec/requests/api/graphql/user/starred_projects_query_spec.rb
+++ b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe 'Getting starredProjects of the user' do
let_it_be(:user, reload: true) { create(:user) }
let(:user_fields) { 'starredProjects { nodes { id } }' }
- let(:current_user) { nil }
let(:starred_projects) do
post_graphql(query, current_user: current_user)
@@ -34,21 +33,23 @@ RSpec.describe 'Getting starredProjects of the user' do
user.toggle_star(project_c)
end
- it_behaves_like 'a working graphql query' do
- before do
- post_graphql(query)
- end
- end
+ context 'anonymous access' do
+ let(:current_user) { nil }
- it 'found only public project' do
- expect(starred_projects).to contain_exactly(
- a_graphql_entity_for(project_a)
- )
+ it 'returns nothing' do
+ expect(starred_projects).to be_nil
+ end
end
context 'the current user is the user' do
let(:current_user) { user }
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
it 'found all projects' do
expect(starred_projects).to contain_exactly(
a_graphql_entity_for(project_a),
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 11f469c1d27..7c865dd7e11 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -53,7 +53,7 @@ module TestEnv
'wip' => 'b9238ee',
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3',
- 'add-ipython-files' => 'a867a602',
+ 'add-ipython-files' => '4963fef',
'add-pdf-file' => 'e774ebd',
'squash-large-files' => '54cec52',
'add-pdf-text-binary' => '79faa7b',
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index 7d51c90522a..5dc1af55685 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -18,7 +18,10 @@ RSpec.shared_context 'structured_logger' do
"correlation_id" => 'cid',
"error_message" => "wrong number of arguments (2 for 3)",
"error_class" => "ArgumentError",
- "error_backtrace" => []
+ "error_backtrace" => [],
+ "exception.message" => "wrong number of arguments (2 for 3)",
+ "exception.class" => "ArgumentError",
+ "exception.backtrace" => []
}
end
@@ -28,7 +31,10 @@ RSpec.shared_context 'structured_logger' do
let(:clock_thread_cputime_start) { 0.222222299 }
let(:clock_thread_cputime_end) { 1.333333799 }
let(:start_payload) do
- job.except('error_backtrace', 'error_class', 'error_message').merge(
+ job.except(
+ 'error_message', 'error_class', 'error_backtrace',
+ 'exception.backtrace', 'exception.class', 'exception.message'
+ ).merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
'job_status' => 'start',
'pid' => Process.pid,
@@ -68,7 +74,10 @@ RSpec.shared_context 'structured_logger' do
'job_status' => 'fail',
'error_class' => 'ArgumentError',
'error_message' => 'Something went wrong',
- 'error_backtrace' => be_a(Array).and(be_present)
+ 'error_backtrace' => be_a(Array).and(be_present),
+ 'exception.class' => 'ArgumentError',
+ 'exception.message' => 'Something went wrong',
+ 'exception.backtrace' => be_a(Array).and(be_present)
)
end
diff --git a/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb
new file mode 100644
index 00000000000..a5e4df1c272
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'multi store feature flags' do |use_primary_and_secondary_stores, use_primary_store_as_default|
+ context "with feature flag :#{use_primary_and_secondary_stores} is enabled" do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores => true)
+ end
+
+ it 'multi store is enabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_and_secondary_stores?).to be true
+ end
+ end
+ end
+
+ context "with feature flag :#{use_primary_and_secondary_stores} is disabled" do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores => false)
+ end
+
+ it 'multi store is disabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ context "with feature flag :#{use_primary_store_as_default} is enabled" do
+ before do
+ stub_feature_flags(use_primary_store_as_default => true)
+ end
+
+ it 'primary store is enabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_store_as_default?).to be true
+ end
+ end
+ end
+
+ context "with feature flag :#{use_primary_store_as_default} is disabled" do
+ before do
+ stub_feature_flags(use_primary_store_as_default => false)
+ end
+
+ it 'primary store is disabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_store_as_default?).to be false
+ end
+ end
+ end
+end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 78e9c8e9c62..f48ca5b8f8c 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -101,14 +101,15 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'Rakefile' | [:backend]
'FOO_VERSION' | [:backend]
- 'lib/scripts/bar.rb' | [:backend, :tooling]
- 'lib/scripts/bar.js' | [:frontend, :tooling]
+ 'scripts/glfm/bar.rb' | [:backend]
+ 'scripts/glfm/bar.js' | [:frontend]
+ 'scripts/lib/glfm/bar.rb' | [:backend]
+ 'scripts/lib/glfm/bar.js' | [:frontend]
'scripts/bar.rb' | [:backend, :tooling]
'scripts/bar.js' | [:frontend, :tooling]
- 'lib/scripts/subdir/bar.rb' | [:backend, :tooling]
- 'lib/scripts/subdir/bar.js' | [:frontend, :tooling]
'scripts/subdir/bar.rb' | [:backend, :tooling]
'scripts/subdir/bar.js' | [:frontend, :tooling]
+ 'scripts/foo' | [:tooling]
'Dangerfile' | [:tooling]
'danger/bundle_size/Dangerfile' | [:tooling]
@@ -118,7 +119,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'.gitlab-ci.yml' | [:tooling]
'.gitlab/ci/cng.gitlab-ci.yml' | [:tooling]
'.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:tooling]
- 'scripts/foo' | [:tooling]
'tooling/danger/foo' | [:tooling]
'ee/tooling/danger/foo' | [:tooling]
'lefthook.yml' | [:tooling]