gitlab.com/gitlab-org/gitlab-foss.git

Diffstat (limited to 'spec')
-rw-r--r--  spec/db/schema_spec.rb | 5
-rw-r--r--  spec/factories/alert_management/alerts.rb | 4
-rw-r--r--  spec/factories/gitlab/database/background_migration/batched_migrations.rb | 1
-rw-r--r--  spec/features/projects/ci/editor_spec.rb | 10
-rw-r--r--  spec/features/projects/releases/user_views_edit_release_spec.rb | 4
-rw-r--r--  spec/frontend/editor/schema/ci/ci_schema_spec.js | 1
-rw-r--r--  spec/frontend/issues/list/components/issues_list_app_spec.js | 26
-rw-r--r--  spec/frontend/issues/list/utils_spec.js | 24
-rw-r--r--  spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js | 3
-rw-r--r--  spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js | 3
-rw-r--r--  spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js | 40
-rw-r--r--  spec/frontend/pipeline_editor/pipeline_editor_home_spec.js | 92
-rw-r--r--  spec/initializers/validate_database_config_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb | 85
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb | 105
-rw-r--r--  spec/lib/gitlab/patch/database_config_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/subscription_portal_spec.rb | 46
-rw-r--r--  spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb | 34
-rw-r--r--  spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb | 3
-rw-r--r--  spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb | 33
-rw-r--r--  spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb | 2
-rw-r--r--  spec/models/hooks/web_hook_spec.rb | 4
-rw-r--r--  spec/models/merge_request_spec.rb | 53
-rw-r--r--  spec/requests/api/graphql/project/merge_request/pipelines_spec.rb | 69
-rw-r--r--  spec/requests/api/usage_data_spec.rb | 21
-rw-r--r--  spec/requests/mailgun/webhooks_controller_spec.rb | 149
-rw-r--r--  spec/requests/members/mailgun/permanent_failure_spec.rb | 128
-rw-r--r--  spec/rubocop/formatter/todo_formatter_spec.rb | 9
-rw-r--r--  spec/rubocop/todo_dir_spec.rb | 6
-rw-r--r--  spec/scripts/lib/glfm/update_specification_spec.rb | 75
-rw-r--r--  spec/services/members/mailgun/process_webhook_service_spec.rb | 30
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 12
-rw-r--r--  spec/support/helpers/graphql_helpers.rb | 4
-rw-r--r--  spec/support/helpers/subscription_portal_helper.rb | 13
-rw-r--r--  spec/support/matchers/background_migrations_matchers.rb | 6
-rw-r--r--  spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb | 138
-rw-r--r--  spec/tasks/rubocop_rake_spec.rb | 11
39 files changed, 903 insertions(+), 379 deletions(-)
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index e21c73976a8..0895e13c3c3 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -94,7 +94,10 @@ RSpec.describe 'Database schema' do
vulnerability_identifiers: %w[external_id],
vulnerability_scanners: %w[external_id],
security_scans: %w[pipeline_id], # foreign key is not added as ci_pipeline table will be moved into different db soon
- vulnerability_reads: %w[cluster_agent_id]
+ vulnerability_reads: %w[cluster_agent_id],
+ # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87584
+ # Fixes performance issues with the deletion of web-hooks with many log entries
+ web_hook_logs: %w[web_hook_id]
}.with_indifferent_access.freeze
context 'for table' do
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index 7e9e58edc1e..443e43d5fd1 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -95,10 +95,6 @@ FactoryBot.define do
severity { 'unknown' }
end
- trait :threat_monitoring do
- domain { :threat_monitoring }
- end
-
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] }
payload do
diff --git a/spec/factories/gitlab/database/background_migration/batched_migrations.rb b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
index 5ff90ff44b9..ea0fb571cc4 100644
--- a/spec/factories/gitlab/database/background_migration/batched_migrations.rb
+++ b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
@@ -12,6 +12,7 @@ FactoryBot.define do
sequence(:job_arguments) { |n| [["column_#{n}"], ["column_#{n}_convert_to_bigint"]] }
total_tuple_count { 10_000 }
pause_ms { 100 }
+ gitlab_schema { :gitlab_main }
trait(:paused) do
status { 0 }
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index 2f960c09936..8197fe46c7b 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'Pipeline Editor', :js do
let(:other_branch) { 'test' }
before do
- stub_feature_flags(pipeline_editor_file_tree: false)
-
sign_in(user)
project.add_developer(user)
@@ -70,14 +68,8 @@ RSpec.describe 'Pipeline Editor', :js do
expect(page).to have_content('Pipeline Editor')
end
- describe 'Branch Switcher (pipeline_editor_file_tree disabled)' do
- it_behaves_like 'default branch switcher behavior'
- end
-
- describe 'Branch Switcher (pipeline_editor_file_tree enabled)' do
+ describe 'Branch Switcher' do
before do
- stub_feature_flags(pipeline_editor_file_tree: true)
-
visit project_ci_pipeline_editor_path(project)
wait_for_requests
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index f08f5529472..6551b254643 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -30,10 +30,12 @@ RSpec.describe 'User edits Release', :js do
it 'renders the breadcrumbs' do
within('.breadcrumbs') do
- expect(page).to have_content("#{project.creator.name} #{project.name} Edit Release")
+ expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name} Edit Release")
expect(page).to have_link(project.creator.name, href: user_path(project.creator))
expect(page).to have_link(project.name, href: project_path(project))
+ expect(page).to have_link(_('Releases'), href: project_releases_path(project))
+ expect(page).to have_link(release.name, href: project_release_path(project, release))
expect(page).to have_link('Edit Release', href: edit_project_release_path(project, release))
end
end
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 628c34a27c1..c59806a5d60 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -38,6 +38,7 @@ const ajv = new Ajv({
strictTuples: false,
allowMatchingProperties: true,
});
+ajv.addKeyword('markdownDescription');
AjvFormats(ajv);
const schema = ajv.compile(CiSchema);
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index d92ba527b5c..7de653e3069 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -301,17 +301,23 @@ describe('CE IssuesListApp component', () => {
describe('initial url params', () => {
describe('page', () => {
it('page_after is set from the url params', () => {
- setWindowLocation('?page_after=randomCursorString');
+ setWindowLocation('?page_after=randomCursorString&first_page_size=20');
wrapper = mountComponent();
- expect(wrapper.vm.$route.query).toMatchObject({ page_after: 'randomCursorString' });
+ expect(wrapper.vm.$route.query).toMatchObject({
+ page_after: 'randomCursorString',
+ first_page_size: '20',
+ });
});
it('page_before is set from the url params', () => {
- setWindowLocation('?page_before=anotherRandomCursorString');
+ setWindowLocation('?page_before=anotherRandomCursorString&last_page_size=20');
wrapper = mountComponent();
- expect(wrapper.vm.$route.query).toMatchObject({ page_before: 'anotherRandomCursorString' });
+ expect(wrapper.vm.$route.query).toMatchObject({
+ page_before: 'anotherRandomCursorString',
+ last_page_size: '20',
+ });
});
});
@@ -675,10 +681,10 @@ describe('CE IssuesListApp component', () => {
});
describe.each`
- event | paramName | paramValue
- ${'next-page'} | ${'page_after'} | ${'endCursor'}
- ${'previous-page'} | ${'page_before'} | ${'startCursor'}
- `('when "$event" event is emitted by IssuableList', ({ event, paramName, paramValue }) => {
+ event | params
+ ${'next-page'} | ${{ page_after: 'endCursor', page_before: undefined, first_page_size: 20, last_page_size: undefined }}
+ ${'previous-page'} | ${{ page_after: undefined, page_before: 'startCursor', first_page_size: undefined, last_page_size: 20 }}
+ `('when "$event" event is emitted by IssuableList', ({ event, params }) => {
beforeEach(() => {
wrapper = mountComponent({
data: {
@@ -697,9 +703,9 @@ describe('CE IssuesListApp component', () => {
expect(scrollUp).toHaveBeenCalled();
});
- it(`updates url with "${paramName}" param`, () => {
+ it(`updates url`, () => {
expect(wrapper.vm.$router.push).toHaveBeenCalledWith({
- query: expect.objectContaining({ [paramName]: paramValue }),
+ query: expect.objectContaining(params),
});
});
});
diff --git a/spec/frontend/issues/list/utils_spec.js b/spec/frontend/issues/list/utils_spec.js
index ce0477883d7..e8ffba9bc80 100644
--- a/spec/frontend/issues/list/utils_spec.js
+++ b/spec/frontend/issues/list/utils_spec.js
@@ -42,27 +42,37 @@ describe('getInitialPageParams', () => {
'returns the correct page params for sort key %s with afterCursor',
(sortKey) => {
const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ const lastPageSize = undefined;
const afterCursor = 'randomCursorString';
const beforeCursor = undefined;
-
- expect(getInitialPageParams(sortKey, afterCursor, beforeCursor)).toEqual({
+ const pageParams = getInitialPageParams(
+ sortKey,
firstPageSize,
+ lastPageSize,
afterCursor,
- });
+ beforeCursor,
+ );
+
+ expect(pageParams).toEqual({ firstPageSize, afterCursor });
},
);
it.each(Object.keys(urlSortParams))(
'returns the correct page params for sort key %s with beforeCursor',
(sortKey) => {
- const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ const firstPageSize = undefined;
+ const lastPageSize = PAGE_SIZE;
const afterCursor = undefined;
const beforeCursor = 'anotherRandomCursorString';
-
- expect(getInitialPageParams(sortKey, afterCursor, beforeCursor)).toEqual({
+ const pageParams = getInitialPageParams(
+ sortKey,
firstPageSize,
+ lastPageSize,
+ afterCursor,
beforeCursor,
- });
+ );
+
+ expect(pageParams).toEqual({ lastPageSize, beforeCursor });
},
);
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
index 057312828ff..84f01f10f21 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
@@ -10,6 +10,7 @@ import {
MISSING_MANIFEST_WARNING_TOOLTIP,
NOT_AVAILABLE_TEXT,
NOT_AVAILABLE_SIZE,
+ COPY_IMAGE_PATH_TITLE,
} from '~/packages_and_registries/container_registry/explorer/constants/index';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
@@ -150,7 +151,7 @@ describe('tags list row', () => {
expect(findClipboardButton().attributes()).toMatchObject({
text: tag.location,
- title: tag.location,
+ title: COPY_IMAGE_PATH_TITLE,
});
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
index 690d827ec67..979e1500d7d 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
@@ -13,6 +13,7 @@ import {
IMAGE_MIGRATING_STATE,
SCHEDULED_STATUS,
ROOT_IMAGE_TEXT,
+ COPY_IMAGE_PATH_TITLE,
} from '~/packages_and_registries/container_registry/explorer/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
@@ -106,7 +107,7 @@ describe('Image List Row', () => {
const button = findClipboardButton();
expect(button.exists()).toBe(true);
expect(button.props('text')).toBe(item.location);
- expect(button.props('title')).toBe(item.location);
+ expect(button.props('title')).toBe(COPY_IMAGE_PATH_TITLE);
});
describe('cleanup status component', () => {
diff --git a/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js b/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
index a61796dbed2..d503aff40b8 100644
--- a/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
+++ b/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
@@ -23,7 +23,6 @@ describe('Pipeline editor file nav', () => {
const createComponent = ({
appStatus = EDITOR_APP_STATUS_VALID,
isNewCiConfigFile = false,
- pipelineEditorFileTree = false,
} = {}) => {
mockApollo.clients.defaultClient.cache.writeQuery({
query: getAppStatus,
@@ -38,11 +37,6 @@ describe('Pipeline editor file nav', () => {
wrapper = extendedWrapper(
shallowMount(PipelineEditorFileNav, {
apolloProvider: mockApollo,
- provide: {
- glFeatures: {
- pipelineEditorFileTree,
- },
- },
propsData: {
isNewCiConfigFile,
},
@@ -66,24 +60,12 @@ describe('Pipeline editor file nav', () => {
it('renders the branch switcher', () => {
expect(findBranchSwitcher().exists()).toBe(true);
});
-
- it('does not render the file tree button', () => {
- expect(findFileTreeBtn().exists()).toBe(false);
- });
-
- it('does not render the file tree popover', () => {
- expect(findPopoverContainer().exists()).toBe(false);
- });
});
- describe('with pipelineEditorFileTree feature flag ON', () => {
+ describe('file tree', () => {
describe('when editor is in the empty state', () => {
beforeEach(() => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_EMPTY,
- isNewCiConfigFile: false,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_EMPTY, isNewCiConfigFile: false });
});
it('does not render the file tree button', () => {
@@ -97,11 +79,7 @@ describe('Pipeline editor file nav', () => {
describe('when user is about to create their config file for the first time', () => {
beforeEach(() => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_VALID,
- isNewCiConfigFile: true,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_VALID, isNewCiConfigFile: true });
});
it('does not render the file tree button', () => {
@@ -115,11 +93,7 @@ describe('Pipeline editor file nav', () => {
describe('when app is in a global loading state', () => {
it('renders the file tree button with a loading icon', () => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_LOADING,
- isNewCiConfigFile: false,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING, isNewCiConfigFile: false });
expect(findFileTreeBtn().exists()).toBe(true);
expect(findFileTreeBtn().attributes('loading')).toBe('true');
@@ -128,11 +102,7 @@ describe('Pipeline editor file nav', () => {
describe('when editor has a non-empty config file open', () => {
beforeEach(() => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_VALID,
- isNewCiConfigFile: false,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_VALID, isNewCiConfigFile: false });
});
it('renders the file tree button', () => {
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index bf0f7fd8c9f..6307a5aefe7 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -247,81 +247,63 @@ describe('Pipeline editor home wrapper', () => {
await nextTick();
};
- describe('with pipelineEditorFileTree feature flag OFF', () => {
+ describe('button toggle', () => {
beforeEach(() => {
- createComponent();
+ createComponent({
+ stubs: {
+ GlButton,
+ PipelineEditorFileNav,
+ },
+ });
});
- it('hides the file tree', () => {
- expect(findFileTreeBtn().exists()).toBe(false);
- expect(findPipelineEditorFileTree().exists()).toBe(false);
+ it('shows button toggle', () => {
+ expect(findFileTreeBtn().exists()).toBe(true);
});
- });
-
- describe('with pipelineEditorFileTree feature flag ON', () => {
- describe('button toggle', () => {
- beforeEach(() => {
- createComponent({
- glFeatures: {
- pipelineEditorFileTree: true,
- },
- stubs: {
- GlButton,
- PipelineEditorFileNav,
- },
- });
- });
-
- it('shows button toggle', () => {
- expect(findFileTreeBtn().exists()).toBe(true);
- });
- it('toggles the drawer on button click', async () => {
- await toggleFileTree();
+ it('toggles the drawer on button click', async () => {
+ await toggleFileTree();
- expect(findPipelineEditorFileTree().exists()).toBe(true);
+ expect(findPipelineEditorFileTree().exists()).toBe(true);
- await toggleFileTree();
+ await toggleFileTree();
- expect(findPipelineEditorFileTree().exists()).toBe(false);
- });
+ expect(findPipelineEditorFileTree().exists()).toBe(false);
+ });
- it('sets the display state in local storage', async () => {
- await toggleFileTree();
+ it('sets the display state in local storage', async () => {
+ await toggleFileTree();
- expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('true');
+ expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('true');
- await toggleFileTree();
+ await toggleFileTree();
- expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('false');
- });
+ expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('false');
});
+ });
- describe('when file tree display state is saved in local storage', () => {
- beforeEach(() => {
- localStorage.setItem(FILE_TREE_DISPLAY_KEY, 'true');
- createComponent({
- glFeatures: { pipelineEditorFileTree: true },
- stubs: { PipelineEditorFileNav },
- });
+ describe('when file tree display state is saved in local storage', () => {
+ beforeEach(() => {
+ localStorage.setItem(FILE_TREE_DISPLAY_KEY, 'true');
+ createComponent({
+ stubs: { PipelineEditorFileNav },
});
+ });
- it('shows the file tree by default', () => {
- expect(findPipelineEditorFileTree().exists()).toBe(true);
- });
+ it('shows the file tree by default', () => {
+ expect(findPipelineEditorFileTree().exists()).toBe(true);
});
+ });
- describe('when file tree display state is not saved in local storage', () => {
- beforeEach(() => {
- createComponent({
- glFeatures: { pipelineEditorFileTree: true },
- stubs: { PipelineEditorFileNav },
- });
+ describe('when file tree display state is not saved in local storage', () => {
+ beforeEach(() => {
+ createComponent({
+ stubs: { PipelineEditorFileNav },
});
+ });
- it('hides the file tree by default', () => {
- expect(findPipelineEditorFileTree().exists()).toBe(false);
- });
+ it('hides the file tree by default', () => {
+ expect(findPipelineEditorFileTree().exists()).toBe(false);
});
});
});
diff --git a/spec/initializers/validate_database_config_spec.rb b/spec/initializers/validate_database_config_spec.rb
index 5f3f950a852..23a3d9a2950 100644
--- a/spec/initializers/validate_database_config_spec.rb
+++ b/spec/initializers/validate_database_config_spec.rb
@@ -14,9 +14,6 @@ RSpec.describe 'validate database config' do
end
before do
- allow(File).to receive(:exist?).and_call_original
- allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false)
-
# The `AS::ConfigurationFile` calls `read` in `def initialize`
# thus we cannot use `expect_next_instance_of`
# rubocop:disable RSpec/AnyInstanceOf
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index f147e8204e6..97459d4a7be 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -311,6 +311,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
let(:table_name) { :_test_batched_migrations_test_table }
let(:column_name) { :some_id }
let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] }
+ let(:gitlab_schemas) { Gitlab::Database.gitlab_schemas_for_connection(connection) }
let(:migration_status) { :active }
@@ -358,7 +359,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'completes the migration' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
expect(batched_migration).to receive(:finalize!).and_call_original
@@ -399,7 +400,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'is a no-op' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
configuration = {
@@ -426,7 +427,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
context 'when the migration does not exist' do
it 'is a no-op' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
.and_return(nil)
configuration = {
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index a1c979bba50..8819171cfd0 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -78,23 +78,41 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '.active_migration' do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let!(:migration1) { create(:batched_background_migration, :finished) }
- context 'without migrations on hold' do
+ subject(:active_migration) { described_class.active_migration(connection: connection) }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ context 'when there are no migrations on hold' do
let!(:migration2) { create(:batched_background_migration, :active) }
let!(:migration3) { create(:batched_background_migration, :active) }
it 'returns the first active migration according to queue order' do
- expect(described_class.active_migration).to eq(migration2)
+ expect(active_migration).to eq(migration2)
end
end
- context 'with migrations are on hold' do
+ context 'when there are migrations on hold' do
let!(:migration2) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) }
let!(:migration3) { create(:batched_background_migration, :active, on_hold_until: 2.minutes.ago) }
it 'returns the first active migration that is not on hold according to queue order' do
- expect(described_class.active_migration).to eq(migration3)
+ expect(active_migration).to eq(migration3)
+ end
+ end
+
+ context 'when there are migrations not available for the current connection' do
+ let!(:migration2) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing) }
+ let!(:migration3) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_main) }
+
+ it 'returns the first active migration that is available for the current connection' do
+ expect(active_migration).to eq(migration3)
end
end
end
@@ -553,25 +571,43 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '.for_configuration' do
- let!(:migration) do
- create(
- :batched_background_migration,
+ let!(:attributes) do
+ {
job_class_name: 'MyJobClass',
table_name: :projects,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
- )
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_main
+ }
end
+ let!(:migration) { create(:batched_background_migration, attributes) }
+
before do
- create(:batched_background_migration, job_class_name: 'OtherClass')
- create(:batched_background_migration, table_name: 'other_table')
- create(:batched_background_migration, column_name: 'other_column')
- create(:batched_background_migration, job_arguments: %w[other arguments])
+ create(:batched_background_migration, attributes.merge(job_class_name: 'OtherClass'))
+ create(:batched_background_migration, attributes.merge(table_name: 'other_table'))
+ create(:batched_background_migration, attributes.merge(column_name: 'other_column'))
+ create(:batched_background_migration, attributes.merge(job_arguments: %w[other arguments]))
end
it 'finds the migration matching the given configuration parameters' do
- actual = described_class.for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+ actual = described_class.for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+
+ expect(actual).to contain_exactly(migration)
+ end
+
+ it 'filters by gitlab schemas available for the connection' do
+ actual = described_class.for_configuration(:gitlab_ci, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+
+ expect(actual).to be_empty
+ end
+
+ it 'does not filter by gitlab schemas available for the connection if the column is not present' do
+ skip_if_multiple_databases_not_setup
+
+ expect(described_class).to receive(:gitlab_schema_column_exists?).and_return(false)
+
+ actual = described_class.for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
expect(actual).to contain_exactly(migration)
end
@@ -579,7 +615,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '.find_for_configuration' do
it 'returns nil if such a migration does not exist' do
- expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
+ expect(described_class.find_for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
end
it 'returns the migration when it exists' do
@@ -588,10 +624,25 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
job_class_name: 'MyJobClass',
table_name: :projects,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_main
)
- expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ expect(described_class.find_for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ end
+ end
+
+ describe '.for_gitlab_schema' do
+ let!(:migration) { create(:batched_background_migration, gitlab_schema: :gitlab_main) }
+
+ before do
+ create(:batched_background_migration, gitlab_schema: :gitlab_not_existing)
+ end
+
+ it 'finds the migrations matching the given gitlab schema' do
+ actual = described_class.for_gitlab_schema(:gitlab_main)
+
+ expect(actual).to contain_exactly(migration)
end
end
end
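
For orientation, the examples above exercise schema-aware lookups on the batched migration model. A minimal sketch of what such scopes could look like, assuming a plain ActiveRecord model with a gitlab_schema column (the actual GitLab implementation is not shown in this diff and may differ):

    # Sketch only; serialization of job_arguments and other details are elided.
    scope :for_gitlab_schema, ->(schemas) { where(gitlab_schema: Array(schemas)) }

    def self.for_configuration(gitlab_schemas, job_class_name, table_name, column_name, job_arguments)
      relation = where(job_class_name: job_class_name, table_name: table_name,
                       column_name: column_name, job_arguments: job_arguments)

      # The "column is not present" example above bypasses this filter on older schemas.
      relation = relation.for_gitlab_schema(gitlab_schemas) if gitlab_schema_column_exists?
      relation
    end

    def self.find_for_configuration(gitlab_schemas, *configuration)
      for_configuration(gitlab_schemas, *configuration).first
    end
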
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 04fe1fad10e..5d9963bf3ea 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2242,10 +2242,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
}
end
+ let(:migration_attributes) do
+ configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(model.connection).first)
+ end
+
subject(:ensure_batched_background_migration_is_finished) { model.ensure_batched_background_migration_is_finished(**configuration) }
it 'raises an error when migration exists and is not marked as finished' do
- create(:batched_background_migration, :active, configuration)
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
+ create(:batched_background_migration, :active, migration_attributes)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
@@ -2265,13 +2271,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'does not raise error when migration exists and is marked as finished' do
- create(:batched_background_migration, :finished, configuration)
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :finished, migration_attributes)
expect { ensure_batched_background_migration_is_finished }
.not_to raise_error
end
it 'logs a warning when migration does not exist' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else))
+
expect(Gitlab::AppLogger).to receive(:warn)
.with("Could not find batched background migration for the given configuration: #{configuration}")
@@ -2280,6 +2292,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'finalizes the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
migration = create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
@@ -2291,6 +2305,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when the flag finalize is false' do
it 'does not finalize the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index d1a66036149..87b6c65bf7e 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
before do
allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
end
context 'when such migration already exists' do
@@ -27,7 +28,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_class_name: 'MyBatchClass',
batch_size: 200,
sub_batch_size: 20,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_ci
)
expect do
@@ -41,7 +43,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_max_value: 1000,
batch_class_name: 'MyBatchClass',
batch_size: 100,
- sub_batch_size: 10)
+ sub_batch_size: 10,
+ gitlab_schema: :gitlab_ci)
end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
end
end
@@ -60,7 +63,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_class_name: 'MyBatchClass',
batch_size: 100,
max_batch_size: 10000,
- sub_batch_size: 10)
+ sub_batch_size: 10,
+ gitlab_schema: :gitlab_ci)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
@@ -76,7 +80,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
sub_batch_size: 10,
job_arguments: %w[],
status_name: :active,
- total_tuple_count: pgclass_info.cardinality_estimate)
+ total_tuple_count: pgclass_info.cardinality_estimate,
+ gitlab_schema: 'gitlab_ci')
end
context 'when the job interval is lower than the minimum' do
@@ -162,11 +167,29 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
end
end
+
+ context 'when gitlab_schema is not given' do
+ it 'fetches gitlab_schema from the migration context' do
+ expect(migration).to receive(:gitlab_schema_from_context).and_return(:gitlab_ci)
+
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.gitlab_schema).to eq('gitlab_ci')
+ end
+ end
end
describe '#finalize_batched_background_migration' do
let!(:batched_migration) { create(:batched_background_migration, job_class_name: 'MyClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ before do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+ end
+
it 'finalizes the migration' do
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
expect(runner).to receive(:finalize).with('MyClass', :projects, :id, [])
@@ -204,4 +227,78 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
end
end
+
+ describe '#delete_batched_background_migration' do
+ before do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+ end
+
+ context 'when migration exists' do
+ it 'deletes it' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'MyJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect do
+ migration.delete_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [[:id], [:id_convert_to_bigint]])
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.from(1).to(0)
+ end
+ end
+
+ context 'when migration does not exist' do
+ it 'does nothing' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'SomeOtherJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect do
+ migration.delete_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [[:id], [:id_convert_to_bigint]])
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
+ end
+ end
+ end
+
+ describe '#gitlab_schema_from_context' do
+ context 'when allowed_gitlab_schemas is not available' do
+ it 'defaults to :gitlab_main' do
+ expect(migration.gitlab_schema_from_context).to eq(:gitlab_main)
+ end
+ end
+
+ context 'when allowed_gitlab_schemas is available' do
+ it 'uses schema from allowed_gitlab_schema' do
+ expect(migration).to receive(:allowed_gitlab_schemas).and_return([:gitlab_ci])
+
+ expect(migration.gitlab_schema_from_context).to eq(:gitlab_ci)
+ end
+ end
+ end
end
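
The last describe block pins down #gitlab_schema_from_context: it defaults to :gitlab_main unless the migration declares allowed_gitlab_schemas. A hedged sketch consistent with those two examples (internals assumed, not taken from the source):

    # Sketch; only the behaviour asserted in the spec above is modelled.
    def gitlab_schema_from_context
      if respond_to?(:allowed_gitlab_schemas) && allowed_gitlab_schemas.present?
        Array(allowed_gitlab_schemas).first
      else
        :gitlab_main
      end
    end
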
diff --git a/spec/lib/gitlab/patch/database_config_spec.rb b/spec/lib/gitlab/patch/database_config_spec.rb
index 73dc84bb2ef..b06d28dbcd5 100644
--- a/spec/lib/gitlab/patch/database_config_spec.rb
+++ b/spec/lib/gitlab/patch/database_config_spec.rb
@@ -11,9 +11,6 @@ RSpec.describe Gitlab::Patch::DatabaseConfig do
let(:configuration) { Rails::Application::Configuration.new(Rails.root) }
before do
- allow(File).to receive(:exist?).and_call_original
- allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false)
-
# The `AS::ConfigurationFile` calls `read` in `def initialize`
# thus we cannot use `expect_next_instance_of`
# rubocop:disable RSpec/AnyInstanceOf
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index 8d5a39baf77..098a58bff83 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::SubscriptionPortal do
using RSpec::Parameterized::TableSyntax
+ include SubscriptionPortalHelper
let(:env_value) { nil }
@@ -13,9 +14,9 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
describe '.default_subscriptions_url' do
where(:test, :development, :result) do
- false | false | 'https://customers.gitlab.com'
- false | true | 'https://customers.staging.gitlab.com'
- true | false | 'https://customers.staging.gitlab.com'
+ false | false | prod_customers_url
+ false | true | staging_customers_url
+ true | false | staging_customers_url
end
before do
@@ -34,7 +35,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
subject { described_class.subscriptions_url }
context 'when CUSTOMER_PORTAL_URL ENV is unset' do
- it { is_expected.to eq('https://customers.staging.gitlab.com') }
+ it { is_expected.to eq(staging_customers_url) }
end
context 'when CUSTOMER_PORTAL_URL ENV is set' do
@@ -54,17 +55,17 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
context 'url methods' do
where(:method_name, :result) do
- :default_subscriptions_url | 'https://customers.staging.gitlab.com'
- :payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation'
- :payment_validation_form_id | 'payment_method_validation'
- :registration_validation_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_registration_validation'
- :subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql'
- :subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes'
- :subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage'
- :subscriptions_manage_url | 'https://customers.staging.gitlab.com/subscriptions'
- :subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review'
- :subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans'
- :edit_account_url | 'https://customers.staging.gitlab.com/customers/edit'
+ :default_subscriptions_url | staging_customers_url
+ :payment_form_url | "#{staging_customers_url}/payment_forms/cc_validation"
+ :payment_validation_form_id | 'payment_method_validation'
+ :registration_validation_form_url | "#{staging_customers_url}/payment_forms/cc_registration_validation"
+ :subscriptions_graphql_url | "#{staging_customers_url}/graphql"
+ :subscriptions_more_minutes_url | "#{staging_customers_url}/buy_pipeline_minutes"
+ :subscriptions_more_storage_url | "#{staging_customers_url}/buy_storage"
+ :subscriptions_manage_url | "#{staging_customers_url}/subscriptions"
+ :subscriptions_instance_review_url | "#{staging_customers_url}/instance_review"
+ :subscriptions_gitlab_plans_url | "#{staging_customers_url}/gitlab_plans"
+ :edit_account_url | "#{staging_customers_url}/customers/edit"
end
with_them do
@@ -79,7 +80,10 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/extra_seats"
+ is_expected.to eq(url)
+ end
end
describe '.upgrade_subscription_url' do
@@ -88,7 +92,10 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
let(:plan_id) { 5 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}"
+ is_expected.to eq(url)
+ end
end
describe '.renew_subscription_url' do
@@ -96,6 +103,9 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/renew") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/renew"
+ is_expected.to eq(url)
+ end
end
end
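
These expectations now go through SubscriptionPortalHelper (spec/support/helpers/subscription_portal_helper.rb, listed in the diffstat but not shown in this section). Judging from the literals the old expectations hard-coded, the helper presumably boils down to something like this sketch:

    # Assumed contents, inferred from the removed URL literals above.
    module SubscriptionPortalHelper
      def staging_customers_url
        'https://customers.staging.gitlab.com'
      end

      def prod_customers_url
        'https://customers.gitlab.com'
      end
    end
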
diff --git a/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb b/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb
new file mode 100644
index 00000000000..5002c665c79
--- /dev/null
+++ b/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddGitlabSchemaToBatchedBackgroundMigrations do
+ it 'sets gitlab_schema for existing records to "gitlab_main" and leaves the column default as NULL' do
+ batched_migrations = table(:batched_background_migrations)
+ batched_migration = batched_migrations.create!(
+ id: 1, created_at: Time.now, updated_at: Time.now,
+ max_value: 100, batch_size: 100, sub_batch_size: 10, interval: 120,
+ job_class_name: 'TestJob', table_name: '_test', column_name: 'id'
+ )
+
+ reversible_migration do |migration|
+ migration.before -> {
+ batched_migrations.reset_column_information
+ column = batched_migrations.columns.find { |column| column.name == 'gitlab_schema' }
+
+ expect(column).to be_nil
+ }
+
+ migration.after -> {
+ expect(batched_migration.reload.gitlab_schema).to eq('gitlab_main')
+
+ batched_migrations.reset_column_information
+ column = batched_migrations.columns.find { |column| column.name == 'gitlab_schema' }
+
+ expect(column).to be
+ expect(column.default).to be_nil
+ }
+ end
+ end
+end
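
The spec above describes a reversible migration that backfills gitlab_schema with 'gitlab_main' for existing rows while leaving the column default as NULL. One common way to get that effect, shown here only as an illustrative sketch (base class version and column type assumed):

    # Illustrative sketch, not the shipped migration.
    class AddGitlabSchemaToBatchedBackgroundMigrations < Gitlab::Database::Migration[2.0]
      def up
        # Adding the column with a temporary default backfills existing rows;
        # dropping the default afterwards leaves it NULL for new rows.
        add_column :batched_background_migrations, :gitlab_schema, :text, default: 'gitlab_main'
        change_column_default :batched_background_migrations, :gitlab_schema, nil
      end

      def down
        remove_column :batched_background_migrations, :gitlab_schema
      end
    end
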
diff --git a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
index 8bc336a6b26..575157f8331 100644
--- a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
+++ b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
@@ -4,8 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe FixAutomaticIterationsCadencesStartDate,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/362446' do
+RSpec.describe FixAutomaticIterationsCadencesStartDate do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:sprints) { table(:sprints) }
diff --git a/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb b/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb
new file mode 100644
index 00000000000..fa94a73582d
--- /dev/null
+++ b/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveWebHooksWebHookLogsWebHookIdFk do
+ let(:web_hooks) { table(:web_hooks) }
+ let(:logs) { table(:web_hook_logs) }
+
+ let!(:hook) { web_hooks.create! }
+
+ let!(:log_a) { logs.create!(web_hook_id: hook.id, response_body: 'msg-a') }
+ let!(:log_b) { logs.create!(web_hook_id: hook.id, response_body: 'msg-b') }
+
+ describe '#up' do
+ it 'allows us to delete web-hooks and leave web-hook logs intact' do
+ migrate!
+
+ expect { hook.delete }.not_to change(logs, :count)
+
+ expect(logs.pluck(:response_body)).to match_array %w[msg-a msg-b]
+ end
+ end
+
+ describe '#down' do
+ it 'ensures referential integrity of hook logs' do
+ migrate!
+ schema_migrate_down!
+
+ expect { hook.delete }.to change(logs, :count).by(-2)
+ end
+ end
+end
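
This spec exercises the migration that drops the web_hook_logs -> web_hooks foreign key (the same key now listed as intentionally missing in schema_spec.rb above), so deleting a hook no longer cascades over its logs. As a rough sketch of the shape such a migration usually takes with GitLab's helpers (constraint name and options assumed):

    # Sketch only; the real migration may name the constraint differently.
    class RemoveWebHooksWebHookLogsWebHookIdFk < Gitlab::Database::Migration[2.0]
      disable_ddl_transaction!

      FK_NAME = 'fk_web_hook_logs_web_hook_id' # assumed

      def up
        with_lock_retries do
          remove_foreign_key_if_exists(:web_hook_logs, :web_hooks, name: FK_NAME)
        end
      end

      def down
        # Restoring the cascading key is why #down makes hook deletion remove the logs again.
        add_concurrent_foreign_key(:web_hook_logs, :web_hooks, column: :web_hook_id, on_delete: :cascade, name: FK_NAME)
      end
    end
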
diff --git a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
index fdafc4a5a89..198644fe183 100644
--- a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
+++ b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration, schema: 20220304052335 do
+RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:sprints) { table(:sprints) }
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index dd954e08156..290cf500852 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -130,11 +130,11 @@ RSpec.describe WebHook do
end
describe '#destroy' do
- it 'cascades to web_hook_logs' do
+ it 'does not cascade to web_hook_logs' do
web_hook = create(:project_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
- expect { web_hook.destroy! }.to change(web_hook.web_hook_logs, :count).by(-3)
+ expect { web_hook.destroy! }.not_to change(web_hook.web_hook_logs, :count)
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index d40c78b5b60..32639b3e7d2 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -5077,4 +5077,57 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(assignees).to match_array([subject.merge_request_assignees[0]])
end
end
+
+ describe '#recent_diff_head_shas' do
+ let_it_be(:merge_request_with_diffs) do
+ params = {
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'feature'
+ }
+
+ create(:merge_request, params).tap do |mr|
+ 4.times { mr.merge_request_diffs.create! }
+ mr.create_merge_head_diff
+ end
+ end
+
+ let(:shas) do
+ # re-find to avoid caching the association
+ described_class.find(merge_request_with_diffs.id).merge_request_diffs.order(id: :desc).pluck(:head_commit_sha)
+ end
+
+ shared_examples 'correctly sorted and limited diff_head_shas' do
+ it 'has up to MAX_RECENT_DIFF_HEAD_SHAS, ordered most recent first' do
+ stub_const('MergeRequest::MAX_RECENT_DIFF_HEAD_SHAS', 3)
+
+ expect(subject.recent_diff_head_shas).to eq(shas.first(3))
+ end
+
+ it 'supports limits' do
+ expect(subject.recent_diff_head_shas(2)).to eq(shas.first(2))
+ end
+ end
+
+ context 'when the association is not loaded' do
+ subject(:mr) { merge_request_with_diffs }
+
+ include_examples 'correctly sorted and limited diff_head_shas'
+ end
+
+ context 'when the association is loaded' do
+ subject(:mr) do
+ described_class.where(id: merge_request_with_diffs.id).preload(:merge_request_diffs).first
+ end
+
+ include_examples 'correctly sorted and limited diff_head_shas'
+
+ it 'does not issue any queries' do
+ expect(subject).to be_a(described_class) # preload here
+
+ expect { subject.recent_diff_head_shas }.not_to exceed_query_limit(0)
+ end
+ end
+ end
end
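
The shared examples above fix the contract of #recent_diff_head_shas: at most MAX_RECENT_DIFF_HEAD_SHAS entries, newest first, an optional explicit limit, and no extra queries when merge_request_diffs is already preloaded. A minimal sketch that satisfies that contract (not the actual implementation):

    # Sketch; the constant and ordering column come from the expectations above.
    def recent_diff_head_shas(limit = MAX_RECENT_DIFF_HEAD_SHAS)
      if merge_request_diffs.loaded?
        # Work on the in-memory records so the "does not issue any queries" example holds.
        merge_request_diffs.sort_by(&:id).last(limit).reverse.map(&:head_commit_sha)
      else
        merge_request_diffs.order(id: :desc).limit(limit).pluck(:head_commit_sha)
      end
    end
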
diff --git a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
index 820a5d818c7..4dc272b5c2e 100644
--- a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:author) { create(:user) }
+ let_it_be(:mr_nodes_path) { [:data, :project, :merge_requests, :nodes] }
let_it_be(:merge_requests) do
[
create(:merge_request, author: author, source_project: project),
@@ -33,8 +34,49 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
GQL
end
- def run_query(first = nil)
- post_graphql(query, current_user: author, variables: { path: project.full_path, first: first })
+ before do
+ merge_requests.first(2).each do |mr|
+ shas = mr.recent_diff_head_shas
+
+ shas.each do |sha|
+ create(:ci_pipeline, :success, project: project, ref: mr.source_branch, sha: sha)
+ end
+ end
+ end
+
+ it 'produces correct results' do
+ r = run_query(3)
+
+ nodes = graphql_dig_at(r, *mr_nodes_path)
+
+ expect(nodes).to all(match('iid' => be_present, 'pipelines' => include('count' => be_a(Integer))))
+ expect(graphql_dig_at(r, *mr_nodes_path, :pipelines, :count)).to contain_exactly(1, 1, 0)
+ end
+
+ it 'is scalable', :request_store, :use_clean_rails_memory_store_caching do
+ baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
+
+ expect { run_query(2) }.not_to exceed_query_limit(baseline)
+ end
+ end
+
+ describe '.nodes' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $first: Int) {
+ project(fullPath: $path) {
+ mergeRequests(first: $first) {
+ nodes {
+ iid
+ pipelines {
+ count
+ nodes { id }
+ }
+ }
+ }
+ }
+ }
+ GQL
end
before do
@@ -48,18 +90,27 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
end
it 'produces correct results' do
- run_query(2)
-
- p_nodes = graphql_data_at(:project, :merge_requests, :nodes)
+ r = run_query
- expect(p_nodes).to all(match('iid' => be_present, 'pipelines' => match('count' => 1)))
+ expect(graphql_dig_at(r, *mr_nodes_path, :pipelines, :nodes, :id).uniq.size).to eq 3
end
it 'is scalable', :request_store, :use_clean_rails_memory_store_caching do
- # warm up
- run_query
+ baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
- expect { run_query(2) }.to(issue_same_number_of_queries_as { run_query(1) }.ignoring_cached_queries)
+ expect { run_query(2) }.not_to exceed_query_limit(baseline)
end
+
+ it 'requests merge_request_diffs at most once' do
+ r = ActiveRecord::QueryRecorder.new { run_query(2) }
+
+ expect(r.log.grep(/merge_request_diffs/)).to contain_exactly(a_string_including('SELECT'))
+ end
+ end
+
+ def run_query(first = nil)
+ run_with_clean_state(query,
+ context: { current_user: author },
+ variables: { path: project.full_path, first: first })
end
end
diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb
index 84f16446bc4..ea50c404d92 100644
--- a/spec/requests/api/usage_data_spec.rb
+++ b/spec/requests/api/usage_data_spec.rb
@@ -7,9 +7,7 @@ RSpec.describe API::UsageData do
describe 'POST /usage_data/increment_counter' do
let(:endpoint) { '/usage_data/increment_counter' }
- let(:known_event) { "#{known_event_prefix}_#{known_event_postfix}" }
- let(:known_event_prefix) { "static_site_editor" }
- let(:known_event_postfix) { 'commits' }
+ let(:known_event) { "diff_searches" }
let(:unknown_event) { 'unknown' }
context 'without CSRF token' do
@@ -57,27 +55,18 @@ RSpec.describe API::UsageData do
end
context 'with correct params' do
- using RSpec::Parameterized::TableSyntax
-
- where(:prefix, :event) do
- 'static_site_editor' | 'merge_requests'
- 'static_site_editor' | 'commits'
- end
-
before do
stub_application_setting(usage_ping_enabled: true)
stub_feature_flags(usage_data_api: true)
allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(true)
end
- with_them do
- it 'returns status :ok' do
- expect(Gitlab::UsageDataCounters::BaseCounter).to receive(:count).with(event)
+ it 'returns status :ok' do
+ expect(Gitlab::UsageDataCounters::BaseCounter).to receive(:count).with("searches")
- post api(endpoint, user), params: { event: "#{prefix}_#{event}" }
+ post api(endpoint, user), params: { event: known_event }
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
diff --git a/spec/requests/mailgun/webhooks_controller_spec.rb b/spec/requests/mailgun/webhooks_controller_spec.rb
new file mode 100644
index 00000000000..ae6dc89d003
--- /dev/null
+++ b/spec/requests/mailgun/webhooks_controller_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mailgun::WebhooksController do
+ let(:mailgun_signing_key) { 'abc123' }
+ let(:valid_signature) do
+ {
+ timestamp: "1625056677",
+ token: "eb944d0ace7227667a1b97d2d07276ae51d2b849ed2cfa68f3",
+ signature: "9790cc6686eb70f0b1f869180d906870cdfd496d27fee81da0aa86b9e539e790"
+ }
+ end
+
+ let(:event_data) { {} }
+
+ before do
+ stub_application_setting(mailgun_events_enabled: true, mailgun_signing_key: mailgun_signing_key)
+ end
+
+ def post_request(override_params = {})
+ post mailgun_webhooks_path, params: standard_params.merge(override_params)
+ end
+
+ describe '#process_webhook' do
+ it 'returns 406 when integration is not enabled' do
+ stub_application_setting(mailgun_events_enabled: false)
+
+ post_request
+
+ expect(response).to have_gitlab_http_status(:not_acceptable)
+ end
+
+ it 'returns 404 when signing key is not configured' do
+ stub_application_setting(mailgun_signing_key: nil)
+
+ post_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 404 when signature invalid' do
+ post_request(
+ 'signature' => valid_signature.merge('signature' => 'xxx')
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 200 when signature is valid' do
+ post_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'invite email failures' do
+ let_it_be(:member) { create(:project_member, :invited) }
+
+ let(:event_data) do
+ {
+ 'event': 'failed',
+ 'severity': 'permanent',
+ 'tags': [Members::Mailgun::INVITE_EMAIL_TAG],
+ 'user-variables': {
+ Members::Mailgun::INVITE_EMAIL_TOKEN_KEY => member.raw_invite_token
+ }
+ }
+ end
+
+ it 'marks the member invite email success as false' do
+ expect { post_request }.to change { member.reload.invite_email_success }.from(true).to(false)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'supports legacy URL' do
+ expect do
+ post members_mailgun_permanent_failures_path, params: {
+ 'signature' => valid_signature,
+ 'event-data' => event_data
+ }
+ end.to change { member.reload.invite_email_success }.from(true).to(false)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'does not change the invite status if failure is temporary' do
+ expect do
+ post_request({ 'event-data' => event_data.merge(severity: 'temporary') })
+ end.not_to change { member.reload.invite_email_success }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ def standard_params
+ {
+ "signature": valid_signature,
+ "event-data": {
+ "severity": "permanent",
+ "tags": ["invite_email"],
+ "timestamp": 1521233195.375624,
+ "storage": {
+ "url": "_anything_",
+ "key": "_anything_"
+ },
+ "log-level": "error",
+ "id": "_anything_",
+ "campaigns": [],
+ "reason": "suppress-bounce",
+ "user-variables": {
+ "invite_token": '12345'
+ },
+ "flags": {
+ "is-routed": false,
+ "is-authenticated": true,
+ "is-system-test": false,
+ "is-test-mode": false
+ },
+ "recipient-domain": "example.com",
+ "envelope": {
+ "sender": "bob@mg.gitlab.com",
+ "transport": "smtp",
+ "targets": "alice@example.com"
+ },
+ "message": {
+ "headers": {
+ "to": "Alice <alice@example.com>",
+ "message-id": "20130503192659.13651.20287@mg.gitlab.com",
+ "from": "Bob <bob@mg.gitlab.com>",
+ "subject": "Test permanent_fail webhook"
+ },
+ "attachments": [],
+ "size": 111
+ },
+ "recipient": "alice@example.com",
+ "event": "failed",
+ "delivery-status": {
+ "attempt-no": 1,
+ "message": "",
+ "code": 605,
+ "description": "Not delivering to previously bounced address",
+ "session-seconds": 0
+ }
+ }.merge(event_data)
+ }
+ end
+end
diff --git a/spec/requests/members/mailgun/permanent_failure_spec.rb b/spec/requests/members/mailgun/permanent_failure_spec.rb
deleted file mode 100644
index e47aedf8e94..00000000000
--- a/spec/requests/members/mailgun/permanent_failure_spec.rb
+++ /dev/null
@@ -1,128 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'receive a permanent failure' do
- describe 'POST /members/mailgun/permanent_failures', :aggregate_failures do
- let_it_be(:member) { create(:project_member, :invited) }
-
- let(:raw_invite_token) { member.raw_invite_token }
- let(:mailgun_events) { true }
- let(:mailgun_signing_key) { 'abc123' }
-
- subject(:post_request) { post members_mailgun_permanent_failures_path(standard_params) }
-
- before do
- stub_application_setting(mailgun_events_enabled: mailgun_events, mailgun_signing_key: mailgun_signing_key)
- end
-
- it 'marks the member invite email success as false' do
- expect { post_request }.to change { member.reload.invite_email_success }.from(true).to(false)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- context 'when the change to a member is not made' do
- context 'with incorrect signing key' do
- context 'with incorrect signing key' do
- let(:mailgun_signing_key) { '_foobar_' }
-
- it 'does not change member status and responds as not_found' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with nil signing key' do
- let(:mailgun_signing_key) { nil }
-
- it 'does not change member status and responds as not_found' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'when the feature is not enabled' do
- let(:mailgun_events) { false }
-
- it 'does not change member status and responds as expected' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_acceptable)
- end
- end
-
- context 'when it is not an invite email' do
- before do
- stub_const('::Members::Mailgun::INVITE_EMAIL_TAG', '_foobar_')
- end
-
- it 'does not change member status and responds as expected' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_acceptable)
- end
- end
- end
-
- def standard_params
- {
- "signature": {
- "timestamp": "1625056677",
- "token": "eb944d0ace7227667a1b97d2d07276ae51d2b849ed2cfa68f3",
- "signature": "9790cc6686eb70f0b1f869180d906870cdfd496d27fee81da0aa86b9e539e790"
- },
- "event-data": {
- "severity": "permanent",
- "tags": ["invite_email"],
- "timestamp": 1521233195.375624,
- "storage": {
- "url": "_anything_",
- "key": "_anything_"
- },
- "log-level": "error",
- "id": "_anything_",
- "campaigns": [],
- "reason": "suppress-bounce",
- "user-variables": {
- "invite_token": raw_invite_token
- },
- "flags": {
- "is-routed": false,
- "is-authenticated": true,
- "is-system-test": false,
- "is-test-mode": false
- },
- "recipient-domain": "example.com",
- "envelope": {
- "sender": "bob@mg.gitlab.com",
- "transport": "smtp",
- "targets": "alice@example.com"
- },
- "message": {
- "headers": {
- "to": "Alice <alice@example.com>",
- "message-id": "20130503192659.13651.20287@mg.gitlab.com",
- "from": "Bob <bob@mg.gitlab.com>",
- "subject": "Test permanent_fail webhook"
- },
- "attachments": [],
- "size": 111
- },
- "recipient": "alice@example.com",
- "event": "failed",
- "delivery-status": {
- "attempt-no": 1,
- "message": "",
- "code": 605,
- "description": "Not delivering to previously bounced address",
- "session-seconds": 0
- }
- }
- }
- end
- end
-end
diff --git a/spec/rubocop/formatter/todo_formatter_spec.rb b/spec/rubocop/formatter/todo_formatter_spec.rb
index e1b1de33bfe..fcff028f07d 100644
--- a/spec/rubocop/formatter/todo_formatter_spec.rb
+++ b/spec/rubocop/formatter/todo_formatter_spec.rb
@@ -14,17 +14,18 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
let(:real_tmp_dir) { File.join(tmp_dir, 'real') }
let(:symlink_tmp_dir) { File.join(tmp_dir, 'symlink') }
let(:rubocop_todo_dir) { "#{symlink_tmp_dir}/.rubocop_todo" }
- let(:options) { { rubocop_todo_dir: rubocop_todo_dir } }
let(:todo_dir) { RuboCop::TodoDir.new(rubocop_todo_dir) }
- subject(:formatter) { described_class.new(stdout, options) }
+ subject(:formatter) { described_class.new(stdout) }
around do |example|
FileUtils.mkdir(real_tmp_dir)
FileUtils.symlink(real_tmp_dir, symlink_tmp_dir)
Dir.chdir(symlink_tmp_dir) do
- example.run
+ described_class.with_base_directory(rubocop_todo_dir) do
+ example.run
+ end
end
end
@@ -38,8 +39,6 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
let(:offense_autocorrect) { fake_offense('B/AutoCorrect') }
before do
- stub_const("#{described_class}::MAX_OFFENSE_COUNT", 1)
-
stub_rubocop_registry(
'A/Offense' => { autocorrectable: false },
'B/AutoCorrect' => { autocorrectable: true }
diff --git a/spec/rubocop/todo_dir_spec.rb b/spec/rubocop/todo_dir_spec.rb
index ae59def885d..a5c12e23896 100644
--- a/spec/rubocop/todo_dir_spec.rb
+++ b/spec/rubocop/todo_dir_spec.rb
@@ -42,12 +42,6 @@ RSpec.describe RuboCop::TodoDir do
end
end
- describe '#directory' do
- subject { todo_dir.directory }
-
- it { is_expected.to eq(directory) }
- end
-
describe '#read' do
let(:content) { 'a' }
diff --git a/spec/scripts/lib/glfm/update_specification_spec.rb b/spec/scripts/lib/glfm/update_specification_spec.rb
index 941883cd1e0..e8d34b13efa 100644
--- a/spec/scripts/lib/glfm/update_specification_spec.rb
+++ b/spec/scripts/lib/glfm/update_specification_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
let(:ghfm_spec_txt_uri) { described_class::GHFM_SPEC_TXT_URI }
let(:ghfm_spec_txt_uri_io) { StringIO.new(ghfm_spec_txt_contents) }
let(:ghfm_spec_txt_path) { described_class::GHFM_SPEC_TXT_PATH }
- let(:ghfm_spec_txt_local_io) { StringIO.new }
+ let(:ghfm_spec_txt_local_io) { StringIO.new(ghfm_spec_txt_contents) }
let(:glfm_intro_txt_path) { described_class::GLFM_INTRO_TXT_PATH }
let(:glfm_intro_txt_io) { StringIO.new(glfm_intro_txt_contents) }
@@ -71,11 +71,15 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
end
before do
+ # Mock default ENV var values. Declare the unconstrained stub first so the
+ # `with`-constrained stub below takes precedence for UPDATE_GHFM_SPEC_TXT.
+ allow(ENV).to receive(:[]).and_call_original
+ allow(ENV).to receive(:[]).with('UPDATE_GHFM_SPEC_TXT').and_return(nil)
+
# We mock out the URI and local file IO objects with real StringIO, instead of just mock
# objects. This gives better and more realistic coverage, while still avoiding
# actual network and filesystem I/O during the spec run.
allow(URI).to receive(:open).with(ghfm_spec_txt_uri) { ghfm_spec_txt_uri_io }
- allow(File).to receive(:open).with(ghfm_spec_txt_path, 'w') { ghfm_spec_txt_local_io }
+ allow(File).to receive(:open).with(ghfm_spec_txt_path) { ghfm_spec_txt_local_io }
allow(File).to receive(:open).with(glfm_intro_txt_path) { glfm_intro_txt_io }
allow(File).to receive(:open).with(glfm_examples_txt_path) { glfm_examples_txt_io }
allow(File).to receive(:open).with(glfm_spec_txt_path, 'w') { glfm_spec_txt_io }
@@ -85,45 +89,64 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
end
describe 'retrieving latest GHFM spec.txt' do
- context 'with success' do
- it 'downloads and saves' do
+ context 'when UPDATE_GHFM_SPEC_TXT is not true (default)' do
+ it 'does not download' do
+ expect(URI).not_to receive(:open).with(ghfm_spec_txt_uri)
+
subject.process
expect(reread_io(ghfm_spec_txt_local_io)).to eq(ghfm_spec_txt_contents)
end
end
- context 'with error handling' do
- context 'with a version mismatch' do
- let(:ghfm_spec_txt_contents) do
- <<~GHFM_SPEC_TXT_CONTENTS
- ---
- title: GitHub Flavored Markdown Spec
- version: 0.30
- ...
- GHFM_SPEC_TXT_CONTENTS
- end
+ context 'when UPDATE_GHFM_SPEC_TXT is true' do
+ let(:ghfm_spec_txt_local_io) { StringIO.new }
- it 'raises an error' do
- expect { subject.process }.to raise_error /version mismatch.*expected.*29.*got.*30/i
- end
+ before do
+ allow(ENV).to receive(:[]).with('UPDATE_GHFM_SPEC_TXT').and_return('true')
+ allow(File).to receive(:open).with(ghfm_spec_txt_path, 'w') { ghfm_spec_txt_local_io }
end
- context 'with a failed read of file lines' do
- let(:ghfm_spec_txt_contents) { '' }
+ context 'with success' do
+ it 'downloads and saves' do
+ subject.process
- it 'raises an error if lines cannot be read' do
- expect { subject.process }.to raise_error /unable to read lines/i
+ expect(reread_io(ghfm_spec_txt_local_io)).to eq(ghfm_spec_txt_contents)
end
end
- context 'with a failed re-read of file string' do
- before do
- allow(ghfm_spec_txt_uri_io).to receive(:read).and_return(nil)
+ context 'with error handling' do
+ context 'with a version mismatch' do
+ let(:ghfm_spec_txt_contents) do
+ <<~GHFM_SPEC_TXT_CONTENTS
+ ---
+ title: GitHub Flavored Markdown Spec
+ version: 0.30
+ ...
+ GHFM_SPEC_TXT_CONTENTS
+ end
+
+ it 'raises an error' do
+ expect { subject.process }.to raise_error /version mismatch.*expected.*29.*got.*30/i
+ end
end
- it 'raises an error if file is blank' do
- expect { subject.process }.to raise_error /unable to read string/i
+ context 'with a failed read of file lines' do
+ let(:ghfm_spec_txt_contents) { '' }
+
+ it 'raises an error if lines cannot be read' do
+ expect { subject.process }.to raise_error /unable to read lines/i
+ end
+ end
+
+ context 'with a failed re-read of file string' do
+ before do
+ allow(ghfm_spec_txt_uri_io).to receive(:read).and_return(nil)
+ end
+
+ it 'raises an error if file is blank' do
+ expect { subject.process }.to raise_error /unable to read string/i
+ end
end
end
end
diff --git a/spec/services/members/mailgun/process_webhook_service_spec.rb b/spec/services/members/mailgun/process_webhook_service_spec.rb
index d6a21183395..3b657c05bd8 100644
--- a/spec/services/members/mailgun/process_webhook_service_spec.rb
+++ b/spec/services/members/mailgun/process_webhook_service_spec.rb
@@ -39,4 +39,34 @@ RSpec.describe Members::Mailgun::ProcessWebhookService do
end
end
end
+
+ describe '#should_process?' do
+ it 'processes permanent failures for member invite emails' do
+ payload = { 'event' => 'failed', 'severity' => 'permanent', 'tags' => [Members::Mailgun::INVITE_EMAIL_TAG] }
+ service = described_class.new(payload)
+
+ expect(service.should_process?).to eq(true)
+ end
+
+ it 'does not process temporary failures' do
+ payload = { 'event' => 'failed', 'severity' => 'temporary', 'tags' => [Members::Mailgun::INVITE_EMAIL_TAG] }
+ service = described_class.new(payload)
+
+ expect(service.should_process?).to eq(false)
+ end
+
+ it 'does not process emails that are not member invite emails' do
+ payload = { 'event' => 'failed', 'severity' => 'permanent', 'tags' => [] }
+ service = described_class.new(payload)
+
+ expect(service.should_process?).to eq(false)
+ end
+
+ it 'does not process other types of events' do
+ payload = { 'event' => 'delivered', 'tags' => [Members::Mailgun::INVITE_EMAIL_TAG] }
+ service = described_class.new(payload)
+
+ expect(service.should_process?).to eq(false)
+ end
+ end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6407b8d3940..777162b6196 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Projects::UpdatePagesService do
let(:file) { fixture_file_upload("spec/fixtures/pages.zip") }
let(:empty_file) { fixture_file_upload("spec/fixtures/pages_empty.zip") }
+ let(:empty_metadata_filename) { "spec/fixtures/pages_empty.zip.meta" }
let(:metadata_filename) { "spec/fixtures/pages.zip.meta" }
let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) }
@@ -91,6 +92,17 @@ RSpec.describe Projects::UpdatePagesService do
end
end
+ context 'when archive does not have pages directory' do
+ let(:file) { empty_file }
+ let(:metadata_filename) { empty_metadata_filename }
+
+ it 'returns an error' do
+ expect(execute).not_to eq(:success)
+
+ expect(GenericCommitStatus.last.description).to eq("Error: The `public/` folder is missing, or not declared in `.gitlab-ci.yml`.")
+ end
+ end
+
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index db8d45f61ea..7aaf11498cf 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -699,13 +699,13 @@ module GraphqlHelpers
end
# assumes query_string and user to be let-bound in the current context
- def execute_query(query_type, schema: empty_schema, graphql: query_string, raise_on_error: false)
+ def execute_query(query_type = Types::QueryType, schema: empty_schema, graphql: query_string, raise_on_error: false, variables: {})
schema.query(query_type)
r = schema.execute(
graphql,
context: { current_user: user },
- variables: {}
+ variables: variables
)
if raise_on_error && r.to_h['errors'].present?
diff --git a/spec/support/helpers/subscription_portal_helper.rb b/spec/support/helpers/subscription_portal_helper.rb
new file mode 100644
index 00000000000..53e8f78371d
--- /dev/null
+++ b/spec/support/helpers/subscription_portal_helper.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
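+# Spec helper exposing the customers portal URLs; prepend_mod allows EE/JH builds to extend or override it.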
+module SubscriptionPortalHelper
+ def staging_customers_url
+ 'https://customers.staging.gitlab.com'
+ end
+
+ def prod_customers_url
+ 'https://customers.gitlab.com'
+ end
+end
+
+SubscriptionPortalHelper.prepend_mod
diff --git a/spec/support/matchers/background_migrations_matchers.rb b/spec/support/matchers/background_migrations_matchers.rb
index b471323dd72..c5b3e140585 100644
--- a/spec/support/matchers/background_migrations_matchers.rb
+++ b/spec/support/matchers/background_migrations_matchers.rb
@@ -65,11 +65,13 @@ RSpec::Matchers.define :be_scheduled_migration_with_multiple_args do |*expected|
end
end
-RSpec::Matchers.define :have_scheduled_batched_migration do |table_name: nil, column_name: nil, job_arguments: [], **attributes|
+RSpec::Matchers.define :have_scheduled_batched_migration do |gitlab_schema: :gitlab_main, table_name: nil, column_name: nil, job_arguments: [], **attributes|
define_method :matches? do |migration|
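+ # Refresh cached column information so recently added columns (such as gitlab_schema) are visible here.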
+ reset_column_information(Gitlab::Database::BackgroundMigration::BatchedMigration)
+
batched_migrations =
Gitlab::Database::BackgroundMigration::BatchedMigration
- .for_configuration(migration, table_name, column_name, job_arguments)
+ .for_configuration(gitlab_schema, migration, table_name, column_name, job_arguments)
expect(batched_migrations.count).to be(1)
expect(batched_migrations).to all(have_attributes(attributes)) if attributes.present?
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 26731f34ed6..ee0c2dbfff4 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -98,10 +98,26 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
context 'when the feature flag is enabled' do
+ let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
+
before do
stub_feature_flags(feature_flag => true)
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .with(connection: base_model.connection)
+ .and_return(nil)
+ end
+
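+ # When the tracking database shares its configuration with 'main', the worker should neither look up nor run any migration.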
+ context 'when database config is shared' do
+ it 'does nothing' do
+ expect(Gitlab::Database).to receive(:db_config_share_with)
+ .with(base_model.connection_db_config).and_return('main')
+
+ expect(worker).not_to receive(:active_migration)
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
end
context 'when no active migrations exist' do
@@ -121,6 +137,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
before do
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .with(connection: base_model.connection)
.and_return(migration)
allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
@@ -205,4 +222,123 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
end
end
+
+ describe 'executing an entire migration', :freeze_time, if: Gitlab::Database.has_config?(tracking_database) do
+ include Gitlab::Database::DynamicModelHelpers
+
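+ # Minimal job class under test: it zeroes some_column for rows whose status matches the job argument.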
+ let(:migration_class) do
+ Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
+ def perform(matching_status)
+ each_sub_batch(
+ operation_name: :update_all,
+ batching_scope: -> (relation) { relation.where(status: matching_status) }
+ ) do |sub_batch|
+ sub_batch.update_all(some_column: 0)
+ end
+ end
+ end
+ end
+
+ let!(:migration) do
+ create(
+ :batched_background_migration,
+ :active,
+ table_name: table_name,
+ column_name: :id,
+ max_value: migration_records,
+ batch_size: batch_size,
+ sub_batch_size: sub_batch_size,
+ job_class_name: 'ExampleDataMigration',
+ job_arguments: [1]
+ )
+ end
+
+ let(:table_name) { 'example_data' }
+ let(:batch_size) { 5 }
+ let(:sub_batch_size) { 2 }
+ let(:number_of_batches) { 10 }
+ let(:migration_records) { batch_size * number_of_batches }
+
+ let(:connection) { Gitlab::Database.database_base_models[tracking_database].connection }
+ let(:example_data) { define_batchable_model(table_name, connection: connection) }
+
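+ # Use the tracking database's connection for shared models (including BatchedMigration) throughout each example.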
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ before do
+ # Create example table populated with test data to migrate.
+ #
+ # Test data should have two records that won't be updated:
+ # - one record beyond the migration's range
+ # - one record that doesn't match the migration job's batch condition
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id integer primary key,
+ some_column integer,
+ status smallint);
+
+ INSERT INTO #{table_name} (id, some_column, status)
+ SELECT generate_series, generate_series, 1
+ FROM generate_series(1, #{migration_records + 1});
+
+ UPDATE #{table_name}
+ SET status = 0
+ WHERE some_column = #{migration_records - 5};
+ SQL
+
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
+
+ stub_const('Gitlab::BackgroundMigration::ExampleDataMigration', migration_class)
+ end
+
+ subject(:full_migration_run) do
+ # process all batches, then do an extra execution to mark the job as finished
+ (number_of_batches + 1).times do
+ described_class.new.perform
+
+ travel_to((migration.interval + described_class::INTERVAL_VARIANCE).seconds.from_now)
+ end
+ end
+
+ it 'marks the migration record as finished' do
+ expect { full_migration_run }.to change { migration.reload.status }.from(1).to(3) # active -> finished
+ end
+
+ it 'creates job records for each processed batch', :aggregate_failures do
+ expect { full_migration_run }.to change { migration.reload.batched_jobs.count }.from(0)
+
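+ # Walk the jobs in order: each batch must start where the previous one ended and the last must end at the final record.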
+ final_min_value = migration.batched_jobs.reduce(1) do |next_min_value, batched_job|
+ expect(batched_job.min_value).to eq(next_min_value)
+
+ batched_job.max_value + 1
+ end
+
+ final_max_value = final_min_value - 1
+ expect(final_max_value).to eq(migration_records)
+ end
+
+ it 'marks all job records as succeeded', :aggregate_failures do
+ expect { full_migration_run }.to change { migration.reload.batched_jobs.count }.from(0)
+
+ expect(migration.batched_jobs).to all(be_succeeded)
+ end
+
+ it 'updates matching records in the range', :aggregate_failures do
+ expect { full_migration_run }
+ .to change { example_data.where('status = 1 AND some_column <> 0').count }
+ .from(migration_records).to(1)
+
+ record_outside_range = example_data.last
+
+ expect(record_outside_range.status).to eq(1)
+ expect(record_outside_range.some_column).not_to eq(0)
+ end
+
+ it 'does not update non-matching records in the range' do
+ expect { full_migration_run }.not_to change { example_data.where('status <> 1 AND some_column <> 0').count }
+ end
+ end
end
diff --git a/spec/tasks/rubocop_rake_spec.rb b/spec/tasks/rubocop_rake_spec.rb
index cf7e45aae28..a92d7dc2e52 100644
--- a/spec/tasks/rubocop_rake_spec.rb
+++ b/spec/tasks/rubocop_rake_spec.rb
@@ -8,6 +8,7 @@ require 'fileutils'
require_relative '../support/silence_stdout'
require_relative '../support/helpers/next_instance_of'
require_relative '../support/helpers/rake_helpers'
+require_relative '../../rubocop/formatter/todo_formatter'
require_relative '../../rubocop/todo_dir'
RSpec.describe 'rubocop rake tasks', :silence_stdout do
@@ -29,22 +30,22 @@ RSpec.describe 'rubocop rake tasks', :silence_stdout do
around do |example|
Dir.chdir(tmp_dir) do
- with_inflections do
- example.run
+ ::RuboCop::Formatter::TodoFormatter.with_base_directory(rubocop_todo_dir) do
+ with_inflections do
+ example.run
+ end
end
end
end
before do
- allow(RuboCop::TodoDir).to receive(:new).and_return(todo_dir)
-
# This Ruby file will trigger the following 3 offenses.
File.write('a.rb', <<~RUBY)
a+b
RUBY
- # Mimic GitLab's .rubocop_todo.yml avoids relying on RuboCop's
+ # Mimicking GitLab's .rubocop_todo.yml avoids relying on RuboCop's
# default.yml configuration.
File.write('.rubocop.yml', <<~YAML)
<% unless ENV['REVEAL_RUBOCOP_TODO'] == '1' %>