Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec/lib
diff options
context:
space:
mode:
Diffstat (limited to 'spec/lib')
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_failure_spec.rb14
-rw-r--r--spec/lib/api/entities/diff_spec.rb3
-rw-r--r--spec/lib/api/entities/group_spec.rb24
-rw-r--r--spec/lib/api/entities/merge_request_basic_spec.rb2
-rw-r--r--spec/lib/api/helpers_spec.rb90
-rw-r--r--spec/lib/api/ml/mlflow/api_helpers_spec.rb52
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb6
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb6
-rw-r--r--spec/lib/backup/database_model_spec.rb185
-rw-r--r--spec/lib/backup/repositories_spec.rb8
-rw-r--r--spec/lib/banzai/filter/custom_emoji_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb16
-rw-r--r--spec/lib/banzai/filter/markdown_engines/base_spec.rb6
-rw-r--r--spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb17
-rw-r--r--spec/lib/banzai/filter/markdown_filter_spec.rb24
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/design_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/references/reference_cache_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/issuable_extractor_spec.rb4
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb15
-rw-r--r--spec/lib/banzai/reference_parser/snippet_parser_spec.rb4
-rw-r--r--spec/lib/click_house/iterator_spec.rb10
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb20
-rw-r--r--spec/lib/container_registry/tag_spec.rb25
-rw-r--r--spec/lib/feature/definition_spec.rb6
-rw-r--r--spec/lib/gitlab/application_setting_fetcher_spec.rb224
-rw-r--r--spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb29
-rw-r--r--spec/lib/gitlab/auth_spec.rb10
-rw-r--r--spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb78
-rw-r--r--spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb225
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb94
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb67
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb73
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb73
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb84
-rw-r--r--spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb124
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb14
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb81
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb118
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb67
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb52
-rw-r--r--spec/lib/gitlab/checks/changes_access_spec.rb6
-rw-r--r--spec/lib/gitlab/checks/diff_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_integrity_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/policy/changes_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/needs_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb47
-rw-r--r--spec/lib/gitlab/ci/config/entry/workflow_spec.rb67
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb85
-rw-r--r--spec/lib/gitlab/ci/config/external/file/component_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/external/file/remote_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb225
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb49
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/reports/security/report_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb80
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb32
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb90
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb263
-rw-r--r--spec/lib/gitlab/current_settings_spec.rb209
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb8
-rw-r--r--spec/lib/gitlab/data_builder/push_spec.rb4
-rw-r--r--spec/lib/gitlab/database/click_house_client_spec.rb8
-rw-r--r--spec/lib/gitlab/database/dictionary_spec.rb52
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb56
-rw-r--r--spec/lib/gitlab/database/migration_helpers/v2_spec.rb83
-rw-r--r--spec/lib/gitlab/database/namespace_each_batch_spec.rb174
-rw-r--r--spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb3
-rw-r--r--spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb173
-rw-r--r--spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb317
-rw-r--r--spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb468
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb4
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb71
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb2
-rw-r--r--spec/lib/gitlab/database/sharding_key_spec.rb43
-rw-r--r--spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb16
-rw-r--r--spec/lib/gitlab/dependency_linker_spec.rb13
-rw-r--r--spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb23
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/highlight_spec.rb162
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb32
-rw-r--r--spec/lib/gitlab/event_store/event_spec.rb7
-rw-r--r--spec/lib/gitlab/git/changed_path_spec.rb31
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb2
-rw-r--r--spec/lib/gitlab/git/compare_spec.rb47
-rw-r--r--spec/lib/gitlab/git/push_spec.rb14
-rw-r--r--spec/lib/gitlab/git/repository_cleaner_spec.rb4
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb156
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb2
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb12
-rw-r--r--spec/lib/gitlab/git_access_project_spec.rb4
-rw-r--r--spec/lib/gitlab/git_access_spec.rb12
-rw-r--r--spec/lib/gitlab/git_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/analysis_service_spec.rb137
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb30
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/attachments_downloader_spec.rb46
-rw-r--r--spec/lib/gitlab/github_import/events_cache_spec.rb79
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb67
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb68
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb79
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb67
-rw-r--r--spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/events/commented_spec.rb69
-rw-r--r--spec/lib/gitlab/github_import/importer/events/merged_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb85
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb139
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb211
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb85
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_event_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/representation/note_text_spec.rb34
-rw-r--r--spec/lib/gitlab/github_import/representation/replay_event_spec.rb24
-rw-r--r--spec/lib/gitlab/github_import/settings_spec.rb52
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb42
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb1
-rw-r--r--spec/lib/gitlab/highlight_spec.rb29
-rw-r--r--spec/lib/gitlab/http_spec.rb51
-rw-r--r--spec/lib/gitlab/import/page_counter_spec.rb (renamed from spec/lib/gitlab/github_import/page_counter_spec.rb)4
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml9
-rw-r--r--spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb37
-rw-r--r--spec/lib/gitlab/import_export/import_failure_service_spec.rb3
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb18
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml7
-rw-r--r--spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb224
-rw-r--r--spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb10
-rw-r--r--spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb16
-rw-r--r--spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb16
-rw-r--r--spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb8
-rw-r--r--spec/lib/gitlab/legacy_http_spec.rb448
-rw-r--r--spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb6
-rw-r--r--spec/lib/gitlab/namespaced_session_store_spec.rb25
-rw-r--r--spec/lib/gitlab/pagination/keyset/iterator_spec.rb6
-rw-r--r--spec/lib/gitlab/pagination/keyset/paginator_spec.rb25
-rw-r--r--spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb8
-rw-r--r--spec/lib/gitlab/redis/cluster_util_spec.rb13
-rw-r--r--spec/lib/gitlab/redis/cross_slot_spec.rb12
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb210
-rw-r--r--spec/lib/gitlab/redis/sessions_spec.rb21
-rw-r--r--spec/lib/gitlab/redis/shared_state_spec.rb1
-rw-r--r--spec/lib/gitlab/runtime_spec.rb4
-rw-r--r--spec/lib/gitlab/security/scan_configuration_spec.rb101
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb2
-rw-r--r--spec/lib/gitlab/ssh/commit_spec.rb2
-rw-r--r--spec/lib/gitlab/ssh/signature_spec.rb33
-rw-r--r--spec/lib/gitlab/themes_spec.rb14
-rw-r--r--spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb140
-rw-r--r--spec/lib/gitlab/tracking/event_definition_spec.rb10
-rw-r--r--spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb12
-rw-r--r--spec/lib/gitlab/tracking_spec.rb32
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb56
-rw-r--r--spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb14
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb58
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb11
-rw-r--r--spec/lib/sidebars/organizations/menus/scope_menu_spec.rb5
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb29
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb3
209 files changed, 6657 insertions, 2227 deletions
diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
index 217e6c11630..20563bfb685 100644
--- a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
@@ -19,10 +19,10 @@ RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :imp
end
describe 'exception message' do
- it 'truncates exception message to 72 characters' do
- failure.update!(exception_message: 'a' * 100)
+ it 'truncates exception message to 255 characters' do
+ failure.update!(exception_message: 'a' * 500)
- expect(subject[:exception_message].length).to eq(72)
+ expect(subject[:exception_message].length).to eq(255)
end
it 'removes paths from the message' do
@@ -30,5 +30,13 @@ RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :imp
expect(subject[:exception_message]).to eq('Test [FILTERED]')
end
+
+ it 'removes long paths without clipping the message' do
+ exception_message = "Test #{'/abc' * 300} #{'a' * 500}"
+ failure.update!(exception_message: exception_message)
+ filtered_message = "Test [FILTERED] #{'a' * 500}"
+
+ expect(subject[:exception_message]).to eq(filtered_message.truncate(255))
+ end
end
end
diff --git a/spec/lib/api/entities/diff_spec.rb b/spec/lib/api/entities/diff_spec.rb
index 27d9ed44c98..535567d4d8d 100644
--- a/spec/lib/api/entities/diff_spec.rb
+++ b/spec/lib/api/entities/diff_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe ::API::Entities::Diff, feature_category: :source_code_management
b_mode: diff.b_mode,
new_file: diff.new_file?,
renamed_file: diff.renamed_file?,
- deleted_file: diff.deleted_file?
+ deleted_file: diff.deleted_file?,
+ generated_file: diff.generated?
}
)
end
diff --git a/spec/lib/api/entities/group_spec.rb b/spec/lib/api/entities/group_spec.rb
new file mode 100644
index 00000000000..270ac323c7d
--- /dev/null
+++ b/spec/lib/api/entities/group_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Group, feature_category: :groups_and_projects do
+ let_it_be(:group) do
+ base_group = create(:group) { |g| create(:project_statistics, namespace_id: g.id) }
+ Group.with_statistics.find(base_group.id)
+ end
+
+ subject(:json) { described_class.new(group, { with_custom_attributes: true, statistics: true }).as_json }
+
+ it 'returns expected data' do
+ expect(json.keys).to(
+ include(
+ :organization_id, :path, :description, :visibility, :share_with_group_lock, :require_two_factor_authentication,
+ :two_factor_grace_period, :project_creation_level, :auto_devops_enabled,
+ :subgroup_creation_level, :emails_disabled, :emails_enabled, :lfs_enabled, :default_branch_protection,
+ :default_branch_protection_defaults, :avatar_url, :request_access_enabled, :full_name, :full_path, :created_at,
+ :parent_id, :organization_id, :shared_runners_setting, :custom_attributes, :statistics
+ )
+ )
+ end
+end
diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb
index 0cf0a57fa87..621d57b49cd 100644
--- a/spec/lib/api/entities/merge_request_basic_spec.rb
+++ b/spec/lib/api/entities/merge_request_basic_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe ::API::Entities::MergeRequestBasic, feature_category: :code_revie
# stub the `head_commit_sha` as it will trigger a
# backward compatibility query that is out-of-scope
# for this test whenever it is `nil`
- allow_any_instance_of(MergeRequestDiff).to receive(:head_commit_sha).and_return(Gitlab::Git::BLANK_SHA)
+ allow_any_instance_of(MergeRequestDiff).to receive(:head_commit_sha).and_return(Gitlab::Git::SHA1_BLANK_SHA)
query = scope.all
batch = ActiveRecord::QueryRecorder.new do
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 21b3b8e6927..d1dee70e34d 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -406,6 +406,37 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
+ describe '#find_organization!' do
+ let_it_be(:organization) { create(:organization) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
+ end
+
+ context 'when user is authenticated' do
+ it 'returns requested organization' do
+ expect(helper.find_organization!(organization.id)).to eq(organization)
+ end
+ end
+
+ context 'when user is not authenticated' do
+ let(:user) { nil }
+
+ it 'returns requested organization' do
+ expect(helper.find_organization!(organization.id)).to eq(organization)
+ end
+ end
+
+ context 'when organization does not exist' do
+ it 'returns nil' do
+ expect(helper).to receive(:render_api_error!).with('404 Organization Not Found', 404)
+ expect(helper.find_organization!(non_existing_record_id)).to be_nil
+ end
+ end
+ end
+
describe '#find_group!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
@@ -457,7 +488,7 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
- context 'support for IDs and paths as arguments' do
+ context 'with support for IDs and paths as arguments' do
let_it_be(:group) { create(:group) }
let(:user) { group.first_owner }
@@ -505,6 +536,34 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
+ context 'with support for organization as an argument' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:organization) { create(:organization) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(group.first_owner)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
+ end
+
+ subject { helper.find_group!(group.id, organization: organization) }
+
+ context 'when group exists in the organization' do
+ before do
+ group.update!(organization: organization)
+ end
+
+ it { is_expected.to eq(group) }
+ end
+
+ context 'when group does not exist in the organization' do
+ it 'returns nil' do
+ expect(helper).to receive(:render_api_error!).with('404 Group Not Found', 404)
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '#find_group_by_full_path!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
@@ -674,23 +733,15 @@ RSpec.describe API::Helpers, feature_category: :shared do
let(:send_authorized_project_scope) { helper.authorized_project_scope?(project) }
- where(:job_token_authentication, :route_setting, :feature_flag, :same_job_project, :expected_result) do
- false | false | false | false | true
- false | false | false | true | true
- false | false | true | false | true
- false | false | true | true | true
- false | true | false | false | true
- false | true | false | true | true
- false | true | true | false | true
- false | true | true | true | true
- true | false | false | false | true
- true | false | false | true | true
- true | false | true | false | true
- true | false | true | true | true
- true | true | false | false | false
- true | true | false | true | false
- true | true | true | false | false
- true | true | true | true | true
+ where(:job_token_authentication, :route_setting, :same_job_project, :expected_result) do
+ false | false | false | true
+ false | false | true | true
+ false | true | false | true
+ false | true | true | true
+ true | false | false | true
+ true | false | true | true
+ true | true | false | false
+ true | true | true | true
end
with_them do
@@ -699,9 +750,6 @@ RSpec.describe API::Helpers, feature_category: :shared do
allow(helper).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
allow(helper).to receive(:current_authenticated_job).and_return(job)
allow(job).to receive(:project).and_return(same_job_project ? project : other_project)
-
- stub_feature_flags(ci_job_token_scope: false)
- stub_feature_flags(ci_job_token_scope: project) if feature_flag
end
it 'returns the expected result' do
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
index 3e7a0187d86..f45fccfba4c 100644
--- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -71,4 +71,56 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
end
end
end
+
+ describe '#gitlab_tags' do
+ describe 'when tags param is not supplied' do
+ let(:params) { {} }
+
+ it 'returns nil' do
+ expect(gitlab_tags).to be nil
+ end
+ end
+
+ describe 'when tags param is supplied' do
+ let(:params) { { tags: input } }
+
+ using RSpec::Parameterized::TableSyntax
+
+ subject { gitlab_tags }
+
+ where(:input, :output) do
+ [] | nil
+ [{}] | {}
+ [{ key: 'foo', value: 'bar' }] | {}
+ [{ key: "gitlab.version", value: "1.2.3" }] | { "version" => "1.2.3" }
+ [{ key: "foo", value: "bar" }, { key: "gitlab.foo", value: "baz" }] | { "foo" => "baz" }
+ end
+ with_them do
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
+ end
+
+ describe '#custom_version' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { custom_version }
+
+ where(:input, :output) do
+ [] | nil
+ [{}] | nil
+ [{ key: 'foo', value: 'bar' }] | nil
+ [{ key: "gitlab.version", value: "1.2.3" }] | "1.2.3"
+ [{ key: "foo", value: "bar" }, { key: "gitlab.foo", value: "baz" }] | nil
+ end
+ with_them do
+ let(:params) { { tags: input } }
+
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
end
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index a692d76da77..e1159b9fab2 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -433,16 +433,16 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d
end
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject.send(:store_dev_info, project: project, merge_requests: merge_requests)
- end.count
+ end
merge_requests << create(:merge_request, :unique_branches, source_project: project)
expect do
subject.send(:store_dev_info, project: project,
merge_requests: merge_requests)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
index 0ed320e863c..15cb4994d46 100644
--- a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
@@ -24,9 +24,9 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
subject { described_class.represent(merge_requests, user_notes_count: user_notes_count).as_json }
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
described_class.represent(merge_requests, user_notes_count: user_notes_count)
- end.count
+ end
merge_requests << create(:merge_request, :unique_branches)
@@ -35,7 +35,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
records: merge_requests, associations: { merge_request_reviewers: :reviewer }
).call
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
it 'uses counts from user_notes_count' do
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
deleted file mode 100644
index 9fab5cbc1c0..00000000000
--- a/spec/lib/backup/database_model_spec.rb
+++ /dev/null
@@ -1,185 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature_category: :backup_restore do
- using RSpec::Parameterized::TableSyntax
-
- let(:gitlab_database_name) { 'main' }
-
- describe '#connection' do
- subject(:connection) { described_class.new(gitlab_database_name).connection }
-
- it 'an instance of a ActiveRecord::Base.connection' do
- connection.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases -- We actually need an ActiveRecord::Base here
- end
- end
-
- describe '#config' do
- let(:application_config) do
- {
- adapter: 'postgresql',
- host: 'some_host',
- port: '5432'
- }
- end
-
- subject(:config) { described_class.new(gitlab_database_name).config }
-
- before do
- allow(
- Gitlab::Database.database_base_models_with_gitlab_shared[gitlab_database_name].connection_db_config
- ).to receive(:configuration_hash).and_return(application_config)
- end
-
- shared_examples 'no configuration is overridden' do
- it 'ActiveRecord backup configuration is expected to equal application configuration' do
- expect(config[:activerecord]).to eq(application_config)
- end
-
- it 'PostgreSQL ENV is expected to equal application configuration' do
- expect(config[:pg_env]).to eq(
- {
- 'PGHOST' => application_config[:host],
- 'PGPORT' => application_config[:port]
- }
- )
- end
- end
-
- shared_examples 'environment variables override application configuration' do
- let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
-
- it 'ActiveRecord backup configuration overrides application configuration' do
- expect(config[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
- end
-
- it 'PostgreSQL ENV overrides application configuration' do
- expect(config[:pg_env]).to include({ pg_env => overridden_value })
- end
- end
-
- context 'when no GITLAB_BACKUP_PG* variables are set' do
- it_behaves_like 'no configuration is overridden'
- end
-
- context 'when generic database configuration is overridden' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_PGPORT' | '1543'
- 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'environment variables override application configuration'
- end
- end
-
- context 'when specific database configuration is overridden' do
- context 'and environment variables are for the current database name' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
- 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'environment variables override application configuration'
- end
- end
-
- context 'and environment variables are for another database' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_CI_PGPORT' | '1543'
- 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'no configuration is overridden'
- end
- end
-
- context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
- before do
- stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
- stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specfic_user')
- end
-
- it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
- expect(config.dig(:activerecord, :username)).to eq('specfic_user')
- expect(config.dig(:pg_env, 'PGUSER')).to eq('specfic_user')
- end
- end
- end
- end
-end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 024f6c5db96..679be62393e 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -68,20 +68,20 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject.dump(destination, backup_id)
- end.count
+ end
create_list(:project, 2, :repository)
create_list(:snippet, 2, :repository)
- # Number of expected queries are 2 more than control_count
+ # Number of expected queries is 2 more than control.count
# to account for the queries for project.design_management_repository
# for each project.
# We are using 2 projects here.
expect do
subject.dump(destination, backup_id)
- end.not_to exceed_query_limit(control_count + 2)
+ end.not_to exceed_query_limit(control).with_threshold(2)
end
describe 'storages' do
diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
index 4fc9d9dd4f6..701a45aa54d 100644
--- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
@@ -47,13 +47,13 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planni
it 'does not do N+1 query' do
create(:custom_emoji, name: 'party-parrot', group: group)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter('<p>:tanuki:</p>')
end
expect do
filter('<p>:tanuki:</p> <p>:party-parrot:</p>')
- end.not_to exceed_all_query_limit(control_count.count)
+ end.not_to exceed_all_query_limit(control)
end
it 'uses custom emoji from ancestor group' do
diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
index 06bb0edc92c..d14f218763f 100644
--- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
+++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
@@ -259,15 +259,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
# warm up
filter(link, context)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter(link, context)
- end.count
+ end
- expect(control_count).to eq 12
+ expect(control.count).to eq 12
expect do
filter("#{link} #{link2}", context)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
@@ -419,15 +419,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
# warm up
filter(link, context)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter(link, context)
- end.count
+ end
- expect(control_count).to eq 10
+ expect(control.count).to eq 10
expect do
filter("#{link} #{link2}", context)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/filter/markdown_engines/base_spec.rb b/spec/lib/banzai/filter/markdown_engines/base_spec.rb
index e7b32876610..3114f367fac 100644
--- a/spec/lib/banzai/filter/markdown_engines/base_spec.rb
+++ b/spec/lib/banzai/filter/markdown_engines/base_spec.rb
@@ -14,4 +14,10 @@ RSpec.describe Banzai::Filter::MarkdownEngines::Base, feature_category: :team_pl
expect(engine.send(:sourcepos_disabled?)).to be_truthy
end
+
+ it 'accepts a nil context' do
+ engine = described_class.new(nil)
+
+ expect(engine.context).to eq({})
+ end
end
diff --git a/spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb b/spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb
new file mode 100644
index 00000000000..da58b824a06
--- /dev/null
+++ b/spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::MarkdownEngines::GlfmMarkdown, feature_category: :team_planning do
+ it 'defaults to generating sourcepos' do
+ engine = described_class.new({})
+
+ expect(engine.render('# hi')).to eq %(<h1 data-sourcepos="1:1-1:4">hi</h1>\n)
+ end
+
+ it 'turns off sourcepos' do
+ engine = described_class.new({ no_sourcepos: true })
+
+ expect(engine.render('# hi')).to eq %(<h1>hi</h1>\n)
+ end
+end
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index b4fb715b8f0..30869d67661 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -8,18 +8,30 @@ RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning
describe 'markdown engine from context' do
it 'finds the correct engine' do
- expect(described_class.render_engine(:common_mark)).to eq Banzai::Filter::MarkdownEngines::CommonMark
+ expect(described_class.new('foo', { markdown_engine: :common_mark }).render_engine)
+ .to eq Banzai::Filter::MarkdownEngines::CommonMark
end
- it 'defaults to the DEFAULT_ENGINE' do
- default_engine = Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE.to_s.classify
- default = "Banzai::Filter::MarkdownEngines::#{default_engine}".constantize
+ it 'defaults to the RUST_ENGINE' do
+ default_engine = Banzai::Filter::MarkdownFilter::RUST_ENGINE.to_s.classify
+ engine = "Banzai::Filter::MarkdownEngines::#{default_engine}".constantize
- expect(described_class.render_engine(nil)).to eq default
+ expect(described_class.new('foo', {}).render_engine).to eq engine
+ end
+
+ context 'when :markdown_rust feature flag is turned off' do
+ it 'defaults to the RUBY_ENGINE' do
+ stub_feature_flags(markdown_rust: false)
+
+ ruby_engine = Banzai::Filter::MarkdownFilter::RUBY_ENGINE.to_s.classify
+ engine = "Banzai::Filter::MarkdownEngines::#{ruby_engine}".constantize
+
+ expect(described_class.new('foo', {}).render_engine).to eq engine
+ end
end
it 'raise error for unrecognized engines' do
- expect { described_class.render_engine(:foo_bar) }.to raise_error(NameError)
+ expect { described_class.new('foo', { markdown_engine: :foo_bar }).render_engine }.to raise_error(NameError)
end
end
diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index 9a2e68aaae0..0bdd64c360d 100644
--- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -230,11 +230,11 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = alert_reference.to_s
- max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(max_count).to eq 1
+ expect(control.count).to eq 1
markdown = "#{alert_reference} ^alert#2 ^alert#3 ^alert#4 #{alert2_reference}"
@@ -248,11 +248,9 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
# 1x2 for alerts in each project
# Total == 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count += 6
-
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control).with_threshold(6)
end
end
end
diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index 35a3f20f7b7..730554857df 100644
--- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -283,11 +283,11 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = commit_reference.to_s
- max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(max_count).to eq 0
+ expect(control.count).to eq 0
markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
@@ -298,11 +298,9 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
# 1 for loading the namespaces associated to the project
# 1 for loading the routes associated with the namespace
# Total = 5
- max_count += 5
-
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control).with_threshold(5)
end
end
end
diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
index fd03d7c0d27..678d6619101 100644
--- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
@@ -240,7 +240,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
* #1[not a valid reference.gif]
MD
- baseline = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
+ control = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
# each project mentioned requires 2 queries:
#
@@ -253,7 +253,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
# In addition there is a 1 query overhead for all the projects at the
# start. Currently, the baseline for 2 projects is `2 * 2 + 1 = 5` queries
#
- expect { process(multiple_references) }.not_to exceed_query_limit(baseline.count)
+ expect { process(multiple_references) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index 823f006c98a..acc59c85cbf 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -338,9 +338,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature
single_reference = "External Issue #{issue1.to_reference}"
multiple_references = "External Issues #{issue1.to_reference} and #{issue2.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index d16188e99a3..fd947e3e9cb 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -41,9 +41,9 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
single_reference = "Issue #{issue.to_reference}"
multiple_references = "Issues #{issue.to_reference} and #{another_issue.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index 81b08a4c516..bcc256813c9 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -35,13 +35,13 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
# Run this once to establish a baseline
reference_filter("Label #{reference}")
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter("Label #{reference}")
end
labels_markdown = Array.new(10, "Label #{reference}").join('\n')
- expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control_count.count)
+ expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control)
end
it 'includes a data-project attribute' do
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index ccc8478c7d8..e3036993f7b 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -26,9 +26,9 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_
single_reference = "Merge request #{merge.to_reference}"
multiple_references = "Merge requests #{merge.to_reference} and #{another_merge.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index c55fff78756..12af94507b6 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -115,17 +115,17 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
# warm up first
reference_filter(markdown)
- max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(max_count).to eq 2
+ expect(control.count).to eq 2
markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
index 04877931610..b4d9a08e4c6 100644
--- a/spec/lib/banzai/filter/references/reference_cache_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -70,13 +70,13 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
filter_single = filter_class.new(doc_single, project: project)
cache_single = described_class.new(filter_single, { project: project }, {})
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
cache_single.load_references_per_parent(filter_single.nodes)
cache_single.load_parent_per_reference
cache_single.load_records_per_parent
- end.count
+ end
- expect(control_count).to eq 3
+ expect(control.count).to eq 3
# Since this is an issue filter that is not batching issue queries
# across projects, we have to account for that.
# 1 for for routes to find routes.source_id of projects matching paths
@@ -88,13 +88,11 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
# 1x2 for groups
# 1x2 for work_item_types
# Total = 11
- max_count = control_count + 8
-
expect do
cache.load_references_per_parent(filter.nodes)
cache.load_parent_per_reference
cache.load_records_per_parent
- end.not_to exceed_query_limit(max_count)
+ end.not_to exceed_query_limit(control).with_threshold(8)
end
end
diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index 00eac7262f4..51c5551dda8 100644
--- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -229,11 +229,11 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = "#{reference} $9999990"
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(control_count).to eq 1
+ expect(control.count).to eq 1
markdown = "#{reference} $9999990 $9999991 $9999992 $9999993 #{reference2} something/cool$12"
@@ -247,11 +247,9 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
# 1x2 for snippets in each project == 2
# Total = 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count = control_count + 6
-
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control).with_threshold(6)
end
end
end
diff --git a/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb b/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb
index e59e53891bf..cf245ccc72a 100644
--- a/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb
@@ -306,9 +306,9 @@ RSpec.describe Banzai::Filter::References::WorkItemReferenceFilter, feature_cate
single_reference = "Work item #{work_item.to_reference}"
multiple_references = "Work items #{work_item.to_reference} and #{another_work_item.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb
index 5bbd98592e7..fe1a2bd9a2e 100644
--- a/spec/lib/banzai/issuable_extractor_spec.rb
+++ b/spec/lib/banzai/issuable_extractor_spec.rb
@@ -45,9 +45,9 @@ RSpec.describe Banzai::IssuableExtractor, feature_category: :team_planning do
second_call_queries = ActiveRecord::QueryRecorder.new do
extractor.extract([issue_link, work_item_link, merge_request_link])
- end.count
+ end
- expect(second_call_queries).to eq 0
+ expect(second_call_queries.count).to eq 0
end
end
end
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index a845e4fa7f4..bb6d4eeefbc 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -167,6 +167,21 @@ RSpec.describe Banzai::Pipeline::GfmPipeline, feature_category: :team_planning d
end
end
+ context 'when label reference is similar to a commit SHA' do
+ let(:numeric_commit_sha) { '8634272' }
+ let(:project) { create(:project, :repository) }
+ let(:label) { create(:label, project: project, id: numeric_commit_sha) }
+
+ it 'renders a label reference' do
+ expect(project.commit_by(oid: numeric_commit_sha)).to be_present
+
+ output = described_class.to_html(label.to_reference(format: :id), project: project)
+
+ expect(output).to include(label.name)
+ expect(output).to include(Gitlab::Routing.url_helpers.project_issues_path(project, label_name: label.name))
+ end
+ end
+
describe 'asset proxy' do
let(:project) { create(:project, :public) }
let(:image) { '![proxy](http://example.com/test.png)' }
diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
index 8f4148be2dc..0f3834c2dc8 100644
--- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
@@ -37,11 +37,11 @@ RSpec.describe Banzai::ReferenceParser::SnippetParser, feature_category: :team_p
# Run this once to establish a baseline
visible_references(:public)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
subject.nodes_visible_to_user(user, [link])
end
- expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control_count.count)
+ expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control)
end
it 'creates a reference for guest for a public snippet' do
diff --git a/spec/lib/click_house/iterator_spec.rb b/spec/lib/click_house/iterator_spec.rb
index fd054c0afe5..962ccc6d884 100644
--- a/spec/lib/click_house/iterator_spec.rb
+++ b/spec/lib/click_house/iterator_spec.rb
@@ -29,6 +29,16 @@ RSpec.describe ClickHouse::Iterator, :click_house, feature_category: :database d
expect(collect_ids_with_batch_size(15)).to match_array(expected_values)
end
+ context 'when min value is given' do
+ let(:iterator) { described_class.new(query_builder: query_builder, connection: connection, min_value: 5) }
+
+ it 'iterates from the given min value' do
+ expected_values = (5..10).to_a
+
+ expect(collect_ids_with_batch_size(5)).to match_array(expected_values)
+ end
+ end
+
context 'when there are no records for the given query' do
let(:query_builder) do
ClickHouse::QueryBuilder
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index 3c87af3a1c8..e13f639f048 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -256,6 +256,23 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
it { is_expected.to eq(expected) }
end
+ context 'with referrers included' do
+ subject { client.tags(path, page_size: page_size, referrers: true) }
+
+ let(:expected) do
+ {
+ pagination: {},
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: page_size, input: { referrers: 'true' }, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
context 'with a response with a link header containing next page' do
let(:expected) do
{
@@ -961,7 +978,8 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
last: input[:last],
name: input[:name],
sort: input[:sort],
- before: input[:before]
+ before: input[:before],
+ referrers: input[:referrers]
}.compact
url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/tags/list/"
diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb
index 8f9308f2127..42191cb121c 100644
--- a/spec/lib/container_registry/tag_spec.rb
+++ b/spec/lib/container_registry/tag_spec.rb
@@ -336,6 +336,31 @@ RSpec.describe ContainerRegistry::Tag, feature_category: :container_registry do
it { is_expected.to eq(nil) }
end
end
+
+ describe 'published_at=' do
+ subject do
+ tag.published_at = input
+ tag.published_at
+ end
+
+ context 'with a valid input' do
+ let(:input) { 2.days.ago.iso8601 }
+
+ it { is_expected.to eq(DateTime.iso8601(input)) }
+ end
+
+ context 'with a nil input' do
+ let(:input) { nil }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with an invalid input' do
+ let(:input) { 'not a timestamp' }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
end
end
end
diff --git a/spec/lib/feature/definition_spec.rb b/spec/lib/feature/definition_spec.rb
index 595725d357c..b75c780a33e 100644
--- a/spec/lib/feature/definition_spec.rb
+++ b/spec/lib/feature/definition_spec.rb
@@ -30,11 +30,11 @@ RSpec.describe Feature::Definition do
:name | 'ALL_CAPS' | /Feature flag 'ALL_CAPS' is invalid/
:name | nil | /Feature flag is missing name/
:path | nil | /Feature flag 'feature_flag' is missing path/
- :type | nil | /Feature flag 'feature_flag' is missing type/
+ :type | nil | /Feature flag 'feature_flag' is missing `type`/
:type | 'invalid' | /Feature flag 'feature_flag' type 'invalid' is invalid/
:path | 'development/invalid.yml' | /Feature flag 'feature_flag' has an invalid path/
- :path | 'invalid/feature_flag.yml' | /Feature flag 'feature_flag' has an invalid type/
- :default_enabled | nil | /Feature flag 'feature_flag' is missing default_enabled/
+ :path | 'invalid/feature_flag.yml' | /Feature flag 'feature_flag' has an invalid path/
+ :default_enabled | nil | /Feature flag 'feature_flag' is missing `default_enabled`/
end
with_them do
diff --git a/spec/lib/gitlab/application_setting_fetcher_spec.rb b/spec/lib/gitlab/application_setting_fetcher_spec.rb
new file mode 100644
index 00000000000..0225a7608cb
--- /dev/null
+++ b/spec/lib/gitlab/application_setting_fetcher_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ApplicationSettingFetcher, feature_category: :cell do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+
+ described_class.clear_in_memory_application_settings!
+ end
+
+ describe '.clear_in_memory_application_settings!' do
+ subject(:clear_in_memory_application_settings!) { described_class.clear_in_memory_application_settings! }
+
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
+
+ described_class.current_application_settings
+ end
+
+ it 'will re-initialize settings' do
+ expect(ApplicationSetting).to receive(:build_from_defaults).and_call_original
+
+ clear_in_memory_application_settings!
+ described_class.current_application_settings
+ end
+ end
+
+ describe '.current_application_settings' do
+ subject(:current_application_settings) { described_class.current_application_settings }
+
+ context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
+ end
+
+ it 'returns an in-memory ApplicationSetting object' do
+ expect(ApplicationSetting).not_to receive(:current)
+ expect(ApplicationSetting).to receive(:build_from_defaults).and_call_original
+
+ expect(current_application_settings).to be_a(ApplicationSetting)
+ expect(current_application_settings).not_to be_persisted
+ end
+ end
+
+ context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is false' do
+ let_it_be(:settings) { create(:application_setting) }
+
+ context 'and an error is raised' do
+ before do
+ # The cached method is called twice:
+ # - ApplicationSettingFetcher
+ # - ApplicationSetting (CachedAttribute module)
+ # For this test, the first needs to raise an exception
+ # The second is swallowed on production so that should not raise an exception
+ # So we only let the first call raise an exception
+ # Alternatively, we could mock Rails.env.production? but I prefer not to
+ raise_exception = true
+ allow(ApplicationSetting).to receive(:cached).twice do
+ if raise_exception
+ raise_exception = false
+ raise(StandardError)
+ else
+ ApplicationSetting.last
+ end
+ end
+ end
+
+ it 'will retrieve uncached ApplicationSetting' do
+ expect(ApplicationSetting).to receive(:current).and_call_original
+
+ expect(current_application_settings).to eq(settings)
+ end
+ end
+
+ context 'and settings in cache' do
+ before do
+ # Warm the cache
+ ApplicationSetting.current
+ end
+
+ it 'fetches the settings from cache' do
+ expect(::ApplicationSetting).to receive(:cached).and_call_original
+
+ expect(ActiveRecord::QueryRecorder.new { current_application_settings }.count).to eq(0)
+ end
+ end
+
+ context 'and settings are not in cache' do
+ before do
+ allow(ApplicationSetting).to receive(:cached).and_return(nil)
+ end
+
+ context 'and we are running a Rake task' do
+ before do
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
+ end
+
+ context 'and database does not exist' do
+ before do
+ allow(::ApplicationSetting.database)
+ .to receive(:cached_table_exists?).and_raise(ActiveRecord::NoDatabaseError)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and database connection is not active' do
+ before do
+ allow(::ApplicationSetting.connection).to receive(:active?).and_return(false)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and table does not exist' do
+ before do
+ allow(::ApplicationSetting.database).to receive(:cached_table_exists?).and_return(false)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and database connection raises some error' do
+ before do
+ allow(::ApplicationSetting.connection).to receive(:active?).and_raise(StandardError)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and there are pending database migrations' do
+ before do
+ allow_next_instance_of(ActiveRecord::MigrationContext) do |migration_context|
+ allow(migration_context).to receive(:needs_migration?).and_return(true)
+ end
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+
+ context 'when a new setting is used but the migration did not run yet' do
+ let(:default_attributes) { { new_column: 'some_value' } }
+
+ before do
+ allow(ApplicationSetting).to receive(:defaults).and_return(default_attributes)
+ end
+
+ it 'uses the default value if present' do
+ expect(current_application_settings.new_column).to eq(
+ default_attributes[:new_column]
+ )
+ end
+ end
+ end
+ end
+
+ context 'and settings are in database' do
+ it 'returns settings from database' do
+ expect(current_application_settings).to eq(settings)
+ end
+ end
+
+ context 'and settings are not in the database' do
+ before do
+ allow(ApplicationSetting).to receive(:current).and_return(nil)
+ end
+
+ it 'returns default settings' do
+ expect(ApplicationSetting).to receive(:create_from_defaults).and_call_original
+
+ expect(current_application_settings).to eq(settings)
+ end
+ end
+
+ context 'when we hit a recursive loop' do
+ before do
+ allow(ApplicationSetting).to receive(:current).and_raise(ApplicationSetting::Recursion)
+ end
+
+ it 'recovers and returns in-memory settings' do
+ settings = described_class.current_application_settings
+
+ expect(settings).to be_a(ApplicationSetting)
+ expect(settings).not_to be_persisted
+ end
+ end
+ end
+ end
+ end
+
+ describe '.expire_current_application_settings' do
+ subject(:expire) { described_class.expire_current_application_settings }
+
+ it 'expires ApplicationSetting' do
+ expect(ApplicationSetting).to receive(:expire)
+
+ expire
+ end
+ end
+
+ describe '.current_application_settings?' do
+ subject(:settings?) { described_class.current_application_settings? }
+
+ context 'when settings exist' do
+ let_it_be(:settings) { create(:application_setting) }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when settings do not exist' do
+ it { is_expected.to be(false) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb b/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
index e0ef45d5621..bccddaa50a0 100644
--- a/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
+++ b/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
@@ -169,4 +169,33 @@ RSpec.describe Gitlab::Auth::TwoFactorAuthVerifier do
end
end
end
+
+ describe '#two_factor_authentication_reason' do
+ it 'returns false if two factor authentication is not required' do
+ allow(user).to receive(:require_two_factor_authentication?).and_return(false)
+
+ expect(subject.two_factor_authentication_reason).to be_falsey
+ end
+
+ it 'returns :global if two factor authentication is enabled globally' do
+ stub_application_setting require_two_factor_authentication: true
+
+ expect(subject.two_factor_authentication_reason).to eq(:global)
+ end
+
+ it 'returns :admin_2fa if the current user is an admin and two factor is enabled' do
+ stub_application_setting require_admin_two_factor_authentication: true
+
+ allow(user).to receive(:admin?).and_return(true)
+
+ expect(subject.two_factor_authentication_reason).to eq(:admin_2fa)
+ end
+
+ it 'returns :group if two factor authentication is enforced through a group setting' do
+ stub_application_setting require_two_factor_authentication: false
+ allow(user).to receive(:require_two_factor_authentication_from_group?).and_return(true)
+
+ expect(subject.two_factor_authentication_reason).to eq(:group)
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 9974e24ad50..fd51ebbc8fa 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'ADMIN_SCOPES contains all scopes for ADMIN access' do
- expect(subject::ADMIN_SCOPES).to match_array %i[sudo admin_mode]
+ expect(subject::ADMIN_SCOPES).to match_array %i[sudo admin_mode read_service_ping]
end
it 'REPOSITORY_SCOPES contains all scopes for REPOSITORY access' do
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'contains all non-default scopes' do
- expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
+ expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository read_service_ping write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository read_service_ping write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes' do
@@ -72,7 +72,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'optional_scopes contains all non-default scopes' do
- expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
+ expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry read_service_ping write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
end
context 'with observability_tracing feature flag' do
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry read_service_ping sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes including observability scopes' do
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb
new file mode 100644
index 00000000000..ea5c7086ac2
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchDataNamespaceId,
+ schema: 20240105144908, feature_category: :team_planning do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:issue_search_data) { table(:issue_search_data) }
+ let(:issue_type) { table(:work_item_types).find_by!(namespace_id: nil, base_type: 0) }
+
+ let(:namespace_1) { namespaces.create!(name: 'namespace1', type: 'Group', path: 'namespace1') }
+ let(:namespace_2) { namespaces.create!(name: 'namespace2', type: 'Group', path: 'namespace2') }
+
+ let(:proj_ns_1) { namespaces.create!(name: 'pn1', path: 'pn1', type: 'Project', parent_id: namespace_1.id) }
+ let(:proj_ns_2) { namespaces.create!(name: 'pn2', path: 'pn2', type: 'Project', parent_id: namespace_2.id) }
+
+ let(:proj_1) do
+ projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace_1.id, project_namespace_id: proj_ns_1.id)
+ end
+
+ let(:proj_2) do
+ projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace_2.id, project_namespace_id: proj_ns_2.id)
+ end
+
+ let(:proj_1_issue_1) do
+ issues.create!(title: 'issue1', project_id: proj_1.id, namespace_id: proj_ns_1.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_1_issue_2) do
+ issues.create!(title: 'issue2', project_id: proj_1.id, namespace_id: proj_ns_1.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_2_issue_1) do
+ issues.create!(title: 'issue1', project_id: proj_2.id, namespace_id: proj_ns_2.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_2_issue_2) do
+ issues.create!(title: 'issue2', project_id: proj_2.id, namespace_id: proj_ns_2.id, work_item_type_id: issue_type.id)
+ end
+
+ let!(:proj_1_issue_1_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_1.id, issue_id: proj_1_issue_1.id)
+ end
+
+ let!(:proj_1_issue_2_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_1.id, issue_id: proj_1_issue_2.id)
+ end
+
+ let!(:proj_2_issue_1_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_2.id, issue_id: proj_2_issue_1.id)
+ end
+
+ let!(:proj_2_issue_2_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_2.id, issue_id: proj_2_issue_2.id)
+ end
+
+ let(:migration) do
+ described_class.new(
+ start_id: proj_1_issue_1.id,
+ end_id: proj_2_issue_2.id,
+ batch_table: :issues,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 2,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ it 'backfills namespace_id for the specified records' do
+ migration.perform
+
+ [proj_1_issue_1, proj_1_issue_2, proj_2_issue_1, proj_2_issue_2].each do |issue|
+ expect(issue_search_data.find_by_issue_id(issue.id).namespace_id).to eq(issue.namespace_id)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..1462848845e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
@@ -0,0 +1,225 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillOwaspTopTenOfVulnerabilityReads,
+ feature_category: :vulnerability_management do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_occurrence_identifiers) { table(:vulnerability_occurrence_identifiers) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
+
+ shared_context 'with vulnerability data' do
+ let(:external_id) { '' }
+ let(:external_type) { '' }
+ let(:identifier_name) { '' }
+
+ let(:vulnerability_1) { create_vulnerability(title: 'vulnerability 1') }
+ let(:vulnerability_2) { create_vulnerability(title: 'vulnerability 2') }
+ let(:vulnerability_3) { create_vulnerability(title: 'vulnerability 3') }
+
+ let(:vuln_identifier) do
+ create_identifier(external_id: external_id, external_type: external_type, name: identifier_name)
+ end
+
+ let(:vuln_finding) do
+ create_finding(vulnerability_id: vulnerability_1.id, primary_identifier_id: vuln_identifier.id)
+ end
+
+ let!(:vulnerability_read_1) { create_vulnerability_read(vulnerability_id: vulnerability_1.id) }
+ let!(:vulnerability_read_2) { create_vulnerability_read(vulnerability_id: vulnerability_2.id) }
+ let!(:vulnerability_read_3) { create_vulnerability_read(vulnerability_id: vulnerability_3.id) }
+
+ before do
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding.id, identifier_id: vuln_identifier.id)
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: vulnerability_reads.first.vulnerability_id,
+ end_id: vulnerability_reads.last.vulnerability_id,
+ batch_table: :vulnerability_reads,
+ batch_column: :vulnerability_id,
+ sub_batch_size: vulnerability_reads.count,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ context 'with owasp top 10 data' do
+ include_context 'with vulnerability data' do
+ let(:external_id) { 'A1:2017-Injection' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+ end
+
+ it 'updates vulnerability_reads' do
+ expect { perform_migration }.to change { vulnerability_read_1.reload.owasp_top_10 }
+ .from(nil).to(1)
+ .and not_change { vulnerability_read_2.reload.owasp_top_10 }.from(nil)
+ end
+
+ it 'updates vulnerability_reads with correct mapping' do
+ vuln_identifier_2 = create_identifier(external_id: 'A1:2021', external_type: 'owasp', name: 'A1 2021')
+ vuln_identifier_3 = create_identifier
+ vuln_finding_2 = create_finding(vulnerability_id: vulnerability_2.id,
+ primary_identifier_id: vuln_identifier_2.id)
+ vuln_finding_3 = create_finding(vulnerability_id: vulnerability_3.id,
+ primary_identifier_id: vuln_identifier_3.id)
+
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_2.id,
+ identifier_id: vuln_identifier_2.id)
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_3.id,
+ identifier_id: vuln_identifier_3.id)
+
+ perform_migration
+
+ expect(vulnerability_read_1.reload.owasp_top_10).to eq(1)
+ expect(vulnerability_read_2.reload.owasp_top_10).to eq(11)
+ expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
+ end
+ end
+
+ context 'with incorrect owasp top 10 data' do
+ include_context 'with vulnerability data'
+
+ shared_examples 'does not update vulnerability_reads' do
+ it do
+ perform_migration
+
+ expect(vulnerability_read_1.reload.owasp_top_10).to be_nil
+ expect(vulnerability_read_2.reload.owasp_top_10).to be_nil
+ expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
+ end
+ end
+
+ context 'with incorrect long format external_id' do
+ let(:external_id) { 'A1:2015-Injection' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+
+ context 'with incorrect short format external_id' do
+ let(:external_id) { 'A1' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+
+ context 'with incorrect external_type' do
+ let(:external_id) { 'A1:2017' }
+ let(:external_type) { 'owasp2017' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+ end
+
+ context 'with no vulnerability identifiers match' do
+ include_context 'with vulnerability data' do
+ let(:external_id) { 'CVE-2018-1234' }
+ let(:external_type) { 'CVE' }
+ let(:identifier_name) { 'CVE-2018-1234' }
+ end
+
+ it 'does not update vulnerability_reads' do
+ perform_migration
+
+ expect(vulnerability_reads.where.not(owasp_top_10: nil).count).to eq(0)
+ end
+ end
+ end
+
+ private
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_vulnerability_read(overrides = {})
+ attrs = {
+ project_id: project.id,
+ vulnerability_id: 1,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ }.merge(overrides)
+
+ vulnerability_reads.create!(attrs)
+ end
+
+ def create_finding(overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: scanner.id,
+ severity: 5, # medium
+ confidence: 2, # unknown,
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier.id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_identifier(overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_vulnerability_occurrence_identifier(overrides = {})
+ time = Time.now.utc
+
+ attrs = {
+ created_at: time,
+ updated_at: time,
+ occurrence_id: nil,
+ identifier_id: nil
+ }.merge(overrides)
+
+ vulnerability_occurrence_identifiers.create!(attrs)
+ end
+
+ def checksum(value)
+ sha = Digest::SHA256.hexdigest(value)
+ Gitlab::Database::ShaAttribute.new.serialize(sha)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb
new file mode 100644
index 00000000000..c466fdaa36a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineArtifact,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_artifacts_table) { table(:ci_pipeline_artifacts, database: :ci) }
+ let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_artifact_100) do
+ ci_pipeline_artifacts_table.create!(
+ id: 1,
+ pipeline_id: pipeline_100.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_artifact_101) do
+ ci_pipeline_artifacts_table.create!(
+ id: 2,
+ pipeline_id: pipeline_101.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_101.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_artifact) do
+ ci_pipeline_artifacts_table.create!(
+ id: 3,
+ pipeline_id: pipeline_102.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_artifacts_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_artifacts_table.maximum(:pipeline_id),
+ batch_table: :ci_pipeline_artifacts,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_artifact.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_artifact_100.reload.partition_id }
+ .and not_change { ci_pipeline_artifact_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_artifact.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..ad1900ab6a6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineChatData,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_chat_data_table) { table(:ci_pipeline_chat_data, database: :ci) }
+ let!(:pipeline1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:invalid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 1,
+ pipeline_id: pipeline1.id,
+ chat_name_id: 1,
+ response_url: '',
+ partition_id: pipeline1.partition_id
+ )
+ end
+
+ let!(:valid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 2,
+ pipeline_id: pipeline2.id,
+ chat_name_id: 2,
+ response_url: '',
+ partition_id: pipeline2.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_chat_data_table.minimum(:id),
+ end_id: ci_pipeline_chat_data_table.maximum(:id),
+ batch_table: :ci_pipeline_chat_data,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ pipeline1.update!(partition_id: 101)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
new file mode 100644
index 00000000000..fad3e277888
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineConfig,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_config_table) { table(:ci_pipelines_config, database: :ci) }
+ let!(:pipeline_1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_3) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_config_100) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_1.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_config_101) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_2.id,
+ content: "content",
+ partition_id: pipeline_2.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_config) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_3.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_config_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_config_table.maximum(:pipeline_id),
+ batch_table: :ci_pipelines_config,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ before do
+ pipeline_3.update!(partition_id: 100)
+ end
+
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_config.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_config_100.reload.partition_id }
+ .and not_change { ci_pipeline_config_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_config.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb
new file mode 100644
index 00000000000..d09d5016dcc
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineMetadata,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_metadata_table) { table(:ci_pipeline_metadata, database: :ci) }
+ let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_metadata_100) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_100.id,
+ project_id: 1,
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_metadata_101) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_101.id,
+ project_id: 1,
+ partition_id: pipeline_101.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_metadata) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_102.id,
+ project_id: 1,
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_metadata_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_metadata_table.maximum(:pipeline_id),
+ batch_table: :ci_pipeline_metadata,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_metadata.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_metadata_100.reload.partition_id }
+ .and not_change { ci_pipeline_metadata_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_metadata.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
new file mode 100644
index 00000000000..725cd7f4bca
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsVersion, schema: 20231212135235, feature_category: :web_ide do
+ let(:vs_code_settings) { table(:vs_code_settings) }
+
+ let(:users) { table(:users) }
+
+ let(:user) do
+ users.create!(
+ email: "test1@example.com",
+ username: "test1",
+ notification_email: "test@example.com",
+ name: "test",
+ state: "active",
+ projects_limit: 10)
+ end
+
+ let(:persistent_settings) { VsCode::Settings::SETTINGS_TYPES.filter { |type| type != 'machines' } }
+
+ subject(:migration) do
+ described_class.new(
+ start_id: vs_code_settings.first.id,
+ end_id: vs_code_settings.last.id,
+ batch_table: :vs_code_settings,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe "#perform" do
+ context 'when it finds vs_code_setting rows with version that is nil or zero' do
+ let(:settings) do
+ persistent_settings.each_with_index.map do |type, index|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: index.odd? ? nil : 0)
+ end
+ end
+
+ it 'sets version field with default value for setting type' do
+ settings.each do |setting|
+ expect(setting.version).to eq(nil).or eq(0)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version)
+ .to eq(described_class::VsCodeSetting::DEFAULT_SETTING_VERSIONS[setting.setting_type])
+ end
+ end
+ end
+
+ context 'when it finds vs_code_setting rows with version that is not nil or zero' do
+ let(:settings) do
+ persistent_settings.map do |type|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: 1)
+ end
+ end
+
+ it 'does not set version field' do
+ settings.each do |setting|
+ expect(setting.version).to eq(1)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version).to eq(1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb b/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb
new file mode 100644
index 00000000000..05817001395
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DropVulnerabilitiesWithoutFindingId, feature_category: :vulnerability_management do # rubocop:disable Layout/LineLength -- autogenerated
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:members) { table(:members) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_scanners) { table(:vulnerability_scanners) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let!(:user) { create_user(email: "test1@example.com", username: "test1") }
+ let!(:namespace) { namespaces.create!(name: "test-1", path: "test-1", owner_id: user.id) }
+ let!(:project) do
+ projects.create!(
+ id: 9999, namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ creator_id: user.id
+ )
+ end
+
+ let!(:membership) do
+ members.create!(access_level: 50, source_id: project.id, source_type: "Project", user_id: user.id, state: 0,
+ notification_level: 3, type: "ProjectMember", member_namespace_id: namespace.id)
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: vulnerabilities.first.id,
+ end_id: vulnerabilities.last.id,
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ describe "#perform" do
+ subject(:background_migration) { described_class.new(**migration_attrs).perform }
+
+ let!(:vulnerability_without_finding_id) { create_vulnerability }
+
+ let!(:vulnerabilities_finding) { create_finding(project) }
+ let!(:vulnerability_with_finding_id) { create_vulnerability(finding_id: vulnerabilities_finding.id) }
+
+ it 'removes all Vulnerabilities without a finding_id' do
+ expect { background_migration }.to change { vulnerabilities.count }.from(2).to(1)
+ end
+ end
+
+ private
+
+ def create_scanner(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "test_vulnerability_scanner",
+ name: "Test Vulnerabilities::Scanner"
+ }.merge(overrides)
+
+ vulnerability_scanners.create!(attrs)
+ end
+
+ def create_identifier(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_finding(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: create_scanner(project).id,
+ severity: 5, # medium
+ confidence: 2, # unknown,
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier(project).id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1,
+ state: 1,
+ detected_at: Time.zone.now
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_user(overrides = {})
+ attrs = {
+ email: "test@example.com",
+ notification_email: "test@example.com",
+ name: "test",
+ username: "test",
+ state: "active",
+ projects_limit: 10
+ }.merge(overrides)
+
+ users.create!(attrs)
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
index 1ae68f9efb8..eeb2f9c8000 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
it 'imports the merge request correctly' do
expect_next(Gitlab::Import::MergeRequestCreator, project).to receive(:execute).and_call_original
expect_next(Gitlab::BitbucketServerImport::UserFinder, project).to receive(:author_id).and_call_original
+ expect_next(Gitlab::BitbucketServerImport::MentionsConverter, project.id).to receive(:convert).and_call_original
+
expect { importer.execute }.to change { MergeRequest.count }.by(1)
merge_request = project.merge_requests.find_by_iid(pull_request.iid)
@@ -34,6 +36,18 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
)
end
+ context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'does not convert mentions' do
+ expect_next(Gitlab::BitbucketServerImport::MentionsConverter, project.id).not_to receive(:convert)
+
+ importer.execute
+ end
+ end
+
context 'when the `bitbucket_server_user_mapping_by_username` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
index 914ebefdb8f..7b662c1a2c7 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let_it_be(:pull_request_data) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
let_it_be(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
+ let(:mentions_converter) { Gitlab::BitbucketServerImport::MentionsConverter.new(project) }
let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
@@ -79,6 +80,10 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
.to receive(:info).with(include(import_stage: stage, message: message))
end
+ before do
+ allow(Gitlab::BitbucketServerImport::MentionsConverter).to receive(:new).and_return(mentions_converter)
+ end
+
subject(:importer) { described_class.new(project.reload, pull_request.to_hash) }
describe '#execute' do
@@ -113,6 +118,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
it 'imports the stand alone comments' do
+ expect(mentions_converter).to receive(:convert).and_call_original
+
expect { subject.execute }.to change { Note.count }.by(1)
expect(merge_request.notes.count).to eq(1)
@@ -124,6 +131,66 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
)
end
+ context 'when the author is not found' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
+ allow(user_finder).to receive(:uid).and_return(nil)
+ end
+ end
+
+ it 'adds a note with the author username and email' do
+ subject.execute
+
+ expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
+ end
+ end
+
+ context 'when the note has a parent note' do
+ let(:pr_note) do
+ instance_double(
+ BitbucketServer::Representation::Comment,
+ note: 'Note',
+ author_email: note_author.email,
+ author_username: note_author.username,
+ comments: [],
+ created_at: now,
+ updated_at: now,
+ parent_comment: pr_parent_note
+ )
+ end
+
+ let(:pr_parent_note) do
+ instance_double(
+ BitbucketServer::Representation::Comment,
+ note: 'Parent note',
+ author_email: note_author.email,
+ author_username: note_author.username,
+ comments: [],
+ created_at: now,
+ updated_at: now,
+ parent_comment: nil
+ )
+ end
+
+ it 'adds the parent note before the actual note' do
+ subject.execute
+
+ expect(Note.first.note).to include("> #{pr_parent_note.note}\n\n")
+ end
+ end
+
+ context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'does not convert mentions' do
+ expect(mentions_converter).not_to receive(:convert)
+
+ subject.execute
+ end
+ end
+
it 'logs its progress' do
expect_log(stage: 'import_standalone_pr_comments', message: 'starting')
expect_log(stage: 'import_standalone_pr_comments', message: 'finished')
@@ -181,6 +248,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
it 'imports the threaded discussion' do
+ expect(mentions_converter).to receive(:convert).and_call_original.twice
+
expect { subject.execute }.to change { Note.count }.by(2)
expect(merge_request.discussions.count).to eq(1)
@@ -204,6 +273,18 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
expect(reply_note.position.new_line).to eq(pr_inline_note.new_pos)
end
+ context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'does not convert mentions' do
+ expect(mentions_converter).not_to receive(:convert)
+
+ subject.execute
+ end
+ end
+
it 'logs its progress' do
expect_log(stage: 'import_inline_comments', message: 'starting')
expect_log(stage: 'import_inline_comments', message: 'finished')
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
index 33d6ab94513..79010390628 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
@@ -52,6 +52,13 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::UsersImporter, feature_
expect(logger).to receive(:info).with(hash_including(message: 'importing page 3 using batch size 2'))
expect(logger).to receive(:info).with(hash_including(message: 'finished'))
+ expect_next_instance_of(Gitlab::Import::PageCounter) do |page_counter|
+ expect(page_counter).to receive(:current).and_call_original.once
+ expect(page_counter).to receive(:set).with(2).and_call_original.once
+ expect(page_counter).to receive(:set).with(3).and_call_original.once
+ expect(page_counter).to receive(:expire!).and_call_original.once
+ end
+
expect(Gitlab::Cache::Import::Caching).to receive(:write_multiple).and_call_original.twice
importer.execute
diff --git a/spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb b/spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb
new file mode 100644
index 00000000000..46800c924c9
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::MentionsConverter, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project_id) { 12 }
+ let(:text) { 'text without @ mentions' }
+ let(:source_user_cache_prefix) { "bitbucket_server/project/#{project_id}/source/username" }
+
+ subject(:converted_text) { described_class.new(project_id).convert(text) }
+
+ describe '#convert' do
+ context 'when the text has no mentions' do
+ it 'does not change the text' do
+ expect(converted_text).to eq(text)
+ end
+ end
+
+ context 'when the text has a mention' do
+ let(:text) { 'mentioning @john' }
+
+ context 'when the mention has matching cached email' do
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/john", 'john@example.com')
+ end
+
+ context 'when a user with the email does not exist on gitlab' do
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+
+ context 'when a user with the same email exists on gitlab' do
+ let_it_be(:user) { create(:user, username: 'johndoe', email: 'john@example.com') }
+
+ it "replaces the mention with the user's username" do
+ expect(converted_text).to eq('mentioning @johndoe')
+ end
+ end
+
+ context 'when a user with the same username but not email exists on gitlab' do
+ let_it_be(:user) { create(:user, username: 'john') }
+
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+ end
+
+ context 'when there is cached email but not for the mentioned username' do
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/jane", 'jane@example.com')
+ end
+
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+
+ context 'when a user with the same email exists on gitlab' do
+ let_it_be(:user) { create(:user, username: 'jane', email: 'jane@example.com') }
+
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+ end
+
+ context 'when the mention has digits, underscores, uppercase and hyphens' do
+ let(:text) { '@john_DOE-123' }
+ let_it_be(:user) { create(:user, username: 'johndoe', email: 'john@example.com') }
+
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/john_DOE-123", 'john@example.com')
+ end
+
+ it "replaces the mention with the user's username" do
+ expect(converted_text).to eq('@johndoe')
+ end
+ end
+
+ context 'when the mention has emails' do
+ let(:text) { "@john's email is john@gmail.com and @jane's email is info@jane." }
+
+ it 'does not alter the emails' do
+ expect(converted_text).to eq("`@john`'s email is john@gmail.com and `@jane`'s email is info@jane.")
+ end
+ end
+
+ context 'when no emails are cached' do
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+ end
+
+ context 'when the text has multiple mentions' do
+ let(:text) { "@john, @jane-doe and @johndoe123 with \n@john again on a newline" }
+
+ context 'if none of the mentions have matching cached emails and users' do
+ it 'puts every mention in backticks' do
+ expect(converted_text).to eq("`@john`, `@jane-doe` and `@johndoe123` with \n`@john` again on a newline")
+ end
+ end
+
+ context 'if one of the mentions have matching user' do
+ let_it_be(:user) { create(:user, username: 'johndoe', email: 'john@example.com') }
+
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/john", 'john@example.com')
+ end
+
+ it 'replaces all mentions with the username and puts rest of mentions in backticks' do
+ expect(converted_text).to eq("@johndoe, `@jane-doe` and `@johndoe123` with \n@johndoe again on a newline")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb b/spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb
new file mode 100644
index 00000000000..73f9cde8322
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::UserFromMention, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project_id) { 11 }
+ let(:username) { '@johndoe' }
+ let(:email) { 'john@gmail.com' }
+ let(:hash) { { key: 'value' } }
+ let(:cache_key) { "bitbucket_server/project/#{project_id}/source/username/#{username}" }
+
+ let(:example) do
+ Class.new do
+ include Gitlab::BitbucketServerImport::UserFromMention
+
+ def initialize(project_id)
+ @project_id = project_id
+ end
+
+ attr_reader :project_id
+
+ def foo(mention)
+ user_from_cache(mention)
+ end
+
+ def bar(hash)
+ cache_multiple(hash)
+ end
+ end
+ end
+
+ subject(:example_class) { example.new(project_id) }
+
+ describe '#user_from_cache' do
+ it 'returns nil if the cache is empty' do
+ expect(example_class.foo(username)).to be_nil
+ end
+
+ context 'when the username and email is cached' do
+ before do
+ ::Gitlab::Cache::Import::Caching.write(cache_key, email)
+ end
+
+ context 'if a user with the email does not exist' do
+ it 'returns nil' do
+ expect(example_class.foo(username)).to be_nil
+ end
+ end
+
+ context 'if a user with the email exists' do
+ let!(:user) { create(:user, email: email) }
+
+ it 'returns the user' do
+ expect(example_class.foo(username)).to eq(user)
+ end
+ end
+ end
+ end
+
+ describe '#cache_multiple' do
+ it 'calls write_multiple with the hash' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:write_multiple).with(hash, timeout: 72.hours)
+
+ example_class.bar(hash)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index 8f1c552e0b7..6cde51b668a 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -224,4 +224,56 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache, :clean
subject { described_class.write_if_greater('foo', value) }
end
end
+
+ describe '.list_add' do
+ it 'adds a value to a list' do
+ described_class.list_add('foo', 10)
+ described_class.list_add('foo', 20)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.lrange(key, 0, -1) }
+
+ expect(values).to eq(%w[10 20])
+ end
+
+ context 'when a limit is provided' do
+ it 'limits the size of the list to the number of items defined by the limit' do
+ described_class.list_add('foo', 10, limit: 3)
+ described_class.list_add('foo', 20, limit: 3)
+ described_class.list_add('foo', 30, limit: 3)
+ described_class.list_add('foo', 40, limit: 3)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.lrange(key, 0, -1) }
+
+ expect(values).to eq(%w[20 30 40])
+ end
+ end
+
+ it_behaves_like 'validated redis value' do
+ subject { described_class.list_add('foo', value) }
+ end
+ end
+
+ describe '.values_from_list' do
+ it 'returns empty hash when the list is empty' do
+ expect(described_class.values_from_list('foo')).to eq([])
+ end
+
+ it 'returns the items stored in the list in order' do
+ described_class.list_add('foo', 10)
+ described_class.list_add('foo', 20)
+ described_class.list_add('foo', 10)
+
+ expect(described_class.values_from_list('foo')).to eq(%w[10 20 10])
+ end
+ end
+
+ describe '.del' do
+ it 'deletes the key' do
+ described_class.write('foo', 'value')
+
+ expect { described_class.del('foo') }.to change { described_class.read('foo') }.from('value').to(nil)
+ end
+ end
end
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index 854c04dd581..d20399cf7cf 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -78,13 +78,13 @@ RSpec.describe Gitlab::Checks::ChangesAccess, feature_category: :source_code_man
end
context 'with oldrev' do
- let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::SHA1_BLANK_SHA }] }
it_behaves_like 'returns only commits with non empty revisions'
end
context 'without oldrev' do
- let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::SHA1_BLANK_SHA }] }
it_behaves_like 'returns only commits with non empty revisions'
end
@@ -94,7 +94,7 @@ RSpec.describe Gitlab::Checks::ChangesAccess, feature_category: :source_code_man
describe '#commits_for' do
let(:new_commits) { [] }
let(:expected_commits) { [] }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
shared_examples 'a listing of new commits' do
it 'returns expected commits' do
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
index 20c6ad8a6e8..8056611b5bd 100644
--- a/spec/lib/gitlab/checks/diff_check_spec.rb
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::Checks::DiffCheck, feature_category: :source_code_managem
end
context 'when deletion is true' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'does not call find_changed_paths' do
expect(project.repository).not_to receive(:find_changed_paths)
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
index 9f001dd1941..f00915bc1ec 100644
--- a/spec/lib/gitlab/checks/lfs_check_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Checks::LfsCheck, feature_category: :source_code_manageme
context 'with blank newrev' do
it_behaves_like 'a skipped integrity check' do
- let(:changes) { [{ oldrev: oldrev, newrev: Gitlab::Git::BLANK_SHA, ref: ref }] }
+ let(:changes) { [{ oldrev: oldrev, newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: ref }] }
end
end
end
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index 0aecf26f42f..4f844f10f34 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Checks::LfsIntegrity, feature_category: :source_code_mana
end
context 'deletion' do
- let(:newrevs) { [Gitlab::Git::BLANK_SHA] }
+ let(:newrevs) { [Gitlab::Git::SHA1_BLANK_SHA] }
it 'skips integrity check' do
expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers)
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index f8c0d69be2e..3854437483d 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined as hash' do
let(:entrypoint) { '/bin/sh' }
let(:pull_policy) { %w[always if-not-present] }
- let(:executor_opts) { { docker: { platform: 'arm64' } } }
+ let(:executor_opts) { { docker: { platform: 'arm64', user: 'dave' } } }
let(:job) do
create(:ci_build, options: { image: { name: image_name,
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
let(:service_entrypoint) { '/bin/sh' }
let(:service_alias) { 'db' }
let(:service_command) { 'sleep 30' }
- let(:executor_opts) { { docker: { platform: 'amd64' } } }
+ let(:executor_opts) { { docker: { platform: 'amd64', user: 'dave' } } }
let(:pull_policy) { %w[always if-not-present] }
let(:job) do
create(:ci_build, options: { services: [{ name: service_image_name, entrypoint: service_entrypoint,
diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
index 00e44650d44..4ee8903dcd3 100644
--- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
ref: 'feature',
source: source,
sha: '0b4bc9a4',
- before_sha: Gitlab::Git::BLANK_SHA,
+ before_sha: Gitlab::Git::SHA1_BLANK_SHA,
merge_request: merge_request
)
end
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 99577539798..61bd9f41182 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -254,6 +254,18 @@ RSpec.describe Gitlab::Ci::Build::Rules, feature_category: :pipeline_composition
end
end
+ context 'with auto_cancel' do
+ context 'with matching rule' do
+ let(:rule_list) { [{ if: '$VAR == null', auto_cancel: { on_new_commit: 'interruptible' } }] }
+
+ it do
+ is_expected.to eq(
+ described_class::Result.new(when: 'on_success', auto_cancel: { on_new_commit: 'interruptible' })
+ )
+ end
+ end
+ end
+
context 'with a regexp variable matching rule' do
let(:rule_list) { [{ if: '"abcde" =~ $pattern' }] }
diff --git a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
index bdd66cc00a1..764908ee040 100644
--- a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::AutoCancel, feature_category: :pipelin
it 'returns errors' do
expect(config.errors)
- .to include('auto cancel on new commit must be one of: conservative, interruptible, disabled')
+ .to include('auto cancel on new commit must be one of: conservative, interruptible, none')
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 35f2a99ee87..04154b72453 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -23,6 +23,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge, feature_category: :continuous_
end
end
+ describe '.visible?' do
+ it 'always returns true' do
+ expect(described_class.visible?).to be_truthy
+ end
+ end
+
describe '.matching?' do
subject { described_class.matching?(name, config) }
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index 99a6e25b313..0a82010c20c 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
- context "when docker specifies an option" do
+ context "when docker specifies platform" do
let(:config) { { name: 'image:1.0', docker: { platform: 'amd64' } } }
it 'is valid' do
@@ -129,15 +129,73 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
)
end
end
+
+ context "when invalid data type is specified for platform option" do
+ let(:config) { { name: 'image:1.0', docker: { platform: 1 } } }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{image executor opts '/docker/platform' must be a valid 'string'}
+ end
+ end
+ end
+
+ context "when docker specifies user" do
+ let(:config) { { name: 'image:1.0', docker: { user: 'dave' } } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: { user: 'dave' }
+ }
+ )
+ end
+ end
+
+ context "when user is a UID" do
+ let(:config) { { name: 'image:1.0', docker: { user: '1001' } } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: { user: '1001' }
+ }
+ )
+ end
+ end
+ end
+
+ context "when invalid data type is specified for user option" do
+ let(:config) { { name: 'image:1.0', docker: { user: 1 } } }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{image executor opts '/docker/user' must be a valid 'string'}
+ end
+ end
end
context "when docker specifies an invalid option" do
- let(:config) { { name: 'image:1.0', docker: { platform: 1 } } }
+ let(:config) { { name: 'image:1.0', docker: { unknown_key: 'foo' } } }
it 'is not valid' do
expect(entry).not_to be_valid
expect(entry.errors.first)
- .to match %r{image executor opts '/docker/platform' must be a valid 'string'}
+ .to match %r{image executor opts '/docker/unknown_key' must be a valid 'schema'}
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index cd8e35ede61..a9f891a7b50 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do
diff --git a/spec/lib/gitlab/ci/config/entry/needs_spec.rb b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
index d1a8a74ac06..61bb3e912ba 100644
--- a/spec/lib/gitlab/ci/config/entry/needs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
@@ -52,6 +52,27 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs, feature_category: :pipeline_c
end
end
+ context 'when config has disallowed keys' do
+ let(:config) { ['some_value'] }
+
+ before do
+ needs.metadata[:allowed_needs] = %i[cross_dependency]
+ needs.compose!
+ end
+
+ describe '#valid?' do
+ it 'returns invalid' do
+ expect(needs.valid?).to be_falsey
+ end
+ end
+
+ describe '#errors' do
+ it 'returns invalid types error' do
+ expect(needs.errors).to include('needs config uses invalid types: job')
+ end
+ end
+ end
+
context 'when wrong needs type is used' do
let(:config) { [{ job: 'job_name', artifacts: true, some: :key }] }
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 3531d6e9f1a..d5bf532c216 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'gitlab_chronic_duration'
-RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
+RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.metadata(metadata)
@@ -11,7 +11,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
let(:metadata) do
- { allowed_when: %w[on_success on_failure always never manual delayed] }
+ {
+ allowed_when: %w[on_success on_failure always never manual delayed],
+ allowed_keys: %i[if changes exists when start_in allow_failure variables needs auto_cancel]
+ }
end
let(:entry) { factory.create! }
@@ -296,18 +299,18 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
end
- context 'with a string passed in metadata but not allowed in the class' do
- let(:metadata) { { allowed_when: %w[explode] } }
+ context 'with an invalid when' do
+ let(:metadata) { { allowed_when: %w[always never], allowed_keys: %i[if when] } }
let(:config) do
- { if: '$THIS == "that"', when: 'explode' }
+ { if: '$THIS == "that"', when: 'on_success' }
end
it { is_expected.to be_a(described_class) }
it { is_expected.not_to be_valid }
it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: explode/)
+ expect(subject.errors).to include(/when unknown value: on_success/)
end
context 'when composed' do
@@ -318,41 +321,30 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: explode/)
+ expect(subject.errors).to include(/when unknown value: on_success/)
end
end
end
- context 'with a string allowed in the class but not passed in metadata' do
- let(:metadata) { { allowed_when: %w[always never] } }
-
+ context 'with an invalid variables' do
let(:config) do
- { if: '$THIS == "that"', when: 'on_success' }
+ { if: '$THIS == "that"', variables: 'hello' }
end
- it { is_expected.to be_a(described_class) }
- it { is_expected.not_to be_valid }
-
- it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: on_success/)
+ before do
+ subject.compose!
end
- context 'when composed' do
- before do
- subject.compose!
- end
-
- it { is_expected.not_to be_valid }
+ it { is_expected.not_to be_valid }
- it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: on_success/)
- end
+ it 'returns an error about invalid variables:' do
+ expect(subject.errors).to include(/variables config should be a hash/)
end
end
- context 'with an invalid variables' do
+ context 'with an invalid auto_cancel' do
let(:config) do
- { if: '$THIS == "that"', variables: 'hello' }
+ { if: '$THIS == "that"', auto_cancel: { on_new_commit: 'xyz' } }
end
before do
@@ -361,8 +353,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
- it 'returns an error about invalid variables:' do
- expect(subject.errors).to include(/variables config should be a hash/)
+ it 'returns an error' do
+ expect(subject.errors).to include(
+ 'auto_cancel on new commit must be one of: conservative, interruptible, none')
end
end
end
@@ -445,6 +438,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.to eq(config) }
end
+
+ context 'when it has auto_cancel' do
+ let(:config) { { if: '$THIS || $THAT', auto_cancel: { on_new_commit: 'interruptible' } } }
+
+ it { is_expected.to eq(config) }
+ end
end
describe '.default' do
diff --git a/spec/lib/gitlab/ci/config/entry/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
index b0871f2345e..0113b6c1f7f 100644
--- a/spec/lib/gitlab/ci/config/entry/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
@@ -1,16 +1,18 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_dependency 'active_model'
-RSpec.describe Gitlab::Ci::Config::Entry::Rules do
+RSpec.describe Gitlab::Ci::Config::Entry::Rules, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.metadata(metadata)
.value(config)
end
- let(:metadata) { { allowed_when: %w[always never] } }
+ let(:metadata) do
+ { allowed_when: %w[always never], allowed_keys: %i[if when] }
+ end
subject(:entry) { factory.create! }
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 82747e7b521..8ce0f890b46 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -154,22 +154,45 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
end
context 'when configuration has docker options' do
- let(:config) { { name: 'postgresql:9.5', docker: { platform: 'amd64' } } }
+ context "with platform option" do
+ let(:config) { { name: 'postgresql:9.5', docker: { platform: 'amd64' } } }
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ executor_opts: {
+ docker: { platform: 'amd64' }
+ }
+ )
+ end
end
end
- describe '#value' do
- it "returns value" do
- expect(entry.value).to eq(
- name: 'postgresql:9.5',
- executor_opts: {
- docker: { platform: 'amd64' }
- }
- )
+ context "with user option" do
+ let(:config) { { name: 'postgresql:9.5', docker: { user: 'dave' } } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ executor_opts: {
+ docker: { user: 'dave' }
+ }
+ )
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
index d3ce3ffe641..dbd25010884 100644
--- a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
@@ -6,6 +6,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
subject(:config) { described_class.new(workflow_hash) }
describe 'validations' do
+ before do
+ config.compose!
+ end
+
context 'when work config value is a string' do
let(:workflow_hash) { 'build' }
@@ -27,6 +31,28 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
context 'when work config value is a hash' do
+ context 'with an invalid key' do
+ let(:workflow_hash) { { trash: [{ if: '$VAR' }] } }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'attaches an error specifying the unknown key' do
+ expect(config.errors).to include('workflow config contains unknown keys: trash')
+ end
+ end
+
+ describe '#value' do
+ it 'returns the invalid configuration' do
+ expect(config.value).to eq(workflow_hash)
+ end
+ end
+ end
+ end
+
+ context 'when config has rules' do
let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
describe '#valid?' do
@@ -45,8 +71,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
end
- context 'with an invalid key' do
- let(:workflow_hash) { { trash: [{ if: '$VAR' }] } }
+ context 'when rules has an invalid key' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR', trash: 'something' }] } }
describe '#valid?' do
it 'is invalid' do
@@ -54,7 +80,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
it 'attaches an error specifying the unknown key' do
- expect(config.errors).to include('workflow config contains unknown keys: trash')
+ expect(config.errors).to include('rules:rule config contains unknown keys: trash')
end
end
@@ -64,6 +90,41 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
end
end
+
+ context 'when rules has auto_cancel' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR', auto_cancel: { on_new_commit: 'interruptible' } }] } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'attaches no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+
+ describe '#value' do
+ it 'returns the config' do
+ expect(config.value).to eq(workflow_hash)
+ end
+ end
+
+ context 'when auto_cancel has an invalid value' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR', auto_cancel: { on_new_commit: 'xyz' } }] } }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'returns error' do
+ expect(config.errors).to include(
+ 'rules:rule:auto_cancel on new commit must be one of: conservative, interruptible, none')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index 9ac72ebbac8..3409fc53d19 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -159,10 +159,14 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
shared_examples 'a mutated context' do
let(:mutated) { subject.mutate(new_attributes) }
+ let(:lazy_response) { double('lazy_response') }
before do
+ allow(lazy_response).to receive(:execute).and_return(lazy_response)
+
subject.expandset << :a_file
subject.set_deadline(15.seconds)
+ subject.execute_remote_parallel_request(lazy_response)
end
it { expect(mutated).not_to eq(subject) }
@@ -170,8 +174,9 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
it { expect(mutated).to have_attributes(new_attributes) }
it { expect(mutated.pipeline).to eq(subject.pipeline) }
it { expect(mutated.expandset).to eq(subject.expandset) }
- it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) }
- it { expect(mutated.logger).to eq(mutated.logger) }
+ it { expect(mutated.execution_deadline).to eq(subject.execution_deadline) }
+ it { expect(mutated.logger).to eq(subject.logger) }
+ it { expect(mutated.parallel_requests).to eq(subject.parallel_requests) }
end
context 'with attributes' do
@@ -212,4 +217,80 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
end
end
end
+
+ describe '#execute_remote_parallel_request' do
+ let(:lazy_response1) { double('lazy_response', wait: true, complete?: complete1) }
+ let(:lazy_response2) { double('lazy_response') }
+
+ let(:complete1) { false }
+
+ before do
+ allow(lazy_response1).to receive(:execute).and_return(lazy_response1)
+ allow(lazy_response2).to receive(:execute).and_return(lazy_response2)
+ end
+
+ context 'when the queue is empty' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 2)
+ end
+
+ it 'adds the new lazy response to the queue' do
+ expect { subject.execute_remote_parallel_request(lazy_response1) }
+ .to change { subject.parallel_requests }
+ .from([])
+ .to([lazy_response1])
+ end
+ end
+
+ context 'when there is a lazy response in the queue' do
+ before do
+ subject.execute_remote_parallel_request(lazy_response1)
+ end
+
+ context 'when there is a free slot in the queue' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 2)
+ end
+
+ it 'adds the new lazy response to the queue' do
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response1, lazy_response2])
+ end
+ end
+
+ context 'when the queue is full' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 1)
+ end
+
+ context 'when the first lazy response in the queue is complete' do
+ let(:complete1) { true }
+
+ it 'removes the completed lazy response and adds the new one to the queue' do
+ expect(lazy_response1).not_to receive(:wait)
+
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response2])
+ end
+ end
+
+ context 'when the first lazy response in the queue is not complete' do
+ let(:complete1) { false }
+
+ it 'waits for the first lazy response to complete and then adds the new one to the queue' do
+ expect(lazy_response1).to receive(:wait)
+
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response1, lazy_response2])
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
index 88e272ac3fd..7907837db6a 100644
--- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
@@ -146,6 +146,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
external_resource.content
end
+
+ context 'when user is missing in a context' do
+ let_it_be(:user) { nil }
+
+ it 'does not track the event' do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ external_resource.content
+ end
+ end
end
context 'when component is invalid' do
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index 7293e640112..adca9e750d0 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -157,6 +157,40 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
it_behaves_like "#content"
end
+ describe '#preload_content' do
+ context 'when the parallel request queue is full' do
+ let(:location1) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.secret_file1.yml' }
+ let(:location2) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.secret_file2.yml' }
+
+ before do
+ # Makes the parallel queue full easily
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 1)
+
+ # Adding a failing promise to the queue
+ promise = Concurrent::Promise.new do
+ sleep 1.1
+ raise Timeout::Error
+ end
+
+ context.execute_remote_parallel_request(
+ Gitlab::HTTP_V2::LazyResponse.new(promise, location1, {}, nil)
+ )
+
+ stub_full_request(location2).to_return(body: remote_file_content)
+ end
+
+ it 'waits for the queue' do
+ file2 = described_class.new({ remote: location2 }, context)
+
+ start_at = Time.current
+ file2.preload_content
+ end_at = Time.current
+
+ expect(end_at - start_at).to be > 1
+ end
+ end
+ end
+
describe "#error_message" do
subject(:error_message) do
Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file])
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
index 2c57106b07c..9718d16756c 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
@@ -3,118 +3,173 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Sbom::CyclonedxProperties, feature_category: :dependency_management do
- subject(:parse_source_from_properties) { described_class.parse_source(properties) }
+ shared_examples 'handling invalid properties' do
+ context 'when properties are nil' do
+ let(:properties) { nil }
- context 'when properties are nil' do
- let(:properties) { nil }
+ it { is_expected.to be_nil }
+ end
+
+ context 'when report does not have valid properties' do
+ let(:properties) { ['name' => 'foo', 'value' => 'bar'] }
- it { is_expected.to be_nil }
+ it { is_expected.to be_nil }
+ end
end
- context 'when report does not have gitlab properties' do
- let(:properties) { ['name' => 'foo', 'value' => 'bar'] }
+ describe '#parse_source' do
+ subject(:parse_source_from_properties) { described_class.parse_source(properties) }
- it { is_expected.to be_nil }
- end
+ it_behaves_like 'handling invalid properties'
- context 'when schema_version is missing' do
- let(:properties) do
- [
- { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
- { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
- { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
- ]
- end
+ context 'when schema_version is missing' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
+ ]
+ end
- it { is_expected.to be_nil }
- end
+ it { is_expected.to be_nil }
+ end
- context 'when schema version is unsupported' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '2' },
- { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
- { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
- { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
- ]
+ context 'when schema version is unsupported' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '2' },
+ { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
+ ]
+ end
+
+ it { is_expected.to be_nil }
end
- it { is_expected.to be_nil }
- end
+ context 'when no dependency_scanning or container_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab::aquasecurity:trivy:FilePath', 'value' => '1' }
+ ]
+ end
- context 'when no dependency_scanning or container_scanning properties are present' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '1' }
- ]
+ it 'does not call source parsers' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:source)
+ expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).not_to receive(:source)
+
+ parse_source_from_properties
+ end
end
- it 'does not call source parsers' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:source)
- expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).not_to receive(:source)
+ context 'when dependency_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab:dependency_scanning:category', 'value' => 'development' },
+ { 'name' => 'gitlab:dependency_scanning:input_file:path', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:source_file:path', 'value' => 'package.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager:name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language:name', 'value' => 'JavaScript' },
+ { 'name' => 'gitlab:dependency_scanning:unsupported_property', 'value' => 'Should be ignored' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'category' => 'development',
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ }
+ end
- parse_source_from_properties
- end
- end
+ it 'passes only supported properties to the dependency scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).to receive(:source).with(expected_input)
- context 'when dependency_scanning properties are present' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
- { 'name' => 'gitlab:dependency_scanning:category', 'value' => 'development' },
- { 'name' => 'gitlab:dependency_scanning:input_file:path', 'value' => 'package-lock.json' },
- { 'name' => 'gitlab:dependency_scanning:source_file:path', 'value' => 'package.json' },
- { 'name' => 'gitlab:dependency_scanning:package_manager:name', 'value' => 'npm' },
- { 'name' => 'gitlab:dependency_scanning:language:name', 'value' => 'JavaScript' },
- { 'name' => 'gitlab:dependency_scanning:unsupported_property', 'value' => 'Should be ignored' }
- ]
+ parse_source_from_properties
+ end
end
- let(:expected_input) do
- {
- 'category' => 'development',
- 'input_file' => { 'path' => 'package-lock.json' },
- 'source_file' => { 'path' => 'package.json' },
- 'package_manager' => { 'name' => 'npm' },
- 'language' => { 'name' => 'JavaScript' }
- }
- end
+ context 'when container_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab:container_scanning:image:name', 'value' => 'photon' },
+ { 'name' => 'gitlab:container_scanning:image:tag', 'value' => '5.0-20231007' },
+ { 'name' => 'gitlab:container_scanning:operating_system:name', 'value' => 'Photon OS' },
+ { 'name' => 'gitlab:container_scanning:operating_system:version', 'value' => '5.0' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'image' => {
+ 'name' => 'photon',
+ 'tag' => '5.0-20231007'
+ },
+ 'operating_system' => {
+ 'name' => 'Photon OS',
+ 'version' => '5.0'
+ }
+ }
+ end
- it 'passes only supported properties to the dependency scanning parser' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).to receive(:source).with(expected_input)
+ it 'passes only supported properties to the container scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).to receive(:source).with(expected_input)
- parse_source_from_properties
+ parse_source_from_properties
+ end
end
end
- context 'when container_scanning properties are present' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
- { 'name' => 'gitlab:container_scanning:image:name', 'value' => 'photon' },
- { 'name' => 'gitlab:container_scanning:image:tag', 'value' => '5.0-20231007' },
- { 'name' => 'gitlab:container_scanning:operating_system:name', 'value' => 'Photon OS' },
- { 'name' => 'gitlab:container_scanning:operating_system:version', 'value' => '5.0' }
- ]
+ describe '#parse_trivy_source' do
+ subject(:parse_trivy_source_from_properties) { described_class.parse_trivy_source(properties) }
+
+ it_behaves_like 'handling invalid properties'
+
+ context 'when no trivy properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab::aquasecurity:trivy:FilePath', 'value' => '1' }
+ ]
+ end
+
+ it 'does not call source parsers' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::Trivy).not_to receive(:source)
+
+ parse_trivy_source_from_properties
+ end
end
- let(:expected_input) do
- {
- 'image' => {
- 'name' => 'photon',
- 'tag' => '5.0-20231007'
- },
- 'operating_system' => {
- 'name' => 'Photon OS',
- 'version' => '5.0'
+ context 'when trivy properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'aquasecurity:trivy:PkgID', 'value' => 'sha256:47ce8fad8..' },
+ { 'name' => 'aquasecurity:trivy:LayerDigest',
+ 'value' => 'registry.test.com/atiwari71/container-scanning-test/main@sha256:e14a4bcf..' },
+ { 'name' => 'aquasecurity:trivy:LayerDiffID', 'value' => 'sha256:94dd7d531fa..' },
+ { 'name' => 'aquasecurity:trivy:SrcEpoch', 'value' => 'sha256:5d20c808c..' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'PkgID' => 'sha256:47ce8fad8..',
+ 'LayerDigest' => 'registry.test.com/atiwari71/container-scanning-test/main@sha256:e14a4bcf..',
+ 'LayerDiffID' => 'sha256:94dd7d531fa..',
+ 'SrcEpoch' => 'sha256:5d20c808c..'
}
- }
- end
+ end
- it 'passes only supported properties to the container scanning parser' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).to receive(:source).with(expected_input)
+ it 'passes only supported properties to the trivy parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::Trivy).to receive(:source).with(expected_input)
- parse_source_from_properties
+ parse_trivy_source_from_properties
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
index 9c8402faf77..6a6fe59bce1 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -125,6 +125,56 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
parse!
end
+ context 'when component is trivy type' do
+ let(:parsed_properties) do
+ {
+ 'PkgID' => 'adduser@3.134',
+ 'PkgType' => 'debian'
+ }
+ end
+
+ let(:components) do
+ [
+ {
+ # Trivy component
+ "bom-ref" => "0eda252d-d8a4-4250-b816-b6314f029063",
+ "type" => "library",
+ "name" => "analyzer",
+ "purl" => "pkg:gem/activesupport@5.1.4",
+ "properties" => [
+ {
+ "name" => "aquasecurity:trivy:PkgID",
+ "value" => "apt@2.6.1"
+ },
+ {
+ "name" => "aquasecurity:trivy:PkgType",
+ "value" => "debian"
+ }
+ ]
+ }
+ ]
+ end
+
+ before do
+ allow(properties_parser).to receive(:parse_trivy_source).and_return(parsed_properties)
+ stub_const('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties', properties_parser)
+ end
+
+ it 'adds each component, ignoring unused attributes' do
+ expect(report).to receive(:add_component)
+ .with(
+ an_object_having_attributes(
+ component_type: "library",
+ properties: parsed_properties,
+ purl: an_object_having_attributes(
+ type: "gem"
+ )
+ )
+ )
+ parse!
+ end
+ end
+
context 'when a component has an invalid purl' do
before do
components.push(
diff --git a/spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb
new file mode 100644
index 00000000000..460ca4f28a1
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Sbom::Source::Trivy, feature_category: :dependency_management do
+ subject { described_class.source(property_data) }
+
+ context 'when all property data is present' do
+ let(:property_data) do
+ {
+ 'PkgID' => 'sha256:47ce8fad8..',
+ 'LayerDigest' => 'registry.test.com/atiwari71/container-scanning-test/main@sha256:e14a4bcf..',
+ 'LayerDiffID' => 'sha256:94dd7d531fa..',
+ 'SrcEpoch' => 'sha256:5d20c808c..'
+ }
+ end
+
+ it 'returns expected source data' do
+ is_expected.to have_attributes(
+ source_type: :trivy,
+ data: property_data
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 431a6d94c48..6aa526c1829 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
context 'when name is provided' do
it 'sets name from the report as a name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ finding = report.findings.second
expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
expect(finding.name).to eq(expected_name)
@@ -197,7 +197,8 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
let(:location) { nil }
it 'returns only identifier name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ finding = report.findings.third
+
expect(finding.name).to eq("CVE-2017-11429")
end
end
@@ -205,21 +206,24 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
context 'when location exists' do
context 'when CVE identifier exists' do
it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ finding = report.findings.third
+
expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
end
end
context 'when CWE identifier exists' do
it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ finding = report.findings.fourth
+
expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
end
end
context 'when neither CVE nor CWE identifier exist' do
it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ finding = report.findings.fifth
+
expect(finding.name).to eq("other-2017-11429 in yarn.lock")
end
end
@@ -476,6 +480,20 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
end
end
end
+
+ describe 'handling the unicode null characters' do
+ let(:artifact) { build(:ci_job_artifact, :common_security_report_with_unicode_null_character) }
+
+ it 'escapes the unicode null characters while parsing the report' do
+ finding = report.findings.first
+
+ expect(finding.solution).to eq('Upgrade to latest version.\u0000')
+ end
+
+ it 'adds warning to report' do
+ expect(report.warnings).to include({ type: 'Parsing', message: 'Report artifact contained unicode null characters which are escaped during the ingestion.' })
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 68158503628..37535b80cd4 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -200,7 +200,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
let(:command) { described_class.new(project: project) }
it 'uses BLANK_SHA' do
- is_expected.to eq(Gitlab::Git::BLANK_SHA)
+ is_expected.to eq(Gitlab::Git::SHA1_BLANK_SHA)
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
index 44ccb1eeae1..bf146791659 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -12,13 +12,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
end
let(:step) { described_class.new(pipeline, command) }
- let(:ff_always_set_pipeline_failure_reason) { true }
describe '#perform!' do
context 'when pipeline has been skipped by workflow configuration' do
before do
- stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
-
allow(step).to receive(:workflow_rules_result)
.and_return(
double(pass?: false, variables: {})
@@ -47,15 +44,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
expect(pipeline).to be_failed
expect(pipeline).to be_filtered_by_workflow_rules
end
-
- context 'when always_set_pipeline_failure_reason is disabled' do
- let(:ff_always_set_pipeline_failure_reason) { false }
-
- it 'does not set the failure reason', :aggregate_failures do
- expect(pipeline).not_to be_failed
- expect(pipeline.failure_reason).to be_blank
- end
- end
end
context 'when pipeline has not been skipped by workflow configuration' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
index 84c2fb6525e..5956137a725 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
@@ -52,22 +52,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
expect(pipeline.status).to eq 'failed'
expect(pipeline.failure_reason).to eq drop_reason.to_s
end
-
- context 'when feature flag always_set_pipeline_failure_reason is false' do
- before do
- stub_feature_flags(always_set_pipeline_failure_reason: false)
- end
-
- specify do
- subject.error(message, config_error: config_error, drop_reason: drop_reason)
-
- if command.save_incompleted
- expect(pipeline.failure_reason).to eq drop_reason.to_s
- else
- expect(pipeline.failure_reason).not_to be_present
- end
- end
- end
end
context 'when the error includes malicious HTML' do
@@ -93,6 +77,37 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
end
end
+ context 'when drop_reason is nil' do
+ let(:command) { double(project: nil) }
+
+ shared_examples "error function with no drop reason" do
+ it 'drops without failure reason' do
+ expect(command).to receive(:increment_pipeline_failure_reason_counter)
+
+ call_error
+
+ expect(pipeline.failure_reason).to be_nil
+ expect(pipeline.yaml_errors).to be_nil
+ expect(pipeline.errors[:base]).to include(message)
+ expect(pipeline).to be_failed
+ expect(pipeline).not_to be_persisted
+ end
+ end
+
+ context 'when no drop_reason argument is passed' do
+ let(:call_error) { subject.error(message) }
+
+ it_behaves_like "error function with no drop reason"
+ end
+
+ context 'when drop_reason argument is passed as nil' do
+ let(:drop_reason) { nil }
+ let(:call_error) { subject.error(message, drop_reason: drop_reason) }
+
+ it_behaves_like "error function with no drop reason"
+ end
+ end
+
context 'when config error is false' do
context 'does not set the yaml error or override the drop reason' do
let(:drop_reason) { :size_limit_exceeded }
@@ -107,7 +122,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
expect(pipeline).to be_persisted
end
- context ' when the drop reason is not persistable' do
+ context 'when the drop reason is not persistable' do
let(:drop_reason) { :filtered_by_rules }
let(:command) { double(project: nil) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
index 732748d8c8b..787a458f0ff 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
@@ -240,6 +240,78 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
+
+ context 'with workflow:rules:auto_cancel' do
+ context 'with auto_cancel:on_new_commit not set and rules:workflow:auto_cancel:on_new_commit set' do
+ let(:config) do
+ {
+ variables: { MY_VAR: my_var_value },
+ workflow: {
+ auto_cancel: { on_job_failure: 'all' },
+ rules: [{ if: '$MY_VAR == "something"', auto_cancel: { on_new_commit: 'interruptible' } }]
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ context 'when the rule is matched' do
+ let(:my_var_value) { 'something' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+ end
+
+ context 'when the rule is not matched' do
+ let(:my_var_value) { 'something else' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+ end
+ end
+
+ context 'with auto_cancel:on_new_commit set and rules:workflow:auto_cancel:on_new_commit set' do
+ let(:config) do
+ {
+ variables: { MY_VAR: my_var_value },
+ workflow: {
+ auto_cancel: { on_new_commit: 'interruptible' },
+ rules: [{ if: '$MY_VAR == "something"', auto_cancel: { on_new_commit: 'none' } }]
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ context 'when the rule is matched' do
+ let(:my_var_value) { 'something' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('none')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+
+ context 'when the rule is not matched' do
+ let(:my_var_value) { 'something else' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+ end
+ end
end
context 'with both pipeline name and auto_cancel' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 476b1be35a9..22ff367c746 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -34,15 +34,12 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
{ rspec: { script: 'rspec' } }
end
- let(:ff_always_set_pipeline_failure_reason) { true }
-
def run_chain
dependencies.map(&:perform!)
step.perform!
end
before do
- stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -113,18 +110,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
expect(pipeline).to be_failed
expect(pipeline).to be_filtered_by_rules
end
-
- context 'when ff always_set_pipeline_failure_reason is disabled' do
- let(:ff_always_set_pipeline_failure_reason) { false }
-
- it 'sets the failure reason without persisting the pipeline', :aggregate_failures do
- run_chain
-
- expect(pipeline).not_to be_persisted
- expect(pipeline).not_to be_failed
- expect(pipeline).not_to be_filtered_by_rules
- end
- end
end
describe 'pipeline protect' do
diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb
index d7f967f1c55..dabee0f32de 100644
--- a/spec/lib/gitlab/ci/reports/security/report_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::Security::Report do
+RSpec.describe Gitlab::Ci::Reports::Security::Report, feature_category: :vulnerability_management do
let_it_be(:pipeline) { create(:ci_pipeline) }
let(:created_at) { 2.weeks.ago }
@@ -89,7 +89,7 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do
let(:other_report) do
create(
:ci_reports_security_report,
- findings: [create(:ci_reports_security_finding, compare_key: 'other_finding')],
+ findings: [create(:ci_reports_security_finding)],
scanners: [create(:ci_reports_security_scanner, external_id: 'other_scanner', name: 'Other Scanner')],
identifiers: [create(:ci_reports_security_identifier, external_id: 'other_id', name: 'other_scanner')]
)
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index f8d67a6f0b4..18ad723b75c 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -152,51 +152,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
end
end
- context 'when truncate_ci_merge_request_description feature flag is disabled' do
- before do
- stub_feature_flags(truncate_ci_merge_request_description: false)
- end
-
- context 'when merge request description hits the limit' do
- let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH + 1) }
-
- it 'does not truncate the exposed description' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
- )
- expect(subject.to_hash)
- .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
- end
- end
-
- context 'when merge request description fits the length limit' do
- let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH - 1) }
-
- it 'does not truncate the exposed description' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
- )
- expect(subject.to_hash)
- .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
- end
- end
-
- context 'when merge request description does not exist' do
- let(:merge_request_description) { nil }
-
- it 'does not truncate the exposed description' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
- )
- expect(subject.to_hash)
- .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
- end
- end
- end
-
it 'exposes diff variables' do
expect(subject.to_hash)
.to include(
diff --git a/spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb b/spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb
new file mode 100644
index 00000000000..d8f8a58edf3
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ RSpec.describe YamlProcessor, feature_category: :pipeline_composition do
+ include StubRequests
+
+ subject(:processor) do
+ described_class.new(config, project: project, user: project.first_owner, logger: logger)
+ end
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:logger) { Gitlab::Ci::Pipeline::Logger.new(project: project) }
+ let(:result) { processor.execute }
+ let(:builds) { result.builds }
+
+ context 'with include:remote' do
+ let(:config) do
+ <<~YAML
+ include:
+ - remote: http://my.domain.com/config1.yml
+ - remote: http://my.domain.com/config2.yml
+ YAML
+ end
+
+ before do
+ stub_full_request('http://my.domain.com/config1.yml')
+ .to_return(body: 'build1: { script: echo Hello World }')
+
+ stub_full_request('http://my.domain.com/config2.yml')
+ .to_return(body: 'build2: { script: echo Hello World }')
+ end
+
+ it 'returns builds from included files' do
+ expect(builds.pluck(:name)).to eq %w[build1 build2]
+ end
+
+ it 'stores instrumentation logs' do
+ result
+
+ expect(logger.observations_hash['config_mapper_process_duration_s']['count']).to eq(1)
+ end
+
+ # Remove with the FF ci_parallel_remote_includes
+ it 'does not store log with config_file_fetch_remote_content' do
+ result
+
+ expect(logger.observations_hash).not_to have_key('config_file_fetch_remote_content_duration_s')
+ end
+
+ context 'when the FF ci_parallel_remote_includes is disabled' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: false)
+ end
+
+ it 'stores log with config_file_fetch_remote_content' do
+ result
+
+ expect(logger.observations_hash['config_file_fetch_remote_content_duration_s']['count']).to eq(2)
+ end
+
+ context 'when the FF is specifically enabled for the project' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: [project])
+ end
+
+ it 'does not store log with config_file_fetch_remote_content' do
+ result
+
+ expect(logger.observations_hash).not_to have_key('config_file_fetch_remote_content_duration_s')
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
index 03ff7077969..297872f4cf3 100644
--- a/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
@@ -5,9 +5,10 @@ require 'spec_helper'
module Gitlab
module Ci
RSpec.describe YamlProcessor, feature_category: :pipeline_composition do
- subject(:processor) { described_class.new(config, user: nil).execute }
+ subject(:processor) { described_class.new(config, user: nil) }
- let(:builds) { processor.builds }
+ let(:result) { processor.execute }
+ let(:builds) { result.builds }
context 'with interruptible' do
let(:default_config) { nil }
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 844a6849c8f..4f759109b26 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -516,6 +516,32 @@ module Gitlab
})
end
end
+
+ context 'with rules and auto_cancel' do
+ let(:config) do
+ <<-YML
+ workflow:
+ rules:
+ - if: $VAR == "value"
+ auto_cancel:
+ on_new_commit: none
+ on_job_failure: none
+
+ hello:
+ script: echo world
+ YML
+ end
+
+ it 'parses workflow_rules' do
+ expect(subject.workflow_rules).to contain_exactly({
+ if: '$VAR == "value"',
+ auto_cancel: {
+ on_new_commit: 'none',
+ on_job_failure: 'none'
+ }
+ })
+ end
+ end
end
describe '#warnings' do
@@ -1295,10 +1321,12 @@ module Gitlab
name: ruby:2.7
docker:
platform: linux/amd64
+ user: dave
services:
- name: postgres:11.9
docker:
platform: linux/amd64
+ user: john
YAML
end
@@ -1313,9 +1341,9 @@ module Gitlab
options: {
script: ["exit 0"],
image: { name: "ruby:2.7",
- executor_opts: { docker: { platform: 'linux/amd64' } } },
+ executor_opts: { docker: { platform: 'linux/amd64', user: 'dave' } } },
services: [{ name: "postgres:11.9",
- executor_opts: { docker: { platform: 'linux/amd64' } } }]
+ executor_opts: { docker: { platform: 'linux/amd64', user: 'john' } } }]
},
allow_failure: false,
when: "on_success",
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb
new file mode 100644
index 00000000000..103df128dac
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjects::JobArtifactObject, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ let(:job_artifact_object) do
+ described_class.new(
+ fog_file,
+ bucket_prefix: bucket_prefix
+ )
+ end
+
+ # rubocop:disable RSpec/VerifiedDoubles -- For some reason it can't see Fog::AWS::Storage::File
+ let(:fog_file) { double(key: fog_file_key, content_length: 145) }
+ # rubocop:enable RSpec/VerifiedDoubles
+
+ let(:fog_file_key) { 'aaa/bbb/123' }
+ let(:bucket_prefix) { nil }
+
+ describe '#path' do
+ subject { job_artifact_object.path }
+
+ it { is_expected.to eq(fog_file.key) }
+ end
+
+ describe '#size' do
+ subject { job_artifact_object.size }
+
+ it { is_expected.to eq(fog_file.content_length) }
+ end
+
+ describe '#in_final_location?' do
+ subject { job_artifact_object.in_final_location? }
+
+ context 'when path has @final in it' do
+ let(:fog_file_key) { 'aaa/bbb/@final/123/ccc' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when path has no @final in it' do
+ let(:fog_file_key) { 'aaa/bbb/ccc' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#orphan?' do
+ shared_examples_for 'identifying orphan object' do
+ let(:artifact_final_path) { 'aaa/@final/bbb' }
+ let(:fog_file_key) { File.join([bucket_prefix, artifact_final_path].compact) }
+
+ subject { job_artifact_object.orphan? }
+
+ context 'when there is job artifact record with a file_final_path that matches the object path' do
+ before do
+ # We never store the bucket_prefix in the file_final_path
+ create(:ci_job_artifact, file_final_path: artifact_final_path)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when there are no job artifact records with a file_final_path that matches the object path' do
+ context 'and there is a pending direct upload entry that matches the object path' do
+ before do
+ # We never store the bucket_prefix in the pending direct upload entry
+ ObjectStorage::PendingDirectUpload.prepare(:artifacts, artifact_final_path)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'and there are no pending direct upload entries that match the object path' do
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ context 'when bucket prefix is not present' do
+ it_behaves_like 'identifying orphan object'
+ end
+
+ context 'when bucket prefix is present' do
+ let(:bucket_prefix) { 'my/prefix' }
+
+ it_behaves_like 'identifying orphan object'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb
new file mode 100644
index 00000000000..aeb87bc0d9e
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb
@@ -0,0 +1,263 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner, :orphan_final_artifacts_cleanup, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#run!' do
+ let(:cleaner) do
+ described_class.new(
+ provider: specified_provider,
+ force_restart: force_restart,
+ dry_run: dry_run
+ )
+ end
+
+ let(:dry_run) { true }
+ let(:force_restart) { false }
+ let(:remote_directory) { 'artifacts' }
+ let(:bucket_prefix) { nil }
+
+ subject(:run) { cleaner.run! }
+
+ before do
+ stub_const('Gitlab::Cleanup::OrphanJobArtifactFinalObjects::Paginators::BasePaginator::BATCH_SIZE', 2)
+
+ Rake.application.rake_require 'tasks/gitlab/cleanup'
+
+ Gitlab.config.artifacts.object_store.tap do |config|
+ config[:remote_directory] = remote_directory
+ config[:bucket_prefix] = bucket_prefix
+ end
+
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+ shared_examples_for 'cleaning up orphan final job artifact objects' do
+ let(:fog_connection) do
+ stub_object_storage_uploader(
+ config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ direct_upload: true
+ )
+ end
+
+ let!(:orphan_final_object_1) { create_fog_file }
+ let!(:orphan_final_object_2) { create_fog_file }
+ let!(:orphan_non_final_object) { create_fog_file(final: false) }
+
+ let!(:non_orphan_final_object_1) do
+ create_fog_file.tap do |file|
+ create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+ end
+ end
+
+ let!(:non_orphan_final_object_2) do
+ create_fog_file.tap do |file|
+ create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+ end
+ end
+
+ shared_context 'when resuming from marker' do
+ let(:dummy_error) { Class.new(StandardError) }
+
+ before do
+ fetch_counter = 0
+
+ allow(cleaner).to receive(:fetch_batch).and_wrap_original do |m, *args|
+ raise dummy_error if fetch_counter == 1
+
+ fetch_counter += 1
+ m.call(*args)
+ end
+ end
+ end
+
+ shared_examples_for 'handling dry run mode' do
+ context 'when on dry run (which is default)' do
+ it 'logs orphan objects to delete but does not delete them' do
+ run
+
+ expect_start_log_message
+ expect_first_page_loading_log_message
+ expect_page_loading_via_marker_log_message(times: 3)
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+ expect_no_delete_log_message(orphan_non_final_object)
+ expect_no_delete_log_message(non_orphan_final_object_1)
+ expect_no_delete_log_message(non_orphan_final_object_2)
+ expect_done_log_message
+
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ expect_object_to_exist(orphan_non_final_object)
+ expect_object_to_exist(non_orphan_final_object_1)
+ expect_object_to_exist(non_orphan_final_object_2)
+ end
+
+ context 'when interrupted in the middle of processing pages' do
+ include_context 'when resuming from marker'
+
+ it 'resumes from last known page marker on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+ saved_marker = fetch_saved_marker
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: false,
+ dry_run: true
+ )
+
+ new_cleaner.run!
+
+ expect_resuming_from_marker_log_message(saved_marker)
+
+ # Given we can't guarantee the order of the objects because
+ # of random path generation, we can't tell which page they will
+ # fall in, so we will just ensure that they
+ # were all logged in the end.
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+
+ # Ensure that they were not deleted because this is just dry run.
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ end
+
+ context 'and force_restart is true' do
+ it 'starts from the first page on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: true,
+ dry_run: true
+ )
+
+ new_cleaner.run!
+
+ expect_no_resuming_from_marker_log_message
+
+ # Ensure that they were not deleted because this is just dry run.
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ end
+ end
+ end
+ end
+
+ context 'when dry run is set to false' do
+ let(:dry_run) { false }
+
+ it 'logs orphan objects to delete and deletes them' do
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+
+ run
+
+ expect_start_log_message
+ expect_first_page_loading_log_message
+ expect_page_loading_via_marker_log_message(times: 3)
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+ expect_no_delete_log_message(orphan_non_final_object)
+ expect_no_delete_log_message(non_orphan_final_object_1)
+ expect_no_delete_log_message(non_orphan_final_object_2)
+ expect_done_log_message
+
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ expect_object_to_exist(orphan_non_final_object)
+ expect_object_to_exist(non_orphan_final_object_1)
+ expect_object_to_exist(non_orphan_final_object_2)
+ end
+
+ context 'when interrupted in the middle of processing pages' do
+ include_context 'when resuming from marker'
+
+ it 'resumes from last known page marker on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+ saved_marker = fetch_saved_marker
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: false,
+ dry_run: false
+ )
+
+ new_cleaner.run!
+
+ expect_resuming_from_marker_log_message(saved_marker)
+
+ # Given we can't guarantee the order of the objects because
+ # of random path generation, we can't tell which page they will
+ # fall in, so we will just ensure that they
+ # were all logged in the end.
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+
+ # Ensure that they were deleted because this is not dry run.
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ end
+
+ context 'and force_restart is true' do
+ it 'starts from the first page on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: true,
+ dry_run: false
+ )
+
+ new_cleaner.run!
+
+ expect_no_resuming_from_marker_log_message
+
+ # Ensure that they were deleted because this is not a dry run.
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when not configured to use bucket_prefix' do
+ let(:remote_directory) { 'artifacts' }
+ let(:bucket_prefix) { nil }
+
+ it_behaves_like 'handling dry run mode'
+ end
+
+ context 'when configured to use bucket_prefix' do
+ let(:remote_directory) { 'main-bucket' }
+ let(:bucket_prefix) { 'my/artifacts' }
+
+ it_behaves_like 'handling dry run mode'
+ end
+ end
+
+ context 'when defaulting to provider in the object store configuration' do
+ let(:specified_provider) { nil }
+
+ it_behaves_like 'cleaning up orphan final job artifact objects'
+ end
+
+ context 'when provider is specified' do
+ context 'and provider is supported' do
+ let(:specified_provider) { 'aws' }
+
+ it_behaves_like 'cleaning up orphan final job artifact objects'
+ end
+
+ context 'and provider is not supported' do
+ let(:specified_provider) { 'somethingelse' }
+
+ it 'raises an error' do
+ expect { run }.to raise_error(described_class::UnsupportedProviderError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index df1b12e479f..7fc50438c95 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -97,207 +97,44 @@ RSpec.describe Gitlab::CurrentSettings, feature_category: :shared do
expect(described_class.metrics_sample_interval).to be(15)
end
- context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
- before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
- end
+ it 'retrieves settings using ApplicationSettingFetcher' do
+ expect(Gitlab::ApplicationSettingFetcher).to receive(:current_application_settings).and_call_original
- it 'returns an in-memory ApplicationSetting object' do
- expect(ApplicationSetting).not_to receive(:current)
+ described_class.home_page_url
+ end
+ end
- expect(described_class.current_application_settings).to be_a(ApplicationSetting)
- expect(described_class.current_application_settings).not_to be_persisted
- end
+ describe '#current_application_settings?' do
+ subject(:settings_set) { described_class.current_application_settings? }
+
+ before do
+ # unstub, it is stubbed in spec/spec_helper.rb
+ allow(described_class).to receive(:current_application_settings?).and_call_original
end
- context 'in a Rake task with DB unavailable' do
+ context 'when settings are cached in RequestStore' do
before do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- allow(ApplicationSetting.connection).to receive(:active?).and_return(false)
+ allow(Gitlab::SafeRequestStore).to receive(:exist?).with(:current_application_settings).and_return(true)
end
- context 'and no settings in cache' do
- before do
- expect(ApplicationSetting).not_to receive(:current)
- end
-
- it 'returns a FakeApplicationSettings object' do
- expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
- end
-
- it 'does not issue any query' do
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
+ it 'returns true' do
+ expect(settings_set).to be(true)
end
end
- context 'with DB available' do
- # This method returns the ::ApplicationSetting.defaults hash
- # but with respect of custom attribute accessors of ApplicationSetting model
- def settings_from_defaults
- ar_wrapped_defaults = ::ApplicationSetting.build_from_defaults.attributes
- ar_wrapped_defaults.slice(*::ApplicationSetting.defaults.keys)
- end
-
- context 'and settings in cache' do
- include_context 'with settings in cache'
-
- it 'fetches the settings from cache' do
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- expect(ApplicationSetting.connection).not_to receive(:active?)
- expect(ApplicationSetting.connection).not_to receive(:cached_table_exists?)
- expect_any_instance_of(ActiveRecord::MigrationContext).not_to receive(:needs_migration?)
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
+ context 'when ApplicationSettingFetcher.current_application_settings? returns true' do
+ before do
+ allow(Gitlab::ApplicationSettingFetcher).to receive(:current_application_settings?).and_return(true)
end
- context 'and no settings in cache' do
- before do
- allow(ApplicationSetting.connection).to receive(:active?).and_return(true)
- allow(ApplicationSetting.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
- end
-
- context 'with RequestStore enabled', :request_store do
- it 'fetches the settings from DB only once' do
- described_class.current_application_settings # warm the cache
-
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
- end
-
- it 'creates default ApplicationSettings if none are present' do
- settings = described_class.current_application_settings
-
- expect(settings).to be_a(ApplicationSetting)
- expect(settings).to be_persisted
- expect(settings).to have_attributes(settings_from_defaults)
- end
-
- context 'when we hit a recursive loop' do
- before do
- expect(ApplicationSetting).to receive(:create_from_defaults) do
- raise ApplicationSetting::Recursion
- end
- end
-
- it 'recovers and returns in-memory settings' do
- settings = described_class.current_application_settings
-
- expect(settings).to be_a(ApplicationSetting)
- expect(settings).not_to be_persisted
- end
- end
-
- context 'when ApplicationSettings does not have a primary key' do
- before do
- allow(ApplicationSetting.connection).to receive(:primary_key).with('application_settings').and_return(nil)
- end
-
- it 'raises an exception if ApplicationSettings does not have a primary key' do
- expect { described_class.current_application_settings }.to raise_error(/table is missing a primary key constraint/)
- end
- end
-
- context 'with pending migrations' do
- let(:current_settings) { described_class.current_application_settings }
-
- before do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(false)
- end
-
- shared_examples 'a non-persisted ApplicationSetting object' do
- it 'uses the default value from ApplicationSetting.defaults' do
- expect(current_settings.signup_enabled).to eq(ApplicationSetting.defaults[:signup_enabled])
- end
-
- it 'uses the default value from custom ApplicationSetting accessors' do
- expect(current_settings.commit_email_hostname).to eq(ApplicationSetting.default_commit_email_hostname)
- end
-
- it 'responds to predicate methods' do
- expect(current_settings.signup_enabled?).to eq(current_settings.signup_enabled)
- end
- end
-
- context 'in a Rake task' do
- before do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
- expect_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(true)
- end
-
- it_behaves_like 'a non-persisted ApplicationSetting object'
-
- it 'returns a FakeApplicationSettings object' do
- expect(current_settings).to be_a(Gitlab::FakeApplicationSettings)
- end
-
- context 'when a new column is used before being migrated' do
- before do
- allow(ApplicationSetting).to receive(:defaults).and_return({ foo: 'bar' })
- end
-
- it 'uses the default value if present' do
- expect(current_settings.foo).to eq('bar')
- end
- end
- end
-
- context 'with no ApplicationSetting DB record' do
- it_behaves_like 'a non-persisted ApplicationSetting object'
- end
-
- context 'with an existing ApplicationSetting DB record' do
- before do
- described_class.update!(home_page_url: 'http://mydomain.com')
- end
-
- it_behaves_like 'a non-persisted ApplicationSetting object'
-
- it 'uses the value from the DB attribute if present and not overridden by an accessor' do
- expect(current_settings.home_page_url).to eq('http://mydomain.com')
- end
- end
- end
-
- context 'when ApplicationSettings.current is present' do
- it 'returns the existing application settings' do
- expect(ApplicationSetting).to receive(:current).and_return(:current_settings)
-
- expect(described_class.current_application_settings).to eq(:current_settings)
- end
- end
+ it 'returns true' do
+ expect(settings_set).to be(true)
end
end
- end
-
- describe '#current_application_settings?', :use_clean_rails_memory_store_caching do
- before do
- allow(described_class).to receive(:current_application_settings?).and_call_original
- ApplicationSetting.delete_all # ensure no settings exist
- end
-
- it 'returns true when settings exist' do
- described_class.update!(
- home_page_url: 'http://mydomain.com',
- signup_enabled: false)
-
- expect(described_class.current_application_settings?).to eq(true)
- end
-
- it 'returns false when settings do not exist' do
- expect(described_class.current_application_settings?).to eq(false)
- end
-
- context 'with cache', :request_store do
- include_context 'with settings in cache'
-
- it 'returns an in-memory ApplicationSetting object' do
- expect(ApplicationSetting).not_to receive(:current)
- expect(described_class.current_application_settings?).to eq(true)
+ context 'when not cached and not in ApplicationSettingFetcher' do
+ it 'returns false' do
+ expect(settings_set).to be(false)
end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index ad7cd2dc736..5fa61b1680d 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -184,14 +184,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte
create(:ci_build, :deploy_to_production, :with_deployment, user: user, project: project, pipeline: pipeline)
# We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
- control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }.count
+ control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }
# Adding more builds to the pipeline and serializing the data again
create_list(:ci_build, 3, user: user, project: project, pipeline: pipeline)
create(:ci_build, :start_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
create(:ci_build, :stop_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
- expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control_count)
+ expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control)
end
it "with multiple retried builds" do
@@ -201,14 +201,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte
create(:ci_build, :deploy_to_production, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
# We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
- control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }.count
+ control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }
# Adding more builds to the pipeline and serializing the data again
create_list(:ci_build, 3, :retried, user: user, project: project, pipeline: pipeline)
create(:ci_build, :start_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
create(:ci_build, :stop_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
- expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control_count)
+ expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb
index 02dc596c5eb..2d2beaff339 100644
--- a/spec/lib/gitlab/data_builder/push_spec.rb
+++ b/spec/lib/gitlab/data_builder/push_spec.rb
@@ -89,13 +89,13 @@ RSpec.describe Gitlab::DataBuilder::Push do
described_class.build(
project: project,
user: user,
- oldrev: Gitlab::Git::BLANK_SHA,
+ oldrev: Gitlab::Git::SHA1_BLANK_SHA,
newrev: '8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b',
ref: 'refs/tags/v1.1.0')
end
it { expect(data).to be_a(Hash) }
- it { expect(data[:before]).to eq(Gitlab::Git::BLANK_SHA) }
+ it { expect(data[:before]).to eq(Gitlab::Git::SHA1_BLANK_SHA) }
it { expect(data[:checkout_sha]).to eq('5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
it { expect(data[:after]).to eq('8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b') }
it { expect(data[:ref]).to eq('refs/tags/v1.1.0') }
diff --git a/spec/lib/gitlab/database/click_house_client_spec.rb b/spec/lib/gitlab/database/click_house_client_spec.rb
index 271500ed3f6..e501a17b9b0 100644
--- a/spec/lib/gitlab/database/click_house_client_spec.rb
+++ b/spec/lib/gitlab/database/click_house_client_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
describe 'RSpec hooks' do
it 'ensures that tables are empty' do
- results = ClickHouse::Client.select('SELECT * FROM events', :main)
+ results = ClickHouse::Client.select('SELECT * FROM events FINAL', :main)
expect(results).to be_empty
end
@@ -66,7 +66,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
:main)
end
- results = ClickHouse::Client.select('SELECT id, path, created_at FROM events ORDER BY id', :main)
+ results = ClickHouse::Client.select('SELECT id, path, created_at FROM events FINAL ORDER BY id', :main)
expect(results).to match([
{ 'id' => 10, 'path' => '1/2/', 'created_at' => be_within(0.1.seconds).of(time) },
@@ -87,7 +87,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
ClickHouse::Client.execute(insert_query, :main)
- results = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+ results = ClickHouse::Client.select('SELECT * FROM events FINAL ORDER BY id', :main)
expect(results.size).to eq(3)
last = results.last
@@ -106,7 +106,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
ClickHouse::Client.execute(delete_query, :main)
select_query = ClickHouse::Client::Query.new(
- raw_query: 'SELECT * FROM events WHERE id = {id:UInt64}',
+ raw_query: 'SELECT * FROM events FINAL WHERE id = {id:UInt64}',
placeholders: { id: event3.id }
)
diff --git a/spec/lib/gitlab/database/dictionary_spec.rb b/spec/lib/gitlab/database/dictionary_spec.rb
index 261cf27ed69..59145842b24 100644
--- a/spec/lib/gitlab/database/dictionary_spec.rb
+++ b/spec/lib/gitlab/database/dictionary_spec.rb
@@ -24,6 +24,25 @@ RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
end
end
+ describe '.any_entry' do
+ it 'loads an entry from any scope' do
+ expect(described_class.any_entry('ci_pipelines')).to be_present # Regular table
+ expect(described_class.any_entry('audit_events_archived')).to be_present # Deleted table
+ expect(described_class.any_entry('postgres_constraints')).to be_present # View
+ expect(described_class.any_entry('not_a_table_ever')).to be_nil
+ end
+ end
+
+ describe '.entry' do
+ it 'loads an Entry from the given scope' do
+ expect(described_class.entry('ci_pipelines')).to be_present # Regular table
+ expect(described_class.entry('audit_events_archived')).not_to be_present # Deleted table
+ expect(described_class.entry('postgres_constraints')).not_to be_present # View
+ expect(described_class.entry('audit_events_archived', 'deleted_tables')).to be_present # Deleted table
+ expect(described_class.entry('postgres_constraints', 'views')).to be_present # View
+ end
+ end
+
describe '::Entry' do
subject(:database_dictionary) { described_class::Entry.new(file_path) }
@@ -80,6 +99,39 @@ RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
end
end
+
+ context 'with allow_cross_joins' do
+ let(:file_path) { 'db/docs/achievements.yml' }
+
+ describe '#allow_cross_to_schemas' do
+ it 'returns the list of allowed schemas' do
+ expect(database_dictionary.allow_cross_to_schemas(:joins))
+ .to contain_exactly(:gitlab_main_clusterwide)
+ end
+ end
+ end
+
+ context 'with allow_cross_transactions' do
+ let(:file_path) { 'db/docs/activity_pub_releases_subscriptions.yml' }
+
+ describe '#allow_cross_to_schemas' do
+ it 'returns the list of allowed schemas' do
+ expect(database_dictionary.allow_cross_to_schemas(:transactions))
+ .to contain_exactly(:gitlab_main_clusterwide)
+ end
+ end
+ end
+
+ context 'with allow_cross_foreign_keys' do
+ let(:file_path) { 'db/docs/agent_group_authorizations.yml' }
+
+ describe '#allow_cross_to_schemas' do
+ it 'returns the list of allowed schemas' do
+ expect(database_dictionary.allow_cross_to_schemas(:foreign_keys))
+ .to contain_exactly(:gitlab_main_clusterwide)
+ end
+ end
+ end
end
context 'for a view' do
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 7fca47c707c..f716bcfcf49 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -217,17 +217,19 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.cross_joins_allowed?' do
where(:schemas, :tables, :result) do
- %i[] | %i[] | true
- %i[gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_internal] | %i[] | false
- %i[gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_shared] | %i[] | true
+ %i[] | %w[] | true
+ %i[gitlab_main] | %w[evidences] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %w[users evidences] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %w[users ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %w[users evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %w[users schema_migrations] | false
+ %i[gitlab_main gitlab_ci] | %w[evidences schema_migrations] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %w[users evidences detached_partitions] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | %w[users detached_partitions] | true
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users achievements] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users activity_pub_releases_subscriptions] | false
end
with_them do
@@ -237,17 +239,19 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.cross_transactions_allowed?' do
where(:schemas, :tables, :result) do
- %i[] | %i[] | true
- %i[gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_internal] | %i[] | true
- %i[gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_shared] | %i[] | true
+ %i[] | %w[] | true
+ %i[gitlab_main] | %w[evidences] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %w[users evidences] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %w[users ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %w[users evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %w[users schema_migrations] | true
+ %i[gitlab_main gitlab_ci] | %w[evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %w[users evidences detached_partitions] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | %w[users detached_partitions] | true
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users achievements] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users activity_pub_releases_subscriptions] | true
end
with_them do
@@ -257,15 +261,17 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.cross_foreign_key_allowed?' do
where(:schemas, :tables, :result) do
- %i[] | %i[] | false
- %i[gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_internal] | %i[] | false
- %i[gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_shared] | %i[] | false
+ %i[] | %w[] | false
+ %i[gitlab_main] | %w[evidences] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %w[users evidences] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %w[users ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %w[users schema_migrations] | false
+ %i[gitlab_main gitlab_ci] | %w[evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_shared] | %w[users detached_partitions] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users achievements] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users agent_group_authorizations] | true
end
with_them do
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
index 8b653e2d89d..afcec5ea214 100644
--- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
+RSpec.describe Gitlab::Database::MigrationHelpers::V2, feature_category: :database do
include Database::TriggerHelpers
include Database::TableSchemaHelpers
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
context 'when the batch column does exist' do
it 'passes it when creating the column' do
expect(migration).to receive(:create_column_from)
- .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status)
+ .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status, type_cast_function: nil)
.and_call_original
migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :status)
@@ -495,4 +495,83 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
end
end
end
+
+ describe '#change_column_type_concurrently' do
+ let(:table_name) { :_test_change_column_type_concurrently }
+
+ before do
+ migration.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ user_id bigint,
+ name character varying
+ );
+ /* at least one record for batching update */
+ INSERT INTO #{table_name} (id, user_id, name)
+ VALUES (1, 9, '{ \"lucky_number\": 8 }')
+ SQL
+ end
+
+ it 'adds a column of the new type and triggers to keep these two columns in sync' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text)
+ end
+ expect(recorder.log).to include(/ALTER TABLE "_test_change_column_type_concurrently" ADD "name_for_type_change" text/)
+ expect(recorder.log).to include(/BEGIN\n IF NEW."name" IS NOT DISTINCT FROM NULL AND NEW."name_for_type_change" IS DISTINCT FROM NULL THEN\n NEW."name" = NEW."name_for_type_change";\n END IF;\n\n IF NEW."name_for_type_change" IS NOT DISTINCT FROM NULL AND NEW."name" IS DISTINCT FROM NULL THEN\n NEW."name_for_type_change" = NEW."name";\n END IF;\n\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/BEGIN\n NEW."name" := NEW."name_for_type_change";\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/BEGIN\n NEW."name_for_type_change" := NEW."name";\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/ON "_test_change_column_type_concurrently"\nFOR EACH ROW\sEXECUTE FUNCTION/m)
+ expect(recorder.log).to include(/UPDATE .* WHERE "_test_change_column_type_concurrently"."id" >= \d+/)
+ end
+
+ context 'with batch column name' do
+ it 'updates the new column using the batch column' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text, batch_column_name: :user_id)
+ end
+ expect(recorder.log).to include(/UPDATE .* WHERE "_test_change_column_type_concurrently"."user_id" >= \d+/)
+ end
+ end
+
+ context 'with type cast function' do
+ it 'updates the new column with casting the value to the given type' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text, type_cast_function: 'JSON')
+ end
+ expect(recorder.log).to include(/SET "name_for_type_change" = JSON\("_test_change_column_type_concurrently"\."name"\)/m)
+ end
+ end
+ end
+
+ describe '#undo_change_column_type_concurrently' do
+ let(:table_name) { :_test_undo_change_column_type_concurrently }
+
+ before do
+ migration.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ user_id bigint,
+ name character varying
+ );
+ /* at least one record for batching update */
+ INSERT INTO #{table_name} (id, user_id, name)
+ VALUES (1, 9, 'For every young')
+ SQL
+ end
+
+ it 'undoes the column type change' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ migration.change_column_type_concurrently(table_name, :name, :text)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.undo_change_column_type_concurrently(table_name, :name)
+ end
+ expect(recorder.log).to include(/DROP TRIGGER IF EXISTS .+ON "_test_undo_change_column_type_concurrently"/m)
+ expect(recorder.log).to include(/ALTER TABLE "_test_undo_change_column_type_concurrently" DROP COLUMN "name_for_type_change"/)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/namespace_each_batch_spec.rb b/spec/lib/gitlab/database/namespace_each_batch_spec.rb
new file mode 100644
index 00000000000..23de19a6683
--- /dev/null
+++ b/spec/lib/gitlab/database/namespace_each_batch_spec.rb
@@ -0,0 +1,174 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::NamespaceEachBatch, feature_category: :database do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:user) { create(:user, :admin) }
+
+ let(:namespace_id) { group.id }
+
+ let_it_be(:subgroup1) { create(:group, parent: group) }
+ let_it_be(:subgroup2) { create(:group, parent: group) }
+
+ let_it_be(:subsubgroup1) { create(:group, parent: subgroup1) }
+ let_it_be(:subsubgroup2) { create(:group, parent: subgroup1) }
+ let_it_be(:subsubgroup3) { create(:group, parent: subgroup1) }
+
+ let_it_be(:project1) { create(:project, namespace: group) }
+ let_it_be(:project2) { create(:project, namespace: group) }
+ let_it_be(:project3) { create(:project, namespace: subsubgroup2) }
+ let_it_be(:project4) { create(:project, namespace: subsubgroup3) }
+ let_it_be(:project5) { create(:project, namespace: subsubgroup3) }
+
+ let(:namespace_class) { Namespace }
+ let(:batch_size) { 3 }
+
+ def collected_ids(cursor = { current_id: namespace_id, depth: [namespace_id] })
+ [].tap do |ids|
+ described_class.new(namespace_class: namespace_class, cursor: cursor).each_batch(of: batch_size) do |batch_ids|
+ ids.concat(batch_ids)
+ end
+ end
+ end
+
+ shared_examples 'iteration over the hierarchy' do
+ it 'returns the correct namespace ids' do
+ expect(collected_ids).to eq([
+ group.id,
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ project3.project_namespace_id,
+ subsubgroup3.id,
+ project4.project_namespace_id,
+ project5.project_namespace_id,
+ subgroup2.id,
+ project1.project_namespace_id,
+ project2.project_namespace_id
+ ])
+ end
+ end
+
+ it_behaves_like 'iteration over the hierarchy'
+
+ context 'when batch size is larger than the hierarchy' do
+ let(:batch_size) { 100 }
+
+ it_behaves_like 'iteration over the hierarchy'
+ end
+
+ context 'when batch size is 1' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'iteration over the hierarchy'
+ end
+
+ context 'when stopping the iteration in the middle and resuming' do
+ it 'returns the correct ids' do
+ ids = []
+ cursor = { current_id: namespace_id, depth: [namespace_id] }
+
+ iterator = described_class.new(namespace_class: namespace_class, cursor: cursor)
+ iterator.each_batch(of: 5) do |batch_ids, new_cursor|
+ ids.concat(batch_ids)
+ cursor = new_cursor
+ end
+
+ iterator = described_class.new(namespace_class: namespace_class, cursor: cursor)
+ iterator.each_batch(of: 500) do |batch_ids|
+ ids.concat(batch_ids)
+ end
+
+ expect(collected_ids).to eq([
+ group.id,
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ project3.project_namespace_id,
+ subsubgroup3.id,
+ project4.project_namespace_id,
+ project5.project_namespace_id,
+ subgroup2.id,
+ project1.project_namespace_id,
+ project2.project_namespace_id
+ ])
+ end
+ end
+
+ context 'when querying a subgroup' do
+ let(:namespace_id) { subgroup1.id }
+
+ it 'returns the correct ids' do
+ expect(collected_ids).to eq([
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ project3.project_namespace_id,
+ subsubgroup3.id,
+ project4.project_namespace_id,
+ project5.project_namespace_id
+ ])
+ end
+ end
+
+ context 'when querying a subgroup without descendants' do
+ let(:namespace_id) { subgroup2.id }
+
+ it 'finds only the given namespace id' do
+ expect(collected_ids).to eq([subgroup2.id])
+ end
+ end
+
+ context 'when batching over groups only' do
+ let(:namespace_class) { Group }
+
+ it 'returns the correct namespace ids' do
+ expect(collected_ids).to eq([
+ group.id,
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ subsubgroup3.id,
+ subgroup2.id
+ ])
+ end
+ end
+
+ context 'when the cursor is invalid' do
+ context 'when non-integer current id is given' do
+ it 'raises error' do
+ cursor = { current_id: 'not int', depth: [group.id] }
+
+ expect { collected_ids(cursor) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when depth is not an array' do
+ it 'raises error' do
+ cursor = { current_id: group.id, depth: group.id }
+
+ expect { collected_ids(cursor) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when non-integer depth values are given' do
+ it 'raises error' do
+ cursor = { current_id: group.id, depth: ['not int'] }
+
+ expect { collected_ids(cursor) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when giving non-existing namespace id' do
+ it 'returns nothing', :enable_admin_mode do
+ cursor = { current_id: subgroup1.id, depth: [group.id, subgroup1.id] }
+
+ Groups::DestroyService.new(group, user).execute
+
+ expect(collected_ids(cursor)).to eq([])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
index d1d7aa12c46..4fc62c6cc74 100644
--- a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
+++ b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
# Specific tables can be exempted from this requirement, and such tables must be added to the `exempted_tables` list.
let!(:exempted_tables) do
[
- "audit_events_instance_amazon_s3_configurations" # https://gitlab.com/gitlab-org/gitlab/-/issues/431327
+ "audit_events_instance_amazon_s3_configurations", # https://gitlab.com/gitlab-org/gitlab/-/issues/431327
+ "sbom_source_packages" # https://gitlab.com/gitlab-org/gitlab/-/issues/437718
]
end
diff --git a/spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb b/spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb
new file mode 100644
index 00000000000..eadae77bdc6
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::IntRangePartition, feature_category: :database do
+ describe 'validate attributes' do
+ subject(:int_range_partition) { described_class.from_sql(table, partition_name, definition) }
+
+ let(:table) { 'foo' }
+ let(:partition_name) { 'foo_bar' }
+ let(:definition) { "FOR VALUES FROM ('1') TO ('10')" }
+
+ context 'when `from` is greater than `to`' do
+ let(:definition) { "FOR VALUES FROM ('10') TO ('1')" }
+
+ it 'raises an exception' do
+ expect { int_range_partition }.to raise_error(RuntimeError, '`to` must be greater than `from`')
+ end
+ end
+
+ context 'when `to` is 0' do
+ let(:definition) { "FOR VALUES FROM ('10') TO ('0')" }
+
+ it 'raises an exception' do
+ expect { int_range_partition }.to raise_error(RuntimeError, '`to` statement must be greater than 0')
+ end
+ end
+
+ context 'when `from` is 0' do
+ let(:definition) { "FOR VALUES FROM ('0') TO ('1')" }
+
+ it 'raises an exception' do
+ expect { int_range_partition }.to raise_error(RuntimeError, '`from` statement must be greater than 0')
+ end
+ end
+ end
+
+ describe '.from_sql' do
+ subject(:int_range_partition) { described_class.from_sql(table, partition_name, definition) }
+
+ let(:table) { 'foo' }
+ let(:partition_name) { 'foo_bar' }
+ let(:definition) { "FOR VALUES FROM ('1') TO ('10')" }
+
+ it 'uses specified table name' do
+ expect(int_range_partition.table).to eq(table)
+ end
+
+ it 'uses specified partition name' do
+ expect(int_range_partition.partition_name).to eq(partition_name)
+ end
+
+ it 'parses start date' do
+ expect(int_range_partition.from).to eq(1)
+ end
+
+ it 'parses end date' do
+ expect(int_range_partition.to).to eq(10)
+ end
+ end
+
+ describe '#partition_name' do
+ subject(:int_range_partition_name) do
+ described_class.new(table, from, to, partition_name: partition_name).partition_name
+ end
+
+ let(:table) { 'foo' }
+ let(:from) { '1' }
+ let(:to) { '10' }
+ let(:partition_name) { nil }
+
+ it 'uses table as prefix' do
+ expect(int_range_partition_name).to start_with(table)
+ end
+
+ it 'uses start id (from) as suffix' do
+ expect(int_range_partition_name).to end_with("_1")
+ end
+
+ context 'with partition name explicitly given' do
+ let(:partition_name) { "foo_bar" }
+
+ it 'uses given partition name' do
+ expect(int_range_partition_name).to eq(partition_name)
+ end
+ end
+ end
+
+ describe '#to_sql' do
+ subject(:to_sql) { described_class.new(table, from, to).to_sql }
+
+ let(:table) { 'foo' }
+ let(:from) { '1' }
+ let(:to) { '10' }
+
+ it 'transforms to a CREATE TABLE statement' do
+ expect(to_sql).to eq(<<~SQL)
+ CREATE TABLE IF NOT EXISTS "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}"."foo_1"
+ PARTITION OF "foo"
+ FOR VALUES FROM ('1') TO ('10')
+ SQL
+ end
+ end
+
+ describe 'object equality - #eql' do
+ def expect_inequality(actual, other)
+ expect(actual.eql?(other)).to be_falsey
+ expect(actual).not_to eq(other)
+ end
+
+ def expect_equality(actual, other)
+ expect(actual).to eq(other)
+ expect(actual.eql?(other)).to be_truthy
+ expect(actual.hash).to eq(other.hash)
+ end
+
+ def make_new(table: 'foo', from: '1', to: '10', partition_name: 'foo_1')
+ described_class.new(table, from, to, partition_name: partition_name)
+ end
+
+ it 'treats objects identical with identical attributes' do
+ expect_equality(make_new, make_new)
+ end
+
+ it 'different table leads to in-equality' do
+ expect_inequality(make_new, make_new(table: 'bar'))
+ end
+
+ it 'different from leads to in-equality' do
+ expect_inequality(make_new, make_new(from: '2'))
+ end
+
+ it 'different to leads to in-equality' do
+ expect_inequality(make_new, make_new(to: '11'))
+ end
+
+ it 'different partition_name leads to in-equality' do
+ expect_inequality(make_new, make_new(partition_name: 'different'))
+ end
+
+ it 'nil partition_name is ignored if auto-generated matches' do
+ expect_equality(make_new, make_new(partition_name: nil))
+ end
+ end
+
+ describe 'Comparable, #<=>' do
+ let(:table) { 'foo' }
+
+ it 'sorts by partition bounds' do
+ partitions = [
+ described_class.new(table, '100', '110', partition_name: 'p_100'),
+ described_class.new(table, '5', '10', partition_name: 'p_5'),
+ described_class.new(table, '10', '100', partition_name: 'p_10'),
+ described_class.new(table, '1', '5', partition_name: 'p_1')
+ ]
+
+ expect(partitions.sort).to eq(
+ [
+ described_class.new(table, '1', '5', partition_name: 'p_1'),
+ described_class.new(table, '5', '10', partition_name: 'p_5'),
+ described_class.new(table, '10', '100', partition_name: 'p_10'),
+ described_class.new(table, '100', '110', partition_name: 'p_100')
+ ])
+ end
+
+ it 'returns nil for partitions of different tables' do
+ one = described_class.new('foo', '1', '10')
+ two = described_class.new('bar', '1', '10')
+
+ expect(one.<=>(two)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb
new file mode 100644
index 00000000000..19937544393
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb
@@ -0,0 +1,317 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::IntRangeStrategy, feature_category: :database do
+ include Database::PartitioningHelpers
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = '_test_partitioned_test'
+ end
+ end
+
+ after do
+ model.reset_column_information
+ end
+
+ describe '#current_partitions' do
+ subject(:current_partitions) { described_class.new(model, partitioning_key, partition_size: 10).current_partitions }
+
+ let(:partitioning_key) { double }
+ let(:table_name) { :_test_partitioned_test }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name}
+ (id serial not null, external_id integer not null, PRIMARY KEY (id, external_id))
+ PARTITION BY RANGE (external_id);
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_1
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('1') TO ('5');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_5
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('5') TO ('10');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_10
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('10') TO ('100');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_100
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('100') TO ('110');
+ SQL
+ end
+
+ it 'returns partitions order by range bound' do
+ expect(current_partitions).to eq(
+ [
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 1, 5,
+ partition_name: '_test_partitioned_test_1'),
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 5, 10,
+ partition_name: '_test_partitioned_test_5'),
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 10, 100,
+ partition_name: '_test_partitioned_test_10'),
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 100, 110,
+ partition_name: '_test_partitioned_test_100')
+ ])
+ end
+ end
+
+ describe '#extra_partitions' do
+ let(:partitioning_key) { double }
+ let(:table_name) { :_test_partitioned_test }
+
+ subject(:extra_partitions) { described_class.new(model, partitioning_key, partition_size: 10).extra_partitions }
+
+ it 'returns an empty array' do
+ expect(extra_partitions).to eq([])
+ end
+ end
+
+ describe '#missing_partitions' do
+ subject(:missing_partitions) { described_class.new(model, partitioning_key, partition_size: 10).missing_partitions }
+
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = '_test_partitioned_test'
+ self.primary_key = :id
+ end
+ end
+
+ let(:partitioning_key) { :external_id }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{model.table_name}
+ (id serial not null, external_id integer not null, PRIMARY KEY (id, external_id))
+ PARTITION BY RANGE (external_id);
+ SQL
+ end
+
+ context 'when the current partitions are not completed' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_11
+ PARTITION OF #{model.table_name}
+ FOR VALUES FROM ('11') TO ('21');
+ SQL
+ end
+
+ context 'when partitions have data' do
+ before do
+ model.create!(external_id: 15)
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(7)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+
+ context 'when partitions are empty' do
+ before do
+ model.delete_all
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(7)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+ end
+
+ context 'with existing partitions' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_1
+ PARTITION OF #{model.table_name}
+ FOR VALUES FROM ('1') TO ('11');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_11
+ PARTITION OF #{model.table_name}
+ FOR VALUES FROM ('11') TO ('21');
+ SQL
+ end
+
+ context 'when partitions have data' do
+ before do
+ model.create!(external_id: 1)
+ model.create!(external_id: 15)
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(6)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+
+ context 'when partitions are empty' do
+ before do
+ model.delete_all
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(6)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+ end
+
+ context 'without partitions' do
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(6)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61)
+ )
+ end
+ end
+ end
+
+ describe 'attributes' do
+ let(:partitioning_key) { :partition }
+ let(:table_name) { :_test_partitioned_test }
+ let(:partition_size) { 5 }
+
+ subject(:strategy) do
+ described_class.new(
+ model, partitioning_key,
+ partition_size: partition_size
+ )
+ end
+
+ specify do
+ expect(strategy).to have_attributes({
+ model: model,
+ partitioning_key: partitioning_key,
+ partition_size: partition_size
+ })
+ end
+ end
+
+ describe 'simulates the merge_request_diff_commits partition creation' do
+ let(:table_name) { '_test_partitioned_test' }
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = '_test_partitioned_test'
+ self.primary_key = :merge_request_diff_id
+
+ partitioned_by :merge_request_diff_id, strategy: :int_range, partition_size: 2
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ create table #{table_name}
+ (
+ merge_request_diff_id int not null,
+ relative_order int not null,
+ created_at timestamptz,
+ primary key (merge_request_diff_id, relative_order)
+ )
+ PARTITION BY RANGE (merge_request_diff_id);
+
+ create table gitlab_partitions_dynamic.#{table_name}_1
+ PARTITION of #{table_name} FOR VALUES FROM (1) TO (3);
+
+ create table gitlab_partitions_dynamic.#{table_name}_3
+ PARTITION of #{table_name} FOR VALUES FROM (3) TO (5);
+ SQL
+ end
+
+ it 'redirects to the new partition', :aggregate_failures do
+ expect_range_partitions_for(table_name, {
+ '1' => %w[1 3],
+ '3' => %w[3 5]
+ })
+
+ expect do
+ model.create!(merge_request_diff_id: 1, relative_order: 1, created_at: Time.zone.now) # Goes in partition 1
+ end.to change { model.count }.by(1)
+
+ expect do
+ model.create!(merge_request_diff_id: 5, relative_order: 1, created_at: Time.zone.now)
+ end.to raise_error(ActiveRecord::StatementInvalid, /no partition of relation/)
+
+      Gitlab::Database::Partitioning::PartitionManager.new(model).sync_partitions # Generates 6 more partitions
+
+ expect_range_partitions_for(table_name, {
+ '1' => %w[1 3],
+ '3' => %w[3 5],
+ '5' => %w[5 7],
+ '7' => %w[7 9],
+ '9' => %w[9 11],
+ '11' => %w[11 13],
+ '13' => %w[13 15],
+ '15' => %w[15 17]
+ })
+
+ expect do
+ model.create!(merge_request_diff_id: 5, relative_order: 1, created_at: Time.zone.now) # Goes in partition 5
+ end.to change { model.count }.by(1)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
index b30501cce21..e0b090f7ff9 100644
--- a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
table_name: table_name,
partitioning_column: partitioning_column,
parent_table_name: parent_table_name,
- zero_partition_value: partitioning_default
+ zero_partition_value: zero_partition_value
)
end
@@ -24,107 +24,121 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
let(:async) { false }
- it 'adds a check constraint' do
- expect { prepare }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.from(0).to(1)
- end
-
- context 'when it fails to add constraint' do
- before do
- allow(migration_context).to receive(:add_check_constraint)
- end
-
- it 'raises UnableToPartition error' do
- expect { prepare }
- .to raise_error(described_class::UnableToPartition)
- .and change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.by(0)
- end
- end
-
- context 'when async' do
- let(:async) { true }
-
- it 'adds a NOT VALID check constraint' do
+ shared_examples 'runs #prepare_for_partitioning' do
+ it 'adds a check constraint' do
expect { prepare }.to change {
Gitlab::Database::PostgresConstraint
.check_constraints
.by_table_identifier(table_identifier)
.count
}.from(0).to(1)
+ end
- constraint =
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .last
+ context 'when it fails to add constraint' do
+ before do
+ allow(migration_context).to receive(:add_check_constraint)
+ end
- expect(constraint.definition).to end_with('NOT VALID')
+ it 'raises UnableToPartition error' do
+ expect { prepare }
+ .to raise_error(described_class::UnableToPartition)
+ .and change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.by(0)
+ end
end
- it 'adds a PostgresAsyncConstraintValidation record' do
- expect { prepare }.to change {
- Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.count
- }.by(1)
+ context 'when async' do
+ let(:async) { true }
- record = Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation
- .where(table_name: table_name).last
+ it 'adds a NOT VALID check constraint' do
+ expect { prepare }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.from(0).to(1)
- expect(record.name).to eq described_class::PARTITIONING_CONSTRAINT_NAME
- expect(record).to be_check_constraint
- end
+ constraint =
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .last
- context 'when constraint exists but is not valid' do
- before do
- converter.prepare_for_partitioning(async: true)
+ expect(constraint.definition).to end_with('NOT VALID')
end
- it 'validates the check constraint' do
+ it 'adds a PostgresAsyncConstraintValidation record' do
expect { prepare }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier).first.constraint_valid?
- }.from(false).to(true)
+ Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.count
+ }.by(1)
+
+ record = Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation
+ .where(table_name: table_name).last
+
+ expect(record.name).to eq described_class::PARTITIONING_CONSTRAINT_NAME
+ expect(record).to be_check_constraint
end
- context 'when it fails to validate constraint' do
+ context 'when constraint exists but is not valid' do
before do
- allow(migration_context).to receive(:validate_check_constraint)
+ converter.prepare_for_partitioning(async: true)
end
- it 'raises UnableToPartition error' do
- expect { prepare }
- .to raise_error(described_class::UnableToPartition,
- starting_with('Error validating partitioning constraint'))
- .and change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.by(0)
+ it 'validates the check constraint' do
+ expect { prepare }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier).first.constraint_valid?
+ }.from(false).to(true)
end
- end
- end
- context 'when constraint exists and is valid' do
- before do
- converter.prepare_for_partitioning(async: false)
+ context 'when it fails to validate constraint' do
+ before do
+ allow(migration_context).to receive(:validate_check_constraint)
+ end
+
+ it 'raises UnableToPartition error' do
+ expect { prepare }
+ .to raise_error(described_class::UnableToPartition,
+ starting_with('Error validating partitioning constraint'))
+ .and change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.by(0)
+ end
+ end
end
- it 'raises UnableToPartition error' do
- expect(Gitlab::AppLogger).to receive(:info).with(starting_with('Nothing to do'))
- prepare
+ context 'when constraint exists and is valid' do
+ before do
+ converter.prepare_for_partitioning(async: false)
+ end
+
+ it 'raises UnableToPartition error' do
+ expect(Gitlab::AppLogger).to receive(:info).with(starting_with('Nothing to do'))
+ prepare
+ end
end
end
end
+
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs #prepare_for_partitioning'
+ end
+
+ context 'when multiple partitioning values are given' do
+ let(:zero_partition_value) { multiple_partitioning_values }
+
+ include_examples 'runs #prepare_for_partitioning'
+ end
end
describe '#revert_preparation_for_partitioning' do
@@ -132,15 +146,29 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
converter.prepare_for_partitioning
end
+ shared_examples 'runs #revert_preparation_for_partitioning' do
+ it 'removes a check constraint' do
+ expect { revert_prepare }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier("#{connection.current_schema}.#{table_name}")
+ .count
+ }.from(1).to(0)
+ end
+ end
+
subject(:revert_prepare) { converter.revert_preparation_for_partitioning }
- it 'removes a check constraint' do
- expect { revert_prepare }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier("#{connection.current_schema}.#{table_name}")
- .count
- }.from(1).to(0)
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs #revert_preparation_for_partitioning'
+ end
+
+ context 'when multiple partitioning values are given' do
+ let(:zero_partition_value) { multiple_partitioning_values }
+
+ include_examples 'runs #revert_preparation_for_partitioning'
end
end
@@ -153,128 +181,146 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
converter.prepare_for_partitioning(async: async)
end
- context 'when the primary key is incorrect' do
- before do
- connection.execute(<<~SQL)
- alter table #{referencing_table_name} drop constraint fk_referencing; -- this depends on the primary key
- alter table #{other_referencing_table_name} drop constraint fk_referencing_other; -- this does too
- alter table #{table_name} drop constraint #{table_name}_pkey;
- alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id);
- SQL
- end
+ shared_examples 'runs partition method' do
+ context 'when the primary key is incorrect' do
+ before do
+ connection.execute(<<~SQL)
+ alter table #{referencing_table_name} drop constraint fk_referencing; -- this depends on the primary key
+ alter table #{other_referencing_table_name} drop constraint fk_referencing_other; -- this does too
+ alter table #{table_name} drop constraint #{table_name}_pkey;
+ alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id);
+ SQL
+ end
- it 'throws a reasonable error message' do
- expect { partition }.to raise_error(described_class::UnableToPartition, /#{partitioning_column}/)
+ it 'throws a reasonable error message' do
+ expect { partition }.to raise_error(described_class::UnableToPartition, /#{partitioning_column}/)
+ end
end
- end
- context 'when there is not a supporting check constraint' do
- before do
- connection.execute(<<~SQL)
- alter table #{table_name} drop constraint partitioning_constraint;
- SQL
- end
+ context 'when there is not a supporting check constraint' do
+ before do
+ connection.execute(<<~SQL)
+ alter table #{table_name} drop constraint partitioning_constraint;
+ SQL
+ end
- it 'throws a reasonable error message' do
- expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ it 'throws a reasonable error message' do
+ expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ end
end
- end
- context 'when supporting check constraint is not valid' do
- let(:async) { true }
+ context 'when supporting check constraint is not valid' do
+ let(:async) { true }
- it 'throws a reasonable error message' do
- expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ it 'throws a reasonable error message' do
+ expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ end
end
- end
-
- it 'migrates the table to a partitioned table' do
- fks_before = migration_context.foreign_keys(table_name)
-
- partition
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
- expect(migration_context.foreign_keys(parent_table_name).map(&:options)).to match_array(fks_before.map(&:options))
+ it 'migrates the table to a partitioned table' do
+ fks_before = migration_context.foreign_keys(table_name)
- connection.execute(<<~SQL)
- insert into #{table_name} (referenced_id, other_referenced_id) select #{referenced_table_name}.id, #{other_referenced_table_name}.id from #{referenced_table_name}, #{other_referenced_table_name};
- SQL
+ partition
- # Create a second partition
- connection.execute(<<~SQL)
- create table #{table_name}2 partition of #{parent_table_name} FOR VALUES IN (2)
- SQL
+ expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ expect(migration_context.foreign_keys(parent_table_name)
+ .map(&:options)).to match_array(fks_before.map(&:options))
- parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1)
- expect(parent_model.pluck(:id)).to match_array([1, 2, 3])
-
- expect { referencing_model.create!(partitioning_column => 1, :ref_id => 1) }.not_to raise_error
- end
+ connection.execute(<<~SQL)
+ insert into #{table_name} (referenced_id, other_referenced_id) select #{referenced_table_name}.id, #{other_referenced_table_name}.id from #{referenced_table_name}, #{other_referenced_table_name};
+ SQL
- context 'when the existing table is owned by a different user' do
- before do
+ # Create a second partition
connection.execute(<<~SQL)
- CREATE USER other_user SUPERUSER;
- ALTER TABLE #{table_name} OWNER TO other_user;
+ create table #{table_name}2 partition of #{parent_table_name} FOR VALUES IN (2)
SQL
- end
- let(:current_user) { model.connection.select_value('select current_user') }
+ parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1)
+ expect(parent_model.pluck(:id)).to match_array([1, 2, 3])
- it 'partitions without error' do
- expect { partition }.not_to raise_error
+ expect { referencing_model.create!(partitioning_column => 1, :ref_id => 1) }.not_to raise_error
end
- end
- context 'when an error occurs during the conversion' do
- before do
- # Set up the fault that we'd like to inject
- fault.call
- end
+ context 'when the existing table is owned by a different user' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE USER other_user SUPERUSER;
+ ALTER TABLE #{table_name} OWNER TO other_user;
+ SQL
+ end
- let(:old_fks) do
- Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(table_identifier).not_inherited
- end
+ let(:current_user) { model.connection.select_value('select current_user') }
- let(:new_fks) do
- Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(parent_table_identifier).not_inherited
+ it 'partitions without error' do
+ expect { partition }.not_to raise_error
+ end
end
- context 'when partitioning fails the first time' do
- where(:case_name, :fault) do
- [
- ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }],
- ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }],
- ["adding the second foreign key", lazy { fail_adding_fk(parent_table_name, other_referenced_table_name) }],
- ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }]
- ]
+ context 'when an error occurs during the conversion' do
+ before do
+ # Set up the fault that we'd like to inject
+ fault.call
end
- with_them do
- it 'recovers from a fault', :aggregate_failures do
- expect { converter.partition }.to raise_error(/fault/)
+ let(:old_fks) do
+ Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(table_identifier).not_inherited
+ end
- expect { converter.partition }.not_to raise_error
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ let(:new_fks) do
+ Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(parent_table_identifier).not_inherited
+ end
+
+ context 'when partitioning fails the first time' do
+ where(:case_name, :fault) do
+ [
+ ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }],
+ ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }],
+ ["adding the second foreign key", lazy do
+ fail_adding_fk(parent_table_name, other_referenced_table_name)
+ end],
+ ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }]
+ ]
+ end
+
+ with_them do
+ it 'recovers from a fault', :aggregate_failures do
+ expect { converter.partition }.to raise_error(/fault/)
+
+ expect { converter.partition }.not_to raise_error
+ expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ end
end
end
end
- end
- context 'when table has LFK triggers' do
- before do
- migration_context.track_record_deletions(table_name)
- end
+ context 'when table has LFK triggers' do
+ before do
+ migration_context.track_record_deletions(table_name)
+ end
- it 'moves the trigger on the parent table', :aggregate_failures do
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ it 'moves the trigger on the parent table', :aggregate_failures do
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
- expect { partition }.not_to raise_error
+ expect { partition }.not_to raise_error
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
- expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
+ end
end
end
+
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs partition method'
+ end
+
+ context 'when multiple partitioning values are given' do
+ # Because of the common spec on line 220
+ let(:zero_partition_value) { [1, 3, 4] }
+
+ include_examples 'runs partition method'
+ end
end
describe '#revert_partitioning' do
@@ -285,49 +331,67 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
subject(:revert_conversion) { converter.revert_partitioning }
- it 'detaches the partition' do
- expect { revert_conversion }.to change {
- Gitlab::Database::PostgresPartition
- .for_parent_table(parent_table_name).count
- }.from(1).to(0)
- end
-
- it 'does not drop the child partition' do
- expect { revert_conversion }.not_to change { table_oid(table_name) }
- end
+ shared_examples 'runs #revert_partitioning' do
+ it 'detaches the partition' do
+ expect { revert_conversion }.to change {
+ Gitlab::Database::PostgresPartition
+ .for_parent_table(parent_table_name).count
+ }.from(1).to(0)
+ end
- it 'removes the parent table' do
- expect { revert_conversion }.to change { table_oid(parent_table_name).present? }.from(true).to(false)
- end
+ it 'does not drop the child partition' do
+ expect { revert_conversion }.not_to change { table_oid(table_name) }
+ end
- it 're-adds the check constraint' do
- expect { revert_conversion }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.by(1)
- end
+ it 'removes the parent table' do
+ expect { revert_conversion }.to change { table_oid(parent_table_name).present? }.from(true).to(false)
+ end
- it 'moves sequences back to the original table' do
- expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }.from(0)
- .and change { converter.send(:sequences_owned_by, parent_table_name).count }.to(0)
- end
+ it 're-adds the check constraint' do
+ expect { revert_conversion }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.by(1)
+ end
- context 'when table has LFK triggers' do
- before do
- migration_context.track_record_deletions(parent_table_name)
- migration_context.track_record_deletions(table_name)
+ it 'moves sequences back to the original table' do
+ expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }
+ .from(0)
+ .and change {
+ converter.send(
+ :sequences_owned_by, parent_table_name).count
+ }.to(0)
end
- it 'restores the trigger on the partition', :aggregate_failures do
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
- expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
+ context 'when table has LFK triggers' do
+ before do
+ migration_context.track_record_deletions(parent_table_name)
+ migration_context.track_record_deletions(table_name)
+ end
+
+ it 'restores the trigger on the partition', :aggregate_failures do
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
- expect { revert_conversion }.not_to raise_error
+ expect { revert_conversion }.not_to raise_error
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ end
end
end
+
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs #revert_partitioning'
+ end
+
+ context 'when multiple partitioning values are given' do
+ let(:zero_partition_value) { multiple_partitioning_values }
+
+ include_examples 'runs #revert_partitioning'
+ end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 9ca0a1b6e57..f4a4b0f002c 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy, feature_cate
let(:next_partition_if) { double('next_partition_if') }
let(:detach_partition_if) { double('detach_partition_if') }
+ after do
+ model.reset_column_information
+ end
+
subject(:strategy) do
described_class.new(
model,
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb
new file mode 100644
index 00000000000..230847f6902
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::UniquenessHelpers, feature_category: :database do
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:table_not_partitioned) { '_test_not_partitioned_table' }
+ let(:table_partitioned) { '_test_partitioned_table' }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE _test_partitioned_table
+ (
+ id serial NOT NULL,
+ PARTITION bigint NULL DEFAULT 1,
+ PRIMARY KEY (id, partition)
+ ) PARTITION BY list(partition);
+
+ CREATE TABLE _test_partitioned_table_1
+ PARTITION OF _test_partitioned_table FOR VALUES IN (1);
+ SQL
+ end
+
+ describe '#ensure_unique_id' do
+ subject(:ensure_unique_id) { migration.ensure_unique_id(table_name) }
+
+ context 'when table is partitioned' do
+ let(:table_name) { table_partitioned }
+ let(:trigger_name) { "assign_#{table_name}_id_trigger" }
+ let(:function_name) { "assign_#{table_name}_id_value" }
+
+ context 'when trigger already exists' do
+ before do
+ allow(migration).to receive(:trigger_exists?)
+ .with(table_name, trigger_name)
+ .and_return(true)
+ end
+
+ it 'does not modify existing trigger' do
+ expect(migration).not_to receive(:change_column_default)
+ expect(migration).not_to receive(:create_trigger_function)
+ expect(migration).not_to receive(:create_trigger)
+
+ expect do
+ ensure_unique_id
+ end.not_to raise_error
+ end
+ end
+
+ context 'when trigger is not defined' do
+ it 'creates trigger', :aggregate_failures do
+ expect(migration).to receive(:change_column_default).with(table_name, :id, nil).and_call_original
+ expect(migration).to receive(:create_trigger_function).with(function_name).and_call_original
+ expect(migration).to receive(:create_trigger)
+ .with(table_name, trigger_name, function_name, fires: 'BEFORE INSERT')
+ .and_call_original
+
+ expect do
+ ensure_unique_id
+ end.not_to raise_error
+
+ expect(migration.trigger_exists?(table_name, trigger_name)).to eq(true)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
index 3ccdb907cba..d5584342dd5 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
@@ -239,7 +239,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
end
- context 'when uniquiness validation is tested', type: :model do
+ context 'when uniqueness validation is tested', type: :model do
subject { build(:ci_variable) }
it 'does not raise exceptions' do
diff --git a/spec/lib/gitlab/database/sharding_key_spec.rb b/spec/lib/gitlab/database/sharding_key_spec.rb
index b47f5ea5df0..67c1422af3c 100644
--- a/spec/lib/gitlab/database/sharding_key_spec.rb
+++ b/spec/lib/gitlab/database/sharding_key_spec.rb
@@ -21,6 +21,17 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
]
end
+ # Some reasons to exempt a table:
+ # 1. It has no foreign key for performance reasons
+ # 2. It does not yet have a foreign key as the index is still being backfilled
+ let(:allowed_to_be_missing_foreign_key) do
+ [
+ 'p_catalog_resource_sync_events.project_id',
+ 'zoekt_indices.namespace_id',
+ 'namespace_descendants.namespace_id'
+ ]
+ end
+
let(:starting_from_milestone) { 16.6 }
let(:allowed_sharding_key_referenced_tables) { %w[projects namespaces organizations] }
@@ -38,6 +49,19 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
expect(column_exists?(table_name, column_name)).to eq(true),
"Could not find sharding key column #{table_name}.#{column_name}"
expect(referenced_table_name).to be_in(allowed_sharding_key_referenced_tables)
+
+ if allowed_to_be_missing_foreign_key.include?("#{table_name}.#{column_name}")
+ expect(has_foreign_key?(table_name, column_name)).to eq(false),
+ "The column `#{table_name}.#{column_name}` has a foreign key so cannot be " \
+ "allowed_to_be_missing_foreign_key. " \
+ "If this is a foreign key referencing the specified table #{referenced_table_name} " \
+ "then you must remove it from allowed_to_be_missing_foreign_key"
+ else
+ expect(has_foreign_key?(table_name, column_name, to_table_name: referenced_table_name)).to eq(true),
+ "Missing a foreign key constraint for `#{table_name}.#{column_name}` " \
+ "referencing #{referenced_table_name}. " \
+ "All sharding keys must have a foreign key constraint"
+ end
end
end
end
@@ -137,6 +161,25 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
result.count > 0
end
+ def has_foreign_key?(from_table_name, column_name, to_table_name: nil)
+ where_clause = {
+ constrained_table_name: from_table_name,
+ constrained_columns: [column_name]
+ }
+
+ where_clause[:referenced_table_name] = to_table_name if to_table_name
+
+ fk = ::Gitlab::Database::PostgresForeignKey.where(where_clause).first
+
+ lfk = ::Gitlab::Database::LooseForeignKeys.definitions.find do |d|
+ d.from_table == from_table_name &&
+ (to_table_name.nil? || d.to_table == to_table_name) &&
+ d.options[:column] == column_name
+ end
+
+ fk.present? || lfk.present?
+ end
+
def column_exists?(table_name, column_name)
sql = <<~SQL
SELECT 1
diff --git a/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb b/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
index 127f437dd54..e3cddceb7a9 100644
--- a/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
@@ -100,5 +100,21 @@ RSpec.describe Gitlab::DependencyLinker::PackageJsonLinker do
it 'does not link scripts with the same key as a package' do
expect(subject).not_to include(link('karma start config/karma.config.js --single-run', 'https://github.com/karma start config/karma.config.js --single-run'))
end
+
+ context 'when dependency is not a string' do
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ {
+ "dependencies": {
+ "wrong": {}
+ }
+ }
+ CONTENT
+ end
+
+ it 'does not link it' do
+ expect(subject).not_to include(%(<a href))
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/dependency_linker_spec.rb b/spec/lib/gitlab/dependency_linker_spec.rb
index 8feab0f8017..4da0b9d8c0d 100644
--- a/spec/lib/gitlab/dependency_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::DependencyLinker do
describe '.link' do
@@ -107,5 +107,16 @@ RSpec.describe Gitlab::DependencyLinker do
described_class.link(blob_name, nil, nil)
end
+
+ it 'increments usage counter based on specified used_on', :prometheus do
+ allow(described_class::GemfileLinker).to receive(:link)
+
+ described_class.link('Gemfile', nil, nil, used_on: :diff)
+
+ dependency_linker_usage_counter = Gitlab::Metrics.registry.get(:dependency_linker_usage)
+
+ expect(dependency_linker_usage_counter.get(used_on: :diff)).to eq(1)
+ expect(dependency_linker_usage_counter.get(used_on: :blob)).to eq(0)
+ end
end
end
diff --git a/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
index 891336658ce..6c9a5341695 100644
--- a/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
@@ -9,9 +9,10 @@ RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_
let(:diffable) { merge_request.merge_request_diff }
let(:diff_files_relation) { diffable.merge_request_diff_files }
let(:diff_files) { subject.diff_files }
+ let(:diff_options) { {} }
subject do
- described_class.new(diffable, page, per_page)
+ described_class.new(diffable, page, per_page, diff_options)
end
describe '#diff_files' do
@@ -77,18 +78,32 @@ RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_
context 'when last page' do
it 'returns correct diff files' do
last_page = diff_files_relation.count - per_page
- collection = described_class.new(diffable, last_page, per_page)
+ collection = described_class.new(diffable, last_page, per_page, diff_options)
expected_batch_files = diff_files_relation.page(last_page).per(per_page).map(&:new_path)
expect(collection.diff_files.map(&:new_path)).to eq(expected_batch_files)
end
end
+
+ context 'when collapse_generated is given' do
+ let(:diff_options) { { collapse_generated: true } }
+
+ it 'returns generated value' do
+ expect(diff_files.first.generated?).not_to be_nil
+ end
+ end
+
+ context 'when collapse_generated is not given' do
+ it 'returns nil' do
+ expect(diff_files.first.generated?).to be_nil
+ end
+ end
end
it_behaves_like 'unfoldable diff' do
subject do
- described_class.new(merge_request.merge_request_diff, page, per_page)
+ described_class.new(merge_request.merge_request_diff, page, per_page, diff_options)
end
end
@@ -100,7 +115,7 @@ RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_
let(:diffable) { merge_request.merge_request_diff }
subject do
- described_class.new(merge_request.merge_request_diff, page, per_page)
+ described_class.new(merge_request.merge_request_diff, page, per_page, diff_options)
end
end
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 2cd27472440..9e5d3ab0a20 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -820,7 +820,7 @@ RSpec.describe Gitlab::Diff::File do
end
context 'when neither blob exists' do
- let(:blank_diff_refs) { Gitlab::Diff::DiffRefs.new(base_sha: Gitlab::Git::BLANK_SHA, head_sha: Gitlab::Git::BLANK_SHA) }
+ let(:blank_diff_refs) { Gitlab::Diff::DiffRefs.new(base_sha: Gitlab::Git::SHA1_BLANK_SHA, head_sha: Gitlab::Git::SHA1_BLANK_SHA) }
let(:diff_file) { described_class.new(diff, diff_refs: blank_diff_refs, repository: project.repository) }
describe '#blob' do
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index e65f5a618a5..e9e65f64887 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -25,119 +25,131 @@ RSpec.describe Gitlab::Diff::Highlight, feature_category: :source_code_managemen
end
describe '#highlight' do
- context "with a diff file" do
- let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
+ shared_examples_for 'diff highlighter' do
+ context "with a diff file" do
+ let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
- it 'returns Gitlab::Diff::Line elements' do
- expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
- end
+ it 'returns Gitlab::Diff::Line elements' do
+ expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
+ end
- it 'does not modify "match" lines' do
- expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
- expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
- end
+ it 'does not modify "match" lines' do
+ expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
+ expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
+ end
- it 'highlights and marks unchanged lines' do
- code = %{ <span id="LC7" class="line" lang="ruby"> <span class="k">def</span> <span class="nf">popen</span><span class="p">(</span><span class="n">cmd</span><span class="p">,</span> <span class="n">path</span><span class="o">=</span><span class="kp">nil</span><span class="p">)</span></span>\n}
+ it 'highlights and marks unchanged lines' do
+ code = %{ <span id="LC7" class="line" lang="ruby"> <span class="k">def</span> <span class="nf">popen</span><span class="p">(</span><span class="n">cmd</span><span class="p">,</span> <span class="n">path</span><span class="o">=</span><span class="kp">nil</span><span class="p">)</span></span>\n}
- expect(subject[2].rich_text).to eq(code)
- end
+ expect(subject[2].rich_text).to eq(code)
+ end
- it 'highlights and marks removed lines' do
- code = %(-<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="s2">"System commands must be given as an array of strings"</span></span>\n)
+ it 'highlights and marks removed lines' do
+ code = %(-<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="s2">"System commands must be given as an array of strings"</span></span>\n)
- expect(subject[4].rich_text).to eq(code)
- end
+ expect(subject[4].rich_text).to eq(code)
+ end
- it 'highlights and marks added lines' do
- code = %(+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left addition">RuntimeError</span></span><span class="p"><span class="idiff addition">,</span></span><span class="idiff right addition"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n)
+ it 'highlights and marks added lines' do
+ code = %(+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left addition">RuntimeError</span></span><span class="p"><span class="idiff addition">,</span></span><span class="idiff right addition"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n)
+
+ expect(subject[5].rich_text).to eq(code)
+ end
- expect(subject[5].rich_text).to eq(code)
+ context 'when no diff_refs' do
+ before do
+ allow(diff_file).to receive(:diff_refs).and_return(nil)
+ end
+
+ context 'when no inline diffs' do
+ it_behaves_like 'without inline diffs'
+ end
+ end
end
- context 'when no diff_refs' do
- before do
- allow(diff_file).to receive(:diff_refs).and_return(nil)
+ context "with diff lines" do
+ let(:subject) { described_class.new(diff_file.diff_lines, repository: project.repository).highlight }
+
+ it 'returns Gitlab::Diff::Line elements' do
+ expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
end
- context 'when no inline diffs' do
- it_behaves_like 'without inline diffs'
+ it 'does not modify "match" lines' do
+ expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
+ expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
end
- end
- end
- context "with diff lines" do
- let(:subject) { described_class.new(diff_file.diff_lines, repository: project.repository).highlight }
+ it 'marks unchanged lines' do
+ code = %q{ def popen(cmd, path=nil)}
- it 'returns Gitlab::Diff::Line elements' do
- expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
- end
+ expect(subject[2].text).to eq(code)
+ expect(subject[2].text).not_to be_html_safe
+ end
- it 'does not modify "match" lines' do
- expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
- expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
- end
+ it 'marks removed lines' do
+ code = %q(- raise "System commands must be given as an array of strings")
- it 'marks unchanged lines' do
- code = %q{ def popen(cmd, path=nil)}
+ expect(subject[4].text).to eq(code)
+ expect(subject[4].text).not_to be_html_safe
+ end
- expect(subject[2].text).to eq(code)
- expect(subject[2].text).not_to be_html_safe
- end
+ it 'marks added lines' do
+ code = %q(+ raise <span class="idiff left right addition">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;)
- it 'marks removed lines' do
- code = %q(- raise "System commands must be given as an array of strings")
+ expect(subject[5].rich_text).to eq(code)
+ expect(subject[5].rich_text).to be_html_safe
+ end
- expect(subject[4].text).to eq(code)
- expect(subject[4].text).not_to be_html_safe
- end
+ context 'when the inline diff marker has an invalid range' do
+ before do
+ allow_any_instance_of(Gitlab::Diff::InlineDiffMarker).to receive(:mark).and_raise(RangeError)
+ end
- it 'marks added lines' do
- code = %q(+ raise <span class="idiff left right addition">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;)
+ it 'keeps the original rich line' do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(subject[5].rich_text).to eq(code)
- expect(subject[5].rich_text).to be_html_safe
- end
+ code = %q(+ raise RuntimeError, "System commands must be given as an array of strings")
- context 'when the inline diff marker has an invalid range' do
- before do
- allow_any_instance_of(Gitlab::Diff::InlineDiffMarker).to receive(:mark).and_raise(RangeError)
- end
+ expect(subject[5].text).to eq(code)
+ expect(subject[5].text).not_to be_html_safe
+ end
- it 'keeps the original rich line' do
- allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ it 'reports to Sentry if configured' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
- code = %q(+ raise RuntimeError, "System commands must be given as an array of strings")
+ expect { subject }.to raise_exception(RangeError)
+ end
+ end
- expect(subject[5].text).to eq(code)
- expect(subject[5].text).not_to be_html_safe
+ context 'when no inline diffs' do
+ it_behaves_like 'without inline diffs'
end
+ end
- it 'reports to Sentry if configured' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
+ context 'when blob is too large' do
+ let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
- expect { subject }.to raise_exception(RangeError)
+ before do
+ allow(Gitlab::Highlight).to receive(:too_large?).and_return(true)
end
- end
- context 'when no inline diffs' do
- it_behaves_like 'without inline diffs'
+ it 'blobs are highlighted as plain text without loading all data' do
+ expect(diff_file.blob).not_to receive(:load_all_data!)
+
+ expect(subject[2].rich_text).to eq(%{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
+ expect(subject[2].rich_text).to be_html_safe
+ end
end
end
- context 'when blob is too large' do
- let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
+ it_behaves_like 'diff highlighter'
+ context 'when diff_line_syntax_highlighting feature flag is disabled' do
before do
- allow(Gitlab::Highlight).to receive(:too_large?).and_return(true)
+ stub_feature_flags(diff_line_syntax_highlighting: false)
end
- it 'blobs are highlighted as plain text without loading all data' do
- expect(diff_file.blob).not_to receive(:load_all_data!)
-
- expect(subject[2].rich_text).to eq(%{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
- expect(subject[2].rich_text).to be_html_safe
- end
+ it_behaves_like 'diff highlighter'
end
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
index ad92d90e253..4dd29e1fb15 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
-require 'set'
+require 'set' # rubocop:disable Lint/RedundantRequireStatement -- Ruby 3.1 and earlier needs this. Drop this line after Ruby 3.2+ is only supported.
MOCK_LINE = Struct.new(:text, :type, :index, :old_pos, :new_pos)
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index c9b2e21d934..7b2c5ca27cb 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -173,6 +173,20 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
)
end.to raise_error(RuntimeError)
end
+
+ it 'processes the exception even if it is called within a `restrict_within_concurrent_ruby` block' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
+
+ expect do
+ Gitlab::Utils.restrict_within_concurrent_ruby do
+ described_class.track_and_raise_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
+ end
+ end.to raise_error(RuntimeError, /boom/)
+ end
end
describe '.log_and_raise_exception' do
@@ -188,6 +202,16 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
expect { log_and_raise_exception }.to raise_error(RuntimeError)
end
+ it 'processes the exception even if it is called within a `restrict_within_concurrent_ruby` block' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
+
+ expect do
+ Gitlab::Utils.restrict_within_concurrent_ruby do
+ log_and_raise_exception
+ end
+ end.to raise_error(RuntimeError)
+ end
+
context 'when extra details are provided' do
let(:extra) { { test: 1, my_token: 'test' } }
@@ -230,6 +254,14 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
end
+ it 'processes the exception even if it is called within a `restrict_within_concurrent_ruby` block' do
+ Gitlab::Utils.restrict_within_concurrent_ruby do
+ track_exception
+ end
+
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
+ end
+
context 'with tags' do
let(:tags) { { 'mytag' => 2 } }
diff --git a/spec/lib/gitlab/event_store/event_spec.rb b/spec/lib/gitlab/event_store/event_spec.rb
index edcb0e5dd1a..367b3334020 100644
--- a/spec/lib/gitlab/event_store/event_spec.rb
+++ b/spec/lib/gitlab/event_store/event_spec.rb
@@ -93,6 +93,13 @@ RSpec.describe Gitlab::EventStore::Event, feature_category: :shared do
expect(event_class.json_schema_valid).to eq(false)
end
+
+ it 'does not store JSON schema on subclass' do
+ expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent)
+
+ expect(event_class.instance_variables).not_to include(:@json_schema)
+ expect(described_class.instance_variables).to include(:@json_schema)
+ end
end
end
end
diff --git a/spec/lib/gitlab/git/changed_path_spec.rb b/spec/lib/gitlab/git/changed_path_spec.rb
index ef51021ba4c..50cdddc9e8a 100644
--- a/spec/lib/gitlab/git/changed_path_spec.rb
+++ b/spec/lib/gitlab/git/changed_path_spec.rb
@@ -3,9 +3,12 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Git::ChangedPath do
- subject(:changed_path) { described_class.new(path: path, status: status) }
+ subject(:changed_path) { described_class.new(path: path, status: status, old_mode: old_mode, new_mode: new_mode) }
let(:path) { 'test_path' }
+ let(:status) { :MODIFIED }
+ let(:old_mode) { '100644' }
+ let(:new_mode) { '100644' }
describe '#new_file?' do
subject(:new_file?) { changed_path.new_file? }
@@ -19,11 +22,33 @@ RSpec.describe Gitlab::Git::ChangedPath do
end
context 'when it is not a new file' do
- let(:status) { :MODIFIED }
-
it 'returns false' do
expect(new_file?).to eq(false)
end
end
end
+
+ describe '#submodule_change?' do
+ subject(:submodule_change?) { changed_path.submodule_change? }
+
+ context 'with a regular file change' do
+ it { is_expected.to eq false }
+ end
+
+ context 'with a submodule addition' do
+ let(:status) { :ADDED }
+ let(:old_mode) { '0' }
+ let(:new_mode) { '160000' }
+
+ it { is_expected.to eq true }
+ end
+
+ context 'with a submodule deletion' do
+ let(:status) { :MODIFIED }
+ let(:old_mode) { '160000' }
+ let(:new_mode) { '0' }
+
+ it { is_expected.to eq true }
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 6c8634281ae..a924137b8ec 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -522,7 +522,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
end
context 'when the commit cannot be found' do
- let(:commit_id) { Gitlab::Git::BLANK_SHA }
+ let(:commit_id) { Gitlab::Git::SHA1_BLANK_SHA }
it 'returns nil' do
expect(subject).to be_nil
diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb
index 5ee5e18d5af..521d4588d84 100644
--- a/spec/lib/gitlab/git/compare_spec.rb
+++ b/spec/lib/gitlab/git/compare_spec.rb
@@ -116,22 +116,20 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
describe '#generated_files' do
subject(:generated_files) { compare.generated_files }
- context 'with a detected generated file' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:repository) { project.repository.raw }
- let_it_be(:branch) { 'generated-file-test' }
- let_it_be(:base) do
- project
- .repository
- .create_file(
- project.creator,
- '.gitattributes',
- "*.txt gitlab-generated\n",
- branch_name: branch,
- message: 'Add .gitattributes file')
- end
+ let(:project) do
+ create(:project, :custom_repo, files: {
+ '.gitattributes' => '*.txt gitlab-generated'
+ })
+ end
+
+ let(:repository) { project.repository.raw }
+ let(:branch) { 'generated-file-test' }
+ let(:base) { project.default_branch }
+ let(:head) { branch }
- let_it_be(:head) do
+ context 'with a detected generated file' do
+ before do
+ project.repository.create_branch(branch, project.default_branch)
project
.repository
.create_file(
@@ -150,7 +148,7 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
message: 'Add file2')
end
- it 'sets the diff as generated' do
+ it 'returns a set that includes the generated file' do
expect(generated_files).to eq Set.new(['file1.txt'])
end
@@ -175,19 +173,16 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
end
end
- context 'with updated .gitattributes in the HEAD' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:repository) { project.repository.raw }
- let_it_be(:branch) { 'generated-file-test' }
- let_it_be(:head) do
+ context 'with deleted .gitattributes in the HEAD' do
+ before do
+ project.repository.create_branch(branch, project.default_branch)
project
.repository
- .create_file(
+ .delete_file(
project.creator,
'.gitattributes',
- "*.txt gitlab-generated\n",
branch_name: branch,
- message: 'Add .gitattributes file')
+ message: 'Delete .gitattributes file')
project
.repository
.create_file(
@@ -206,8 +201,8 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
message: 'Add file2')
end
- it 'does not set any files as generated' do
- expect(generated_files).to eq Set.new
+ it 'ignores the .gitattributes changes in the HEAD' do
+ expect(generated_files).to eq Set.new(['file1.txt'])
end
end
end
diff --git a/spec/lib/gitlab/git/push_spec.rb b/spec/lib/gitlab/git/push_spec.rb
index a0a4a22699b..f249e8316da 100644
--- a/spec/lib/gitlab/git/push_spec.rb
+++ b/spec/lib/gitlab/git/push_spec.rb
@@ -55,13 +55,13 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when old revision is blank' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.not_to be_branch_updated }
end
context 'when it is not a branch push' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.not_to be_branch_updated }
end
@@ -105,7 +105,7 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when old revision is not defined' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.to be_branch_added }
end
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when new revision is not defined' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.to be_branch_removed }
end
@@ -136,7 +136,7 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when a push is not a branch update' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'raises an error' do
expect { subject.modified_paths }.to raise_error(ArgumentError)
@@ -155,7 +155,7 @@ RSpec.describe Gitlab::Git::Push do
let(:oldrev) { nil }
it 'returns blank SHA' do
- expect(subject.oldrev).to eq Gitlab::Git::BLANK_SHA
+ expect(subject.oldrev).to eq Gitlab::Git::SHA1_BLANK_SHA
end
end
end
@@ -171,7 +171,7 @@ RSpec.describe Gitlab::Git::Push do
let(:newrev) { nil }
it 'returns blank SHA' do
- expect(subject.newrev).to eq Gitlab::Git::BLANK_SHA
+ expect(subject.newrev).to eq Gitlab::Git::SHA1_BLANK_SHA
end
end
end
diff --git a/spec/lib/gitlab/git/repository_cleaner_spec.rb b/spec/lib/gitlab/git/repository_cleaner_spec.rb
index 9f1bf9e48ee..a9ba87ad5b5 100644
--- a/spec/lib/gitlab/git/repository_cleaner_spec.rb
+++ b/spec/lib/gitlab/git/repository_cleaner_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Git::RepositoryCleaner do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:head_sha) { repository.head_commit.id }
- let(:object_map_data) { "#{head_sha} #{Gitlab::Git::BLANK_SHA}" }
+ let(:object_map_data) { "#{head_sha} #{Gitlab::Git::SHA1_BLANK_SHA}" }
let(:clean_refs) { %W[refs/environments/1 refs/merge-requests/1 refs/keep-around/#{head_sha}] }
let(:keep_refs) { %w[refs/heads/_keep refs/tags/_keep] }
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Git::RepositoryCleaner do
Gitaly::ApplyBfgObjectMapStreamResponse::Entry.new(
type: :COMMIT,
old_oid: head_sha,
- new_oid: Gitlab::Git::BLANK_SHA
+ new_oid: Gitlab::Git::SHA1_BLANK_SHA
)
)
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index cc07a16d362..7c6a54161ae 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -566,7 +566,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
[
{
old_sha: sha,
- new_sha: Gitlab::Git::BLANK_SHA,
+ new_sha: Gitlab::Git::SHA1_BLANK_SHA,
reference: tmp_ref
}
]
@@ -1155,7 +1155,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'blank revisions' do
- let(:revisions) { [::Gitlab::Git::BLANK_SHA] }
+ let(:revisions) { [::Gitlab::Git::SHA1_BLANK_SHA] }
let(:expected_blobs) { 0 }
before do
@@ -1278,7 +1278,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'with partially blank revisions' do
- let(:newrevs) { [nil, commit, Gitlab::Git::BLANK_SHA] }
+ let(:newrevs) { [nil, commit, Gitlab::Git::SHA1_BLANK_SHA] }
let(:expected_newrevs) { ['--not', '--all', '--not', commit] }
let(:expected_blobs) do
[
@@ -1326,7 +1326,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'with a single zero newrev' do
- let(:newrevs) { Gitlab::Git::BLANK_SHA }
+ let(:newrevs) { Gitlab::Git::SHA1_BLANK_SHA }
it_behaves_like '#new_blobs without revisions'
end
@@ -1338,7 +1338,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'with array containing only empty refs' do
- let(:newrevs) { [nil, Gitlab::Git::BLANK_SHA] }
+ let(:newrevs) { [nil, Gitlab::Git::SHA1_BLANK_SHA] }
it_behaves_like '#new_blobs without revisions'
end
@@ -1400,7 +1400,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
let(:changes) { repository.raw_changes_between(old_rev, new_rev) }
context 'initial commit' do
- let(:old_rev) { Gitlab::Git::BLANK_SHA }
+ let(:old_rev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:new_rev) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' }
it 'returns the changes' do
@@ -1681,11 +1681,11 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(collection.to_a).to be_empty
end
- it 'returns no Gitaly::DiffStats when there is a BLANK_SHA' do
+ it 'returns no Gitaly::DiffStats when there is a SHA1_BLANK_SHA' do
expect_any_instance_of(Gitlab::GitalyClient::CommitService)
.not_to receive(:diff_stats)
- collection = repository.diff_stats(Gitlab::Git::BLANK_SHA, 'master')
+ collection = repository.diff_stats(Gitlab::Git::SHA1_BLANK_SHA, 'master')
expect(collection).to be_a(Gitlab::Git::DiffStatsCollection)
expect(collection).to be_a(Enumerable)
@@ -1702,25 +1702,25 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
let_it_be(:diff_tree) { Gitlab::Git::DiffTree.from_commit(initial_commit) }
let(:commit_1_files) do
- [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/ls")]
+ [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/ls", old_mode: "0", new_mode: "100755")]
end
let(:commit_2_files) do
- [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "bar/branch-test.txt")]
+ [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "bar/branch-test.txt", old_mode: "0", new_mode: "100644")]
end
let(:commit_3_files) do
[
- Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: ".gitmodules"),
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: "gitlab-shell")
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: ".gitmodules", old_mode: "100644", new_mode: "100644"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "gitlab-shell", old_mode: "0", new_mode: "160000")
]
end
let(:diff_tree_files) do
[
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: ".gitignore"),
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: "LICENSE"),
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: "README.md")
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: ".gitignore", old_mode: "0", new_mode: "100644"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "LICENSE", old_mode: "0", new_mode: "100644"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "README.md", old_mode: "0", new_mode: "100644")
]
end
@@ -2222,7 +2222,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
it 'returns empty for unknown ID' do
- expect(repository.refs_by_oid(oid: Gitlab::Git::BLANK_SHA, limit: 0)).to eq([])
+ expect(repository.refs_by_oid(oid: Gitlab::Git::SHA1_BLANK_SHA, limit: 0)).to eq([])
end
it 'returns empty for an empty repo' do
@@ -2822,11 +2822,37 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
})
end
+ let(:gitattr_content) { "" }
+
let(:repository) { project.repository.raw }
- let(:rev) { 'master' }
- let(:paths) { ['file1.txt', 'file2.txt'] }
+ let(:base) { project.default_branch }
+ let(:branch) { 'detect-generated-files-test' }
+ let(:head) { branch }
+ let(:paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'file1.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'file2.txt', old_mode: '100644', new_mode: '8')
+ ]
+ end
+
+ before do
+ project.repository.create_branch(branch, project.default_branch)
- subject(:generated_files) { repository.detect_generated_files(rev, paths) }
+ project.repository.update_file(
+ project.creator,
+ 'file1.txt',
+ 'updated first file',
+ message: 'Update file',
+ branch_name: branch)
+
+ project.repository.delete_file(
+ project.creator,
+ 'file2.txt',
+ message: 'Delete file',
+ branch_name: branch)
+ end
+
+ subject(:generated_files) { repository.detect_generated_files(base, head, paths) }
context 'when the linguist-generated attribute is used' do
let(:gitattr_content) { "*.txt text\nfile1.txt linguist-generated\n" }
@@ -2852,11 +2878,99 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
- context 'when the all files are generated' do
+ context 'when the gitlab-generated attribute is used to unset' do
+ let(:gitattr_content) { "file1.txt -gitlab-generated\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ context 'with an automatically detected file' do
+ before do
+ project.repository.create_file(
+ project.creator,
+ 'package-lock.json',
+ 'generated file content',
+ message: 'Add generated file',
+ branch_name: branch)
+ end
+
+ let(:paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'file1.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'file2.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'package-lock.json', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ context 'when the manual override is used on non-detectable file' do
+ let(:gitattr_content) { "file1.txt gitlab-generated\n" }
+
+ it 'returns both manually overridden file and the detected file' do
+ expect(generated_files).to contain_exactly('file1.txt', 'package-lock.json')
+ end
+ end
+
+ context 'when the manual override is used on the detectable file' do
+ let(:gitattr_content) { "package-lock.json gitlab-generated\n" }
+
+ it 'returns the overridden file' do
+ expect(generated_files).to contain_exactly('package-lock.json')
+ end
+ end
+
+ context 'when the manual override is used on the detectable file to unset' do
+ let(:gitattr_content) { "package-lock.json -gitlab-generated\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ shared_examples 'an invalid request' do
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+
+ it 'reports the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Gitlab::Git::CommandError),
+ gl_project_path: repository.gl_project_path,
+ base: base,
+ head: head,
+ paths: paths.map(&:path)
+ )
+
+ generated_files
+ end
+ end
+
+ context 'when an unknown revision is given' do
+ let(:head) { 'unknownrevision' }
+
+ it_behaves_like 'an invalid request'
+ end
+
+ context 'when an unknown path is given' do
+ let(:paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'file1.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'unknownpath', old_mode: '100644', new_mode: '100644')
+ ]
+ end
+
+ it_behaves_like 'an invalid request'
+ end
+ end
+
+ context 'when all files are marked as generated' do
let(:gitattr_content) { "*.txt gitlab-generated\n" }
it 'returns all generated files' do
- expect(generated_files).to eq paths.to_set
+ expect(generated_files).to eq paths.map(&:path).to_set
end
end
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index a15c74a058d..7f1887f519b 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::Git::Tag, feature_category: :source_code_management do
end
context 'when the tag cannot be found' do
- let(:tag_id) { Gitlab::Git::BLANK_SHA }
+ let(:tag_id) { Gitlab::Git::SHA1_BLANK_SHA }
it 'raises GRPC::Internal' do
expect { subject }.to raise_error(GRPC::Internal)
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 090f9af2620..468df96b356 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -9,7 +9,17 @@ RSpec.describe Gitlab::Git::Tree, feature_category: :source_code_management do
let_it_be(:repository) { project.repository.raw }
shared_examples 'repo' do
- subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, pagination_params) }
+ subject(:tree) do
+ Gitlab::Git::Tree.tree_entries(
+ repository: repository,
+ sha: sha,
+ path: path,
+ recursive: recursive,
+ skip_flat_paths: skip_flat_paths,
+ rescue_not_found: rescue_not_found,
+ pagination_params: pagination_params
+ )
+ end
let(:sha) { SeedRepo::Commit::ID }
let(:path) { nil }
diff --git a/spec/lib/gitlab/git_access_project_spec.rb b/spec/lib/gitlab/git_access_project_spec.rb
index ad593cbf005..abcc5c5863e 100644
--- a/spec/lib/gitlab/git_access_project_spec.rb
+++ b/spec/lib/gitlab/git_access_project_spec.rb
@@ -141,7 +141,9 @@ RSpec.describe Gitlab::GitAccessProject do
end
context 'when check contains actual changes' do
- let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
+ let(:changes) do
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch"
+ end
it_behaves_like 'no project is created'
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 975e8bdd3ac..82daaba6448 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -802,13 +802,13 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
let(:changes) do
{ any: Gitlab::GitAccess::ANY,
- push_new_branch: "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/heads/wow",
+ push_new_branch: "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2ab refs/heads/wow",
push_master: '6f6d7e7ed 570e7b2ab refs/heads/master',
push_protected_branch: '6f6d7e7ed 570e7b2ab refs/heads/feature',
- push_remove_protected_branch: "570e7b2ab #{Gitlab::Git::BLANK_SHA} "\
+ push_remove_protected_branch: "570e7b2ab #{Gitlab::Git::SHA1_BLANK_SHA} "\
'refs/heads/feature',
push_tag: '6f6d7e7ed 570e7b2ab refs/tags/v1.0.0',
- push_new_tag: "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/tags/v7.8.9",
+ push_new_tag: "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2ab refs/tags/v7.8.9",
push_all: ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature'],
merge_into_protected_branch: "0b4bc9a #{merge_into_protected_branch} refs/heads/feature" }
end
@@ -1018,7 +1018,7 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
context 'when pushing to a project' do
let(:project) { create(:project, :public, :repository) }
- let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/heads/wow" }
+ let(:changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2ab refs/heads/wow" }
before do
project.add_developer(user)
@@ -1062,14 +1062,14 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
# additional queries.
access.check('git-receive-pack', changes)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
access.check('git-receive-pack', changes)
end
changes = ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature']
# There is still an N+1 query with protected branches
- expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control_count).with_threshold(2)
+ expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control).with_threshold(2)
end
it 'raises TimeoutError when #check_access! raises a timeout error' do
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index 0f6ef55b4b1..61f66c9cd0c 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Git do
'zzz25dc642cb6eb9a060e54bf8d69288fbee4904' | false
'4b825dc642cb6eb9a060e54bf8d69288fbee4904' | true
- Gitlab::Git::BLANK_SHA | true
+ Gitlab::Git::SHA1_BLANK_SHA | true
end
with_them do
diff --git a/spec/lib/gitlab/gitaly_client/analysis_service_spec.rb b/spec/lib/gitlab/gitaly_client/analysis_service_spec.rb
new file mode 100644
index 00000000000..c57d9b9592c
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/analysis_service_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GitalyClient::AnalysisService, feature_category: :gitaly do
+ let_it_be(:project) do
+ create(:project, :repository)
+ end
+
+ let(:repository) { project.repository.raw }
+ let(:base) { project.default_branch }
+ let(:head) { branch }
+ let(:branch) { 'test-check-blobs-generated' }
+
+ let(:client) { described_class.new(repository) }
+
+ describe '#check_blobs_generated' do
+ subject(:check_blobs_generated) { client.check_blobs_generated(base, head, changed_paths) }
+
+ before do
+ project.repository.create_branch(branch, project.default_branch)
+
+ project.repository.create_file(
+ project.creator,
+ 'file1.txt',
+ 'new file content',
+ message: 'Add new file',
+ branch_name: branch)
+
+ project.repository.create_file(
+ project.creator,
+ 'package-lock.json',
+ 'new file content',
+ message: 'Add new file',
+ branch_name: branch)
+
+ project.repository.delete_file(
+ project.creator,
+ 'README',
+ message: 'Delete README',
+ branch_name: branch)
+ end
+
+ context 'when valid changed_paths are given' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'README', old_mode: '100644', new_mode: '0'),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'file1.txt', old_mode: '0', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'package-lock.json', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'returns an expected array' do
+ expect(check_blobs_generated).to contain_exactly(
+ { generated: false, path: 'README' },
+ { generated: false, path: 'file1.txt' },
+ { generated: true, path: 'package-lock.json' }
+ )
+ end
+
+ context 'when changed_paths includes a submodule' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'package-lock.json', old_mode: '0', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'gitlab-shell', old_mode: '160000', new_mode: '0')
+ ]
+ end
+
+ it 'returns an array without the submodule change' do
+ expect(check_blobs_generated).to contain_exactly(
+ { generated: true, path: 'package-lock.json' }
+ )
+ end
+ end
+
+ context 'when changed_paths only has a submodule' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'gitlab-shell', old_mode: '0', new_mode: '160000')
+ ]
+ end
+
+ it 'returns an empty array' do
+ expect(check_blobs_generated).to eq([])
+ end
+ end
+ end
+
+ context 'when changed_paths includes a path with :' do
+ before do
+ project.repository.create_file(
+ project.creator,
+ 'abc:def',
+ 'new file content',
+ message: 'Add new file',
+ branch_name: branch)
+ end
+
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'abc:def', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'returns an expected array' do
+ expect(check_blobs_generated).to contain_exactly(
+ { generated: false, path: 'abc:def' }
+ )
+ end
+ end
+
+ context 'when an unknown revision is given' do
+ let(:head) { 'unknownrevision' }
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'file1.txt', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'raises an error' do
+ expect { check_blobs_generated }.to raise_error(GRPC::Internal)
+ end
+ end
+
+ context 'when an unknown path is given' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'unknownpath', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'raises an error' do
+ expect { check_blobs_generated }.to raise_error(GRPC::Internal)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 02c7abadd99..9b924440989 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -203,15 +203,15 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
shared_examples 'includes paths different in any parent' do
let(:changed_paths) do
[
- { path: 'files/locked/foo.lfs', status: 'ADDED' },
- { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'ADDED' },
- { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'ADDED' },
- { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ { path: 'files/locked/foo.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' }
].as_json
end
@@ -223,12 +223,12 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
shared_examples 'includes paths different in all parents' do
let(:changed_paths) do
[
- { path: 'files/locked/foo.lfs', status: 'ADDED' },
- { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'ADDED' },
- { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ { path: 'files/locked/foo.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' }
].as_json
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
index d555a847ea5..b6a57ef8b57 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistsImporter, feature_categ
let_it_be(:user) { create(:user) }
let(:client) { instance_double('Gitlab::GithubImport::Client', rate_limit_resets_in: 5) }
let(:token) { 'token' }
- let(:page_counter) { instance_double('Gitlab::GithubImport::PageCounter', current: 1, set: true, expire!: true) }
+ let(:page_counter) { instance_double('Gitlab::Import::PageCounter', current: 1, set: true, expire!: true) }
let(:page) { instance_double('Gitlab::GithubImport::Client::Page', objects: [gist], number: 1) }
let(:url) { 'https://gist.github.com/foo/bar.git' }
let(:waiter) { Gitlab::JobWaiter.new(0, 'some-job-key') }
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistsImporter, feature_categ
.with(token, parallel: true)
.and_return(client)
- allow(Gitlab::GithubImport::PageCounter)
+ allow(Gitlab::Import::PageCounter)
.to receive(:new)
.with(user, :gists, 'github-gists-importer')
.and_return(page_counter)
diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
index 65c5a7daeb2..c7dd2a9538c 100644
--- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
+++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
@@ -8,27 +8,12 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
let_it_be(:file_url) { 'https://example.com/avatar.png' }
let_it_be(:content_type) { 'application/octet-stream' }
- let(:content_length) { 1000 }
let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 200) }
- let(:headers_double) do
- instance_double(
- HTTParty::Response,
- code: 200,
- success?: true,
- parsed_response: {},
- headers: {
- 'content-length' => content_length,
- 'content-type' => content_type
- }
- )
- end
describe '#perform' do
before do
allow(Gitlab::HTTP).to receive(:perform_request)
.with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
- allow(Gitlab::HTTP).to receive(:perform_request)
- .with(Net::HTTP::Head, file_url, {}).and_return(headers_double)
end
context 'when file valid' do
@@ -71,12 +56,12 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
end
context 'when file size exceeds limit' do
- let(:content_length) { 26.megabytes }
+ subject(:downloader) { described_class.new(file_url, file_size_limit: 1.byte) }
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
- 'File size 26 MiB exceeds limit of 25 MiB'
+ 'File size 57 B exceeds limit of 1 B'
)
end
end
@@ -94,6 +79,33 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
end
end
+ context 'when chunk download returns a redirect' do
+ let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 302, http_response: {}) }
+
+ it 'skips the redirect and continues' do
+ allow(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
+
+ file = downloader.perform
+
+ expect(File.exist?(file.path)).to eq(true)
+ end
+ end
+
+ context 'when chunk download returns an error' do
+ let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 500, http_response: {}) }
+
+ it 'raises expected exception' do
+ allow(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
+
+ expect { downloader.perform }.to raise_exception(
+ Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
+ "Error downloading file from #{file_url}. Error code: #{chunk_double.code}"
+ )
+ end
+ end
+
context 'when attachment is behind a github asset endpoint' do
let(:file_url) { "https://github.com/test/project/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" }
let(:redirect_url) { "https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.jpg" }
diff --git a/spec/lib/gitlab/github_import/events_cache_spec.rb b/spec/lib/gitlab/github_import/events_cache_spec.rb
new file mode 100644
index 00000000000..8637f236977
--- /dev/null
+++ b/spec/lib/gitlab/github_import/events_cache_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::EventsCache, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project) { build_stubbed(:project, id: 1) }
+ let(:issue) { build_stubbed(:issue, iid: 2) }
+
+ let(:event_cache) { described_class.new(project) }
+
+ def build_event(event)
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(event)
+ end
+
+ describe '#add' do
+ it 'adds event to cache' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:list_add).with(
+ 'github-importer/events/1/Issue/2',
+ an_instance_of(String),
+ limit: described_class::MAX_NUMBER_OF_EVENTS
+ )
+
+ event_cache.add(issue, build_event({ event: 'closed' }))
+ end
+
+ context 'when events is too large to cache' do
+ before do
+ stub_const("#{described_class}::MAX_EVENT_SIZE", 1.byte)
+ end
+
+ it 'does not add event to cache' do
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:list_add)
+ expect(Gitlab::GithubImport::Logger).to receive(:warn).with(
+ message: 'Event too large to cache',
+ project_id: project.id,
+ github_identifiers: {
+ event: 'closed',
+ id: '99',
+ issuable_iid: '2'
+ }
+ )
+
+ event_cache.add(issue, build_event({ event: 'closed', id: '99', issue: { number: '2' } }))
+ end
+ end
+ end
+
+ describe '#events' do
+ it 'retrieves the list of events from the cache in the correct order' do
+ key = 'github-importer/events/1/Issue/2'
+
+ Gitlab::Cache::Import::Caching.list_add(key, { event: 'merged', created_at: '2023-01-02T00:00:00Z' }.to_json)
+ Gitlab::Cache::Import::Caching.list_add(key, { event: 'closed', created_at: '2023-01-03T00:00:00Z' }.to_json)
+ Gitlab::Cache::Import::Caching.list_add(key, { event: 'commented', created_at: '2023-01-01T00:00:00Z' }.to_json)
+
+ events = event_cache.events(issue).map(&:to_hash)
+
+ expect(events).to match([
+ a_hash_including(event: 'commented', created_at: '2023-01-01 00:00:00 UTC'),
+ a_hash_including(event: 'merged', created_at: '2023-01-02 00:00:00 UTC'),
+ a_hash_including(event: 'closed', created_at: '2023-01-03 00:00:00 UTC')
+ ])
+ end
+
+ context 'when no event was added' do
+ it 'returns an empty array' do
+ expect(event_cache.events(issue)).to eq([])
+ end
+ end
+ end
+
+ describe '#delete' do
+ it 'deletes the list' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:del).with('github-importer/events/1/Issue/2')
+
+ event_cache.delete(issue)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
index 5e60be44621..bc1b32661b8 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::Attachments::BaseImporter do
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::BaseImporter, feature_category: :importers do
subject(:importer) { importer_class.new(project, client) }
let(:project) { instance_double(Project, id: 1) }
diff --git a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
index b44f1ec85f3..20152020897 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
@@ -10,39 +10,68 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter, feat
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:issue_1) { create(:issue, project: project) }
- let_it_be(:issue_2) { create(:issue, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ let_it_be(:issue_with_attachment) do
+ create(:issue,
+ project: project,
+ description: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
+
+ it 'selects both issues, and selects only properties it needs' do
+ stubbed_collection = class_double(Issue, each_batch: [])
+
+ expect(project.issues).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:select).with(:id, :description, :iid).and_return(stubbed_collection)
- it 'imports each project issue attachments' do
- expect(project.issues).to receive(:id_not_in).with([]).and_return(project.issues)
- expect(project.issues).to receive(:select).with(:id, :description, :iid).and_call_original
+ importer.sequential_import
+ end
- expect_next_instances_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer only for the issue with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: issue_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
- context 'when issue is already processed' do
- it "doesn't import this issue attachments" do
- importer.mark_as_imported(issue_1)
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
- expect(project.issues).to receive(:id_not_in).with([issue_1.id.to_s]).and_call_original
- expect_next_instance_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer for both issues' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
end
+
+ context 'when issue has already been processed' do
+ before do
+ importer.mark_as_imported(issue_with_attachment)
+ end
+
+ it 'does not select issues that were processed' do
+ expect(project.issues).to receive(:id_not_in).with([issue_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
+
+ it 'does not execute importer for the issue with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
+
+ importer.sequential_import
+ end
+ end
end
describe '#sidekiq_worker_class' do
diff --git a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
index 381cb17bb52..5ed6dce8507 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
@@ -10,39 +10,69 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporte
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:merge_request_1) { create(:merge_request, source_project: project, target_branch: 'feature1') }
- let_it_be(:merge_request_2) { create(:merge_request, source_project: project, target_branch: 'feature2') }
+ let_it_be(:mr) { create(:merge_request, source_project: project, target_branch: 'feature1') }
+
+ let_it_be(:mr_with_attachment) do
+ create(:merge_request,
+ source_project: project,
+ target_branch: 'feature2',
+ description: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
+
+ it 'selects both merge requests, and selects only properties it needs' do
+ stubbed_collection = class_double(MergeRequest, each_batch: [])
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ expect(project.merge_requests).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:select).with(:id, :description, :iid).and_return(stubbed_collection)
- it 'imports each project merge request attachments' do
- expect(project.merge_requests).to receive(:id_not_in).with([]).and_return(project.merge_requests)
- expect(project.merge_requests).to receive(:select).with(:id, :description, :iid).and_call_original
+ importer.sequential_import
+ end
- expect_next_instances_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer only for the merge request with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: mr_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
- context 'when merge request is already processed' do
- it "doesn't import this merge request attachments" do
- importer.mark_as_imported(merge_request_1)
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
- expect(project.merge_requests).to receive(:id_not_in).with([merge_request_1.id.to_s]).and_call_original
- expect_next_instance_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer for both merge requests' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
end
+
+ context 'when merge request has already been processed' do
+ before do
+ importer.mark_as_imported(mr_with_attachment)
+ end
+
+ it 'does not select merge requests that were processed' do
+ expect(project.merge_requests).to receive(:id_not_in).with([mr_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
+
+ it 'does not execute importer for the merge request with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
+
+ importer.sequential_import
+ end
+ end
end
describe '#sidekiq_worker_class' do
diff --git a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
index 5b3ad032702..da0ee1ed0dd 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
@@ -10,30 +10,75 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter, featu
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:note_1) { create(:note, project: project) }
- let_it_be(:note_2) { create(:note, project: project) }
- let_it_be(:system_note) { create(:note, :system, project: project) }
+ let_it_be(:note) { create(:note, project: project) }
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ let_it_be(:note_with_attachment) do
+ create(:note,
+ project: project,
+ note: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
+
+ let_it_be(:system_note_with_attachment) do
+ create(:note,
+ :system,
+ project: project,
+ note: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
- it 'imports each project user note' do
- expect(project.notes).to receive(:id_not_in).with([]).and_call_original
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).twice.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).twice
+ it 'selects only user notes, and selects only properties it needs' do
+ stubbed_collection = class_double(Note, each_batch: [])
+
+ expect(project.notes).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:user).and_return(stubbed_collection)
+ expect(stubbed_collection)
+ .to receive(:select).with(:id, :note, :system, :noteable_type)
+ .and_return(stubbed_collection)
importer.sequential_import
end
- context 'when note is already processed' do
- it "doesn't import this note" do
- importer.mark_as_imported(note_1)
+ it 'executes importer only for the note with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: note_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
+
+ it 'executes importer for both user notes' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+
+ context 'when note has already been processed' do
+ before do
+ importer.mark_as_imported(note_with_attachment)
+ end
+
+ it 'does not select notes that were processed' do
+ expect(project.notes).to receive(:id_not_in).with([note_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
- expect(project.notes).to receive(:id_not_in).with([note_1.id.to_s]).and_call_original
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).once.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).once
+ it 'does not execute importer for the note with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
importer.sequential_import
end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
index c1c19c40afb..cf51760d966 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
@@ -10,31 +10,64 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter, fe
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:release_1) { create(:release, project: project) }
- let_it_be(:release_2) { create(:release, project: project) }
+ let_it_be(:release) { create(:release, project: project) }
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ let_it_be(:release_with_attachment) do
+ create(:release,
+ project: project,
+ description: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
- it 'imports each project release' do
- expect(project.releases).to receive(:id_not_in).with([]).and_return(project.releases)
- expect(project.releases).to receive(:select).with(:id, :description, :tag).and_call_original
+ it 'selects both releases, and selects only properties it needs' do
+ stubbed_collection = class_double(Release, each_batch: [])
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).twice.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).twice
+ expect(project.releases).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:select).with(:id, :description, :tag).and_return(stubbed_collection)
importer.sequential_import
end
- context 'when note is already processed' do
- it "doesn't import this release" do
- importer.mark_as_imported(release_1)
+ it 'executes importer only for the release with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: release_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
+
+ it 'executes importer for both releases' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+
+ context 'when release has already been processed' do
+ before do
+ importer.mark_as_imported(release_with_attachment)
+ end
+
+ it 'does not select releases that were processed' do
+ expect(project.releases).to receive(:id_not_in).with([release_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
- expect(project.releases).to receive(:id_not_in).with([release_1.id.to_s]).and_call_original
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).once.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).once
+ it 'does not execute importer for the release with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
importer.sequential_import
end
diff --git a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
index c1e9bed5681..d0d3e6c6da8 100644
--- a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::GithubImport::Importer::CollaboratorsImporter, feature_ca
it 'imports each collaborator in parallel' do
expect(Gitlab::GithubImport::ImportCollaboratorWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 1eb146ea958..ed74e978f16 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_catego
.and_yield(github_comment)
expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/events/commented_spec.rb b/spec/lib/gitlab/github_import/importer/events/commented_spec.rb
new file mode 100644
index 00000000000..bd3bea87688
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/commented_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Commented, feature_category: :importers do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:issuable) { create(:issue, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.new(
+ id: 1196850910,
+ actor: { id: user.id, login: user.username },
+ event: 'commented',
+ created_at: '2022-07-27T14:41:11Z',
+ updated_at: '2022-07-27T14:41:11Z',
+ body: 'This is my note',
+ issue: { number: issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
+ )
+ end
+
+ let(:extended_events) { true }
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issuable.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+ end
+
+ shared_examples 'new note' do
+ it 'creates a note' do
+ expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
+
+ expect(issuable.notes.last).to have_attributes(
+ note: 'This is my note',
+ author_id: user.id,
+ noteable_type: issuable.class.name.to_s
+ )
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'does not create a note' do
+ expect { importer.execute(issue_event) }.not_to change { Note.count }
+ end
+ end
+ end
+
+ context 'with Issue' do
+ it_behaves_like 'new note'
+ end
+
+ context 'with MergeRequest' do
+ let(:issuable) { create(:merge_request, source_project: project, target_project: project) }
+
+ it_behaves_like 'new note'
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/merged_spec.rb b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
index 4ea62557dd6..30bc8aabe12 100644
--- a/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:commit_id) { nil }
+ let(:extended_events) { false }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
@@ -32,6 +33,9 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
end
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
end
it 'creates expected event and state event' do
@@ -71,4 +75,27 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
expect(state_event.source_commit).to eq commit_id[0..40]
end
end
+
+ describe 'extended events' do
+ context 'when using extended events' do
+ let(:extended_events) { true }
+
+ it 'creates a merged by note' do
+ expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
+
+ last_note = merge_request.notes.last
+ expect(last_note.created_at).to eq(issue_event.created_at)
+ expect(last_note.author).to eq(project.owner)
+ expect(last_note.note).to eq("*Merged by: #{user.username} at #{issue_event.created_at}*")
+ end
+ end
+
+ context 'when not using extended events' do
+ let(:extended_events) { false }
+
+ it 'does not create a merged by note' do
+ expect { importer.execute(issue_event) }.not_to change { Note.count }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb b/spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb
new file mode 100644
index 00000000000..f60a9d65269
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Reviewed, feature_category: :importers do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:extended_events) { true }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.new(
+ id: 1196850910,
+ actor: { id: user.id, login: user.username },
+ event: 'reviewed',
+ submitted_at: '2022-07-27T14:41:11Z',
+ body: 'This is my review',
+ state: state,
+ issue: { number: merge_request.iid, pull_request: true }
+ )
+ end
+
+ let(:state) { 'commented' }
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(merge_request.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+ end
+
+ it 'creates a review note', :aggregate_failures do
+ expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to include("This is my review")
+ expect(last_note.author).to eq(user)
+ expect(last_note.created_at).to eq(issue_event.submitted_at)
+ end
+
+ it 'does not create a reviewer for the Merge Request', :aggregate_failures do
+ expect { importer.execute(issue_event) }.not_to change { MergeRequestReviewer.count }
+ end
+
+ context 'when stage is approved' do
+ let(:state) { 'approved' }
+
+ it 'creates an approval for the Merge Request', :aggregate_failures do
+ expect { importer.execute(issue_event) }.to change { Approval.count }.by(1).and change { Note.count }.by(2)
+
+ expect(merge_request.approved_by_users.reload).to include(user)
+ expect(merge_request.approvals.last.created_at).to eq(issue_event.submitted_at)
+
+ note = merge_request.notes.where(system: false).last
+ expect(note.note).to include("This is my review")
+ expect(note.author).to eq(user)
+ expect(note.created_at).to eq(issue_event.submitted_at)
+
+ system_note = merge_request.notes.where(system: true).last
+ expect(system_note.note).to eq('approved this merge request')
+ expect(system_note.author).to eq(user)
+ expect(system_note.created_at).to eq(issue_event.submitted_at)
+ expect(system_note.system_note_metadata.action).to eq('approved')
+ end
+ end
+
+ context 'when extended events is false' do
+ let(:extended_events) { false }
+
+ it 'does nothing' do
+ expect { importer.execute(issue_event) }
+ .to not_change { Note.count }
+ .and not_change { Approval.count }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
index 2389489e867..ffe6c237506 100644
--- a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -115,6 +115,18 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Merged
end
+ context "when it's commented issue event" do
+ let(:event_name) { 'commented' }
+
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Commented
+ end
+
+ context "when it's reviewed issue event" do
+ let(:event_name) { 'reviewed' }
+
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Reviewed
+ end
+
context "when it's unknown issue event" do
let(:event_name) { 'fake' }
diff --git a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
index f7ee6fee6dc..7e926b3af46 100644
--- a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
struct = Struct.new(
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone, :source,
:assignee, :assigner, :review_requester, :requested_reviewer, :issue, :created_at, :performed_via_github_app,
- keyword_init: true
+ :body, :updated_at, :submitted_at, :state, keyword_init: true
)
struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
end
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
allow(importer).to receive(:each_object_to_import).and_yield(issue_event)
expect(Gitlab::GithubImport::ImportIssueEventWorker).to receive(:perform_in).with(
- 1, project.id, an_instance_of(Hash), an_instance_of(String)
+ an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String)
)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index 9451d1dfc37..93466497ceb 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category:
expect(Gitlab::GithubImport::ImportIssueWorker)
.to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index a5ec39b4177..eaf4d41df43 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -123,7 +123,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter, feature_categ
end
expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index 92d3071c826..722470cbc1d 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category:
.and_yield(github_comment)
expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
index b0892767fb3..b2fc1bea39e 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -145,7 +145,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
it 'imports each protected branch in parallel' do
expect(Gitlab::GithubImport::ImportProtectedBranchWorker)
.to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).with(project, :protected_branch, :fetched)
@@ -166,7 +166,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
# when user has no admin rights on repo
let(:unknown_protection_branch) { branch_struct.new(name: 'development', protection: nil) }
- let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
+ let(:page_counter) { instance_double(Gitlab::Import::PageCounter) }
before do
allow(client).to receive(:branches).with(project.import_source)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
index 6846c99fb63..1651774b5ce 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
expect(merge_request.reviewers).to contain_exactly(author)
end
+ context 'when add_reviewer option is false' do
+ it 'does not change Merge Request reviewers' do
+ expect { subject.execute(add_reviewer: false) }.not_to change { MergeRequestReviewer.count }
+ end
+ end
+
context 'when reviewer already exists' do
before do
create(
@@ -309,6 +315,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
extra.reverse_merge(
author: { id: 999, login: 'author' },
merge_request_id: merge_request.id,
+ merge_request_iid: merge_request.iid,
review_type: type,
note: 'note',
submitted_at: submitted_at.to_s
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
index 1977815e3a0..7ba88b4fa79 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
@@ -116,10 +116,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
it 'schedule import for each merge request reviewers' do
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
- .to receive(:perform_in).with(1, *expected_worker_payload.first)
+ .to receive(:perform_in).with(an_instance_of(Float), *expected_worker_payload.first)
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
- .to receive(:perform_in).with(1, *expected_worker_payload.second)
+ .to receive(:perform_in).with(an_instance_of(Float), *expected_worker_payload.second)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).twice.with(project, :pull_request_review_request, :fetched)
@@ -137,7 +137,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
it "doesn't schedule import this merge request reviewers" do
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
- .to receive(:perform_in).with(1, *expected_worker_payload.second)
+ .to receive(:perform_in).with(an_instance_of(Float), *expected_worker_payload.second)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).once.with(project, :pull_request_review_request, :fetched)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
index f5779f300b8..94248f60a0b 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewsImporter, fe
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "merge_request/#{merge_request.id}/pull_request_reviews")
.set(2)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index 1a0adbbe3a3..4c6b6a81d35 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
expect(Gitlab::GithubImport::ImportPullRequestWorker)
.to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
new file mode 100644
index 00000000000..2b21232c642
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::ReplayEventsImporter, feature_category: :importers do
+ let_it_be(:association) { create(:merged_merge_request) }
+ let_it_be(:project) { association.project }
+  let(:user1) { build(:user) }
+  let(:user2) { build(:user) }
+  let(:user3) { build(:user) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ let(:representation) do
+ Gitlab::GithubImport::Representation::ReplayEvent.new(
+ issuable_type: association.class.name.to_s, issuable_iid: association.iid
+ )
+ end
+
+ let(:events) do
+ [
+ {
+ requested_reviewer: { id: 1, login: 'user1' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 1, login: 'user1' },
+ event: 'review_request_removed'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_request_removed'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 3, login: 'user3' },
+ event: 'review_requested'
+ }
+ ]
+ end
+
+ subject(:importer) { described_class.new(representation, project, client) }
+
+ describe '#execute' do
+ before do
+ representations = events.map { |e| Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(e) }
+
+ allow_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
+ allow(events_cache).to receive(:events).with(association).and_return(representations)
+ end
+ end
+
+ context 'when association is a MergeRequest' do
+ it 'imports reviewers' do
+ representation = instance_double(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests)
+
+ expect(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests).to receive(:from_json_hash).with(
+ merge_request_id: association.id,
+ merge_request_iid: association.iid,
+ users: [
+ { id: 2, login: 'user2' },
+ { id: 3, login: 'user3' }
+ ]
+ ).and_return(representation)
+
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter, anything, project, client
+      ) do |review_importer|
+        expect(review_importer).to receive(:execute)
+ end
+
+ importer.execute
+ end
+
+ context 'when reviewer is a team' do
+ let(:events) do
+ [
+ {
+ event: 'review_requested',
+ requested_team: { name: 'backend-team' }
+ },
+ {
+ event: 'review_requested',
+ requested_team: { name: 'frontend-team' }
+ },
+ {
+ event: 'review_request_removed',
+ requested_team: { name: 'frontend-team' }
+ }
+ ]
+ end
+
+      it 'ignores the events and does not assign the reviewers' do
+ expect(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests).to receive(:from_json_hash).with(
+ merge_request_id: association.id,
+ merge_request_iid: association.iid,
+ users: []
+ ).and_call_original
+
+ importer.execute
+ end
+ end
+ end
+
+ context 'when association is not found' do
+ let(:representation) do
+ Gitlab::GithubImport::Representation::ReplayEvent.new(
+ issuable_type: association.class.name.to_s, issuable_iid: -1
+ )
+ end
+
+ it 'does not read events' do
+ expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
+
+ importer.execute
+ end
+ end
+
+    context 'when issuable type is not supported' do
+ let(:representation) do
+ Gitlab::GithubImport::Representation::ReplayEvent.new(
+ issuable_type: 'Issue', issuable_iid: association.iid
+ )
+ end
+
+ it 'does not read events' do
+ expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
+
+ importer.execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
index 6fe0494d7cd..d2e63eba954 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter d
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "merge_request/#{merge_request.id}/pull_request_comments")
.set(2)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
index 91f89f0779c..19142b94519 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter, feature_category: :importers do
- let(:client) { double }
+ let(:client) { Gitlab::GithubImport::Client.new('token') }
- let_it_be(:project) { create(:project, :import_started, import_source: 'http://somegithub.com') }
+ let_it_be(:project) { create(:project, :import_started, import_source: 'foo/bar') }
let!(:issuable) { create(:issue, project: project) }
@@ -88,23 +88,32 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:issue_event) do
struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
- struct.new(id: 1, event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
+ struct.new(id: 1, event: event_name, created_at: '2022-04-26 18:30:53 UTC')
end
+ let(:event_name) { 'closed' }
+
+ let(:page_events) { [issue_event] }
+
let(:page) do
instance_double(
Gitlab::GithubImport::Client::Page,
- number: 1, objects: [issue_event]
+ number: 1, objects: page_events
)
end
- let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
+ let(:page_counter) { instance_double(Gitlab::Import::PageCounter) }
+
+ let(:extended_events) { true }
before do
allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:each_page).once.with(:issue_timeline,
project.import_source, issuable.iid, { state: 'all', sort: 'created', direction: 'asc', page: 1 }
).and_yield(page)
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
end
context 'with issues' do
@@ -152,7 +161,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
it 'triggers page number increment' do
- expect(Gitlab::GithubImport::PageCounter)
+ expect(Gitlab::Import::PageCounter)
.to receive(:new).with(project, 'issues/1/issue_timeline')
.and_return(page_counter)
expect(page_counter).to receive(:current).and_return(1)
@@ -166,7 +175,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
context 'when page is already processed' do
before do
- page_counter = Gitlab::GithubImport::PageCounter.new(
+ page_counter = Gitlab::Import::PageCounter.new(
project, subject.page_counter_id(issuable)
)
page_counter.set(page.number)
@@ -190,10 +199,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
context 'when event is not supported' do
- let(:issue_event) do
- struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
- struct.new(id: 1, event: 'not_supported_event', created_at: '2022-04-26 18:30:53 UTC')
- end
+ let(:event_name) { 'not_supported_event' }
it "doesn't process this event" do
counter = 0
@@ -201,5 +207,188 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
expect(counter).to eq 0
end
end
+
+ describe 'increment object counter' do
+ it 'increments counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :fetched)
+
+ subject.each_object_to_import { |event| event }
+ end
+
+ context 'when event should increment a mapped fetched counter' do
+ before do
+ stub_const('Gitlab::GithubImport::Importer::IssueEventImporter::EVENT_COUNTER_MAP', {
+ 'closed' => 'custom_type'
+ })
+ end
+
+ it 'increments the mapped fetched counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, 'custom_type', :fetched)
+
+ subject.each_object_to_import { |event| event }
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'increments the issue_event fetched counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :fetched)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+ end
+ end
+
+ describe 'save events' do
+ shared_examples 'saves event' do
+ it 'saves event' do
+ expect(Gitlab::GithubImport::Representation::IssueEvent).to receive(:from_api_response).with(issue_event.to_h)
+ .and_call_original
+
+ expect_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
+ expect(events_cache).to receive(:add).with(
+ issuable,
+ an_instance_of(Gitlab::GithubImport::Representation::IssueEvent)
+ )
+ end
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when event is review_requested' do
+ let(:event_name) { 'review_requested' }
+
+ it_behaves_like 'saves event'
+ end
+
+ context 'when event is review_request_removed' do
+ let(:event_name) { 'review_request_removed' }
+
+ it_behaves_like 'saves event'
+ end
+
+ context 'when event is closed' do
+ let(:event_name) { 'closed' }
+
+ it 'does not save event' do
+ expect_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
+ expect(events_cache).not_to receive(:add)
+ end
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when extended_events is disabled' do
+ let(:event_name) { 'review_requested' }
+ let(:extended_events) { false }
+
+ it 'does not save event' do
+ expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+ end
+
+ describe 'after batch processed' do
+ context 'when events should be replayed' do
+ let(:event_name) { 'review_requested' }
+
+ it 'enqueues worker to replay events' do
+ allow(Gitlab::JobWaiter).to receive(:generate_key).and_return('job_waiter_key')
+
+ expect(Gitlab::GithubImport::ReplayEventsWorker).to receive(:perform_async)
+ .with(
+ project.id,
+ { 'issuable_type' => issuable.class.name.to_s, 'issuable_iid' => issuable.iid },
+ 'job_waiter_key'
+ )
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when events are not relevant' do
+ let(:event_name) { 'closed' }
+
+ it 'does not replay events' do
+ expect(Gitlab::GithubImport::ReplayEventsWorker).not_to receive(:perform_async)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'does not replay events' do
+ expect(Gitlab::GithubImport::ReplayEventsWorker).not_to receive(:perform_async)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+ end
+ end
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ let(:extended_events) { false }
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+
+ stub_request(:get, 'https://api.github.com/rate_limit')
+ .to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
+
+ events = [
+ {
+ id: 1,
+ event: 'review_requested',
+ created_at: '2022-04-26 18:30:53 UTC',
+ issue: {
+ number: issuable.iid,
+ pull_request: true
+ }
+ }
+ ]
+
+ endpoint = 'https://api.github.com/repos/foo/bar/issues/1/timeline' \
+ '?direction=asc&page=1&per_page=100&sort=created&state=all'
+
+ stub_request(:get, endpoint)
+ .to_return(status: 200, body: events.to_json, headers: { 'Content-Type' => 'application/json' })
+ end
+
+ context 'when extended_events is disabled' do
+ it 'enqueues importer worker' do
+ expect { subject.execute }.to change { Gitlab::GithubImport::ReplayEventsWorker.jobs.size }.by(0)
+ .and change { Gitlab::GithubImport::ImportIssueEventWorker.jobs.size }.by(1)
+ end
+
+ it 'returns job waiter with the correct remaining jobs count' do
+ job_waiter = subject.execute
+
+ expect(job_waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ context 'when extended_events is enabled' do
+ let(:extended_events) { true }
+
+ it 'enqueues importer worker and replay worker' do
+ expect { subject.execute }.to change { Gitlab::GithubImport::ReplayEventsWorker.jobs.size }.by(1)
+ .and change { Gitlab::GithubImport::ImportIssueEventWorker.jobs.size }.by(1)
+ end
+
+ it 'returns job waiter with the correct remaining jobs count' do
+ job_waiter = subject.execute
+
+ expect(job_waiter.jobs_remaining).to eq(2)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
index 88613244c8b..c0f0d86d625 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "issue/#{issue.id}/issue_comments")
.set(2)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
index 601cd7a8f15..2d981a3d14f 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesIm
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "merge_request/#{merge_request.id}/issue_comments")
.set(2)
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 3f771970588..ff3821dedec 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::MarkdownText do
+RSpec.describe Gitlab::GithubImport::MarkdownText, feature_category: :importers do
describe '.format' do
it 'formats the text' do
author = double(:author, login: 'Alice')
@@ -103,6 +103,10 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
"https://github.com/nickname/public-test-repo/files/3/git-cheat-sheet.#{doc_extension}"
)
end
+
+ it 'returns an empty array when passed nil' do
+ expect(described_class.fetch_attachments(nil)).to be_empty
+ end
end
describe '#to_s' do
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 3188206de5b..2418c2e08af 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -279,26 +279,53 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
allow(importer).to receive(:representation_class).and_return(repr_class)
allow(importer).to receive(:sidekiq_worker_class).and_return(worker_class)
allow(repr_class).to receive(:from_api_response).with(object, {})
- .and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' })
+ .and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' }, { title: 'Four' })
end
it 'imports data in parallel with delays respecting parallel_import_batch definition and return job waiter' do
- allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
- allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
-
- expect(importer).to receive(:each_object_to_import)
- .and_yield(object).and_yield(object).and_yield(object)
- expect(worker_class).to receive(:perform_in)
- .with(1, project.id, { 'title' => 'One' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(1, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(61, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
-
- job_waiter = importer.parallel_import
-
- expect(job_waiter.key).to eq('waiter-key')
- expect(job_waiter.jobs_remaining).to eq(3)
+ freeze_time do
+ allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
+ allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
+
+ expect(importer).to receive(:each_object_to_import)
+ .and_yield(object).and_yield(object).and_yield(object).and_yield(object)
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'One' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(31.0, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(61.0, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(91.0, project.id, { 'title' => 'Four' }, 'waiter-key').ordered
+
+ job_waiter = importer.parallel_import
+
+ expect(job_waiter.key).to eq('waiter-key')
+ expect(job_waiter.jobs_remaining).to eq(4)
+ end
+ end
+
+ context 'when job is running for a long time' do
+ it 'deducts the job runtime from the delay' do
+ freeze_time do
+ allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
+ allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
+ allow(importer).to receive(:job_started_at).and_return(45.seconds.ago)
+ allow(importer).to receive(:each_object_to_import)
+ .and_yield(object).and_yield(object).and_yield(object).and_yield(object)
+
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'One' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(16.0, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(46.0, project.id, { 'title' => 'Four' }, 'waiter-key').ordered
+
+ importer.parallel_import
+ end
+ end
end
context 'when job restarts due to API rate limit or Sidekiq interruption' do
@@ -313,21 +340,23 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
end
it "restores job waiter's key and jobs_remaining" do
- allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute })
+ freeze_time do
+ allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute })
- expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
+ expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
- expect(worker_class).to receive(:perform_in)
- .with(1, project.id, { 'title' => 'One' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(61, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(121, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'One' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(61.0, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(121.0, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
- job_waiter = importer.parallel_import
+ job_waiter = importer.parallel_import
- expect(job_waiter.key).to eq('waiter-key')
- expect(job_waiter.jobs_remaining).to eq(6)
+ expect(job_waiter.key).to eq('waiter-key')
+ expect(job_waiter.jobs_remaining).to eq(6)
+ end
end
end
end
diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
index 6620dee0fd0..de0509c3e5e 100644
--- a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
@@ -168,8 +168,8 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
describe '.from_api_response' do
let(:response) do
event_resource = Struct.new(
- :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone,
- :source, :assignee, :requested_reviewer, :review_requester, :issue, :created_at,
+ :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone, :state, :body,
+ :source, :assignee, :requested_reviewer, :review_requester, :issue, :created_at, :updated_at, :submitted_at,
:performed_via_github_app,
keyword_init: true
)
diff --git a/spec/lib/gitlab/github_import/representation/note_text_spec.rb b/spec/lib/gitlab/github_import/representation/note_text_spec.rb
index 7aa458a1c33..b1ca1512855 100644
--- a/spec/lib/gitlab/github_import/representation/note_text_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/note_text_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Representation::NoteText do
+RSpec.describe Gitlab::GithubImport::Representation::NoteText, feature_category: :importers do
shared_examples 'a Note text data' do |match_record_type|
it 'returns an instance of NoteText' do
expect(representation).to be_an_instance_of(described_class)
@@ -153,4 +153,36 @@ RSpec.describe Gitlab::GithubImport::Representation::NoteText do
end
end
end
+
+ describe '#has_attachments?' do
+ subject { described_class.new({ text: text }).has_attachments? }
+
+ context 'when text has attachments' do
+ let(:text) { 'See ![image](https://user-images.githubusercontent.com/1/uuid-1.png) for details' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when text does not have attachments' do
+ let(:text) { 'Some text here' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#attachments' do
+ subject { described_class.new({ text: text }).attachments }
+
+ context 'when text has attachments' do
+ let(:text) { 'See ![image](https://user-images.githubusercontent.com/1/uuid-1.png) for details' }
+
+ it { is_expected.to contain_exactly(instance_of(Gitlab::GithubImport::Markdown::Attachment)) }
+ end
+
+ context 'when text does not have attachments' do
+ let(:text) { 'Some text here' }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/replay_event_spec.rb b/spec/lib/gitlab/github_import/representation/replay_event_spec.rb
new file mode 100644
index 00000000000..1afefb76c6a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/replay_event_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::ReplayEvent, feature_category: :importers do
+ describe '.from_json_hash' do
+ it 'returns an instance of ReplayEvent' do
+ representation = described_class.from_json_hash(issuable_iid: 1, issuable_type: 'MergeRequest')
+
+ expect(representation).to be_an_instance_of(described_class)
+ end
+ end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ representation = described_class.new(issuable_type: 'MergeRequest', issuable_iid: 1)
+
+ expect(representation.github_identifiers).to eq({
+ issuable_type: 'MergeRequest',
+ issuable_iid: 1
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
index ea1526ca25f..d268f3a8650 100644
--- a/spec/lib/gitlab/github_import/settings_spec.rb
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -21,12 +21,6 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
stages = described_class::OPTIONAL_STAGES
[
{
- name: 'single_endpoint_issue_events_import',
- label: stages[:single_endpoint_issue_events_import][:label],
- selected: false,
- details: stages[:single_endpoint_issue_events_import][:details]
- },
- {
name: 'single_endpoint_notes_import',
label: stages[:single_endpoint_notes_import][:label],
selected: false,
@@ -48,7 +42,31 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
end
it 'returns stages list as array' do
- expect(described_class.stages_array).to match_array(expected_list)
+ expect(described_class.stages_array(project.owner)).to match_array(expected_list)
+ end
+
+ context 'when `github_import_extended_events` feature flag is disabled' do
+ let(:expected_list_with_deprecated_options) do
+ stages = described_class::OPTIONAL_STAGES
+
+ expected_list.concat(
+ [
+ {
+ name: 'single_endpoint_issue_events_import',
+ label: stages[:single_endpoint_issue_events_import][:label],
+ selected: false,
+ details: stages[:single_endpoint_issue_events_import][:details]
+ }
+ ])
+ end
+
+ before do
+ stub_feature_flags(github_import_extended_events: false)
+ end
+
+ it 'returns stages list as array' do
+ expect(described_class.stages_array(project.owner)).to match_array(expected_list_with_deprecated_options)
+ end
end
end
@@ -99,4 +117,24 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
expect(settings.disabled?(:collaborators_import)).to eq true
end
end
+
+ describe '#extended_events?' do
+ it 'when extended_events is set to true' do
+ project.build_or_assign_import_data(data: { extended_events: true })
+
+ expect(settings.extended_events?).to eq(true)
+ end
+
+ it 'when extended_events is set to false' do
+ project.build_or_assign_import_data(data: { extended_events: false })
+
+ expect(settings.extended_events?).to eq(false)
+ end
+
+ it 'when extended_events is not present' do
+ project.build_or_assign_import_data(data: {})
+
+ expect(settings.extended_events?).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index a394b4eba13..998fa8b2c9f 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -211,6 +211,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
let(:username) { 'kittens' }
let(:user) { {} }
let(:etag) { 'etag' }
+ let(:lease_name) { "gitlab:github_import:user_finder:#{project.id}" }
let(:cache_key) { described_class::EMAIL_FOR_USERNAME_CACHE_KEY % username }
let(:etag_cache_key) { described_class::USERNAME_ETAG_CACHE_KEY % username }
let(:email_fetched_for_project_key) do
@@ -305,6 +306,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'makes an API call' do
expect(client).to receive(:user).with(username, { headers: {} }).and_return({ email: email }).once
+ expect(finder).to receive(:in_lock).with(
+ lease_name, ttl: 3.minutes, sleep_sec: 1.second, retries: 30
+ ).and_call_original
email_for_github_username
end
@@ -315,6 +319,14 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
end
it_behaves_like 'returns and caches the email'
+
+ context 'when retried' do
+ before do
+ allow(finder).to receive(:in_lock).and_yield(true)
+ end
+
+ it_behaves_like 'returns and caches the email'
+ end
end
context 'if the response does not contain an email' do
@@ -344,6 +356,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'makes a non-rate-limited API call' do
expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once
+ expect(finder).to receive(:in_lock).with(
+ lease_name, ttl: 3.minutes, sleep_sec: 1.second, retries: 30
+ ).and_call_original
email_for_github_username
end
@@ -413,6 +428,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'makes a non-rate-limited API call' do
expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once
+ expect(finder).to receive(:in_lock).with(
+ lease_name, ttl: 3.minutes, sleep_sec: 1.second, retries: 30
+ ).and_call_original
email_for_github_username
end
@@ -443,6 +461,30 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it_behaves_like 'a user resource not found on GitHub'
end
+
+ context 'if the cached etag is nil' do
+ context 'when lock was executed by another process and an email was fetched' do
+ it 'does not fetch user detail' do
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return('')
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return(email)
+ expect(finder).to receive(:in_lock).and_yield(true)
+ expect(client).not_to receive(:user)
+
+ email_for_github_username
+ end
+ end
+
+ context 'when lock was executed by another process and an email in cache is still blank' do
+ it 'fetch user detail' do
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return('')
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return('')
+ expect(finder).to receive(:in_lock).and_yield(true)
+ expect(client).to receive(:user).with(username, { headers: {} }).and_return({ email: email }).once
+
+ email_for_github_username
+ end
+ end
+ end
end
context 'if the email has been checked for the project' do
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index e4684597ddf..d9dcae3cdc7 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -205,7 +205,6 @@ RSpec.describe Gitlab::GonHelper do
context 'when feature flag is false' do
before do
- stub_feature_flags(browsersdk_tracking: false)
stub_feature_flags(gl_analytics_tracking: false)
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index ef3765e479f..cd596555107 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Gitlab::Highlight do
it 'links dependencies via DependencyLinker' do
expect(Gitlab::DependencyLinker).to receive(:link)
- .with('file.name', 'Contents', anything).and_call_original
+ .with('file.name', 'Contents', anything, used_on: :blob).and_call_original
described_class.highlight('file.name', 'Contents')
end
@@ -133,5 +133,32 @@ RSpec.describe Gitlab::Highlight do
highlight
end
end
+
+ it 'increments usage counter', :prometheus do
+ described_class.highlight(file_name, content)
+
+ gitlab_highlight_usage_counter = Gitlab::Metrics.registry.get(:gitlab_highlight_usage)
+
+ expect(gitlab_highlight_usage_counter.get(used_on: :blob)).to eq(1)
+ expect(gitlab_highlight_usage_counter.get(used_on: :diff)).to eq(0)
+ end
+
+ context 'when used_on is specified' do
+ it 'increments usage counter', :prometheus do
+ described_class.highlight(file_name, content, used_on: :diff)
+
+ gitlab_highlight_usage_counter = Gitlab::Metrics.registry.get(:gitlab_highlight_usage)
+
+ expect(gitlab_highlight_usage_counter.get(used_on: :diff)).to eq(1)
+ expect(gitlab_highlight_usage_counter.get(used_on: :blob)).to eq(0)
+ end
+
+ it 'links dependencies via DependencyLinker' do
+ expect(Gitlab::DependencyLinker).to receive(:link)
+ .with(file_name, content, anything, used_on: :diff).and_call_original
+
+ described_class.highlight(file_name, content, used_on: :diff)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 3fc486a8984..c23f4ea8ffa 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -55,8 +55,19 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
end
context 'when there is a DB call in the concurrent thread' do
- it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432145' do
+ before do
+ # Simulating Sentry is active and configured.
+ # More info: https://gitlab.com/gitlab-org/gitlab/-/issues/432145#note_1671305713
+ stub_sentry_settings
+ allow(Gitlab::ErrorTracking).to receive(:sentry_configurable?).and_return(true)
+ Gitlab::ErrorTracking.configure
+ end
+
+ after do
+ clear_sentry_settings
+ end
+
+ it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error' do
stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
result = described_class.get('http://example.org', async: true) do |_fragment|
@@ -104,40 +115,4 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
end
end
end
-
- context 'when the FF use_gitlab_http_v2 is disabled' do
- before do
- stub_feature_flags(use_gitlab_http_v2: false)
- end
-
- describe '.get' do
- it 'calls Gitlab::LegacyHTTP.get with default options' do
- expect(Gitlab::LegacyHTTP).to receive(:get).with('/path', {})
-
- described_class.get('/path')
- end
- end
-
- describe '.try_get' do
- it 'calls .get' do
- expect(described_class).to receive(:get).with('/path', {})
-
- described_class.try_get('/path')
- end
-
- it 'returns nil when .get raises an error' do
- expect(described_class).to receive(:get).and_raise(SocketError)
-
- expect(described_class.try_get('/path')).to be_nil
- end
- end
-
- describe '.perform_request' do
- it 'calls Gitlab::LegacyHTTP.perform_request with default options' do
- expect(Gitlab::LegacyHTTP).to receive(:perform_request).with(Net::HTTP::Get, '/path', {})
-
- described_class.perform_request(Net::HTTP::Get, '/path', {})
- end
- end
- end
end
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/import/page_counter_spec.rb
index ddb62cc8fad..a7a4e301aa3 100644
--- a/spec/lib/gitlab/github_import/page_counter_spec.rb
+++ b/spec/lib/gitlab/import/page_counter_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache, feature_category: :importers do
- let(:project) { double(:project, id: 1) }
+RSpec.describe Gitlab::Import::PageCounter, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project) { instance_double(Project, id: 1) }
let(:counter) { described_class.new(project, :issues) }
describe '#initialize' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 688487df778..8da05ed7b7e 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -439,6 +439,7 @@ builds:
- dast_scanner_profile
- job_annotations
- job_artifacts_annotations
+- project_mirror
bridges:
- user
- pipeline
@@ -500,6 +501,7 @@ protected_branches:
- push_access_levels
- unprotect_access_levels
- approval_project_rules
+- approval_project_rules_with_unique_policies
- external_status_checks
- required_code_owners_sections
protected_tags:
@@ -588,6 +590,7 @@ project:
- hangouts_chat_integration
- unify_circuit_integration
- buildkite_integration
+- diffblue_cover_integration
- bamboo_integration
- teamcity_integration
- pushover_integration
@@ -848,6 +851,7 @@ timelogs:
- merge_request
- user
- note
+- timelog_category
push_event_payload:
- event
issue_assignees:
@@ -1081,3 +1085,8 @@ approval_project_rules_users:
approval_project_rules_protected_branches:
- protected_branch
- approval_project_rule
+timelog_category:
+ - name
+ - description
+ - billable
+ - billing_rate
diff --git a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
index 9852f6c9652..8ab99875a0a 100644
--- a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
@@ -52,10 +52,10 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
)
end
- subject { relation_tree_restorer.restore }
+ subject(:restore_relations) { relation_tree_restorer.restore }
it 'restores group tree' do
- expect(subject).to eq(true)
+ expect(restore_relations).to eq(true)
end
it 'logs top-level relation creation' do
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
.with(hash_including(message: '[Project/Group Import] Created new object relation'))
.at_least(:once)
- subject
+ restore_relations
end
describe 'relation object saving' do
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
error_messages: "Label can't be blank, Position can't be blank, and Position is not a number"
)
- subject
+ restore_relations
board = importable.boards.last
failure = importable.import_failures.first
@@ -115,6 +115,33 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
end
end
+ context 'when invalid relation object has a loggable external identifier' do
+ before do
+ allow(relation_reader)
+ .to receive(:consume_relation)
+ .with(importable_name, 'milestones')
+ .and_return([
+ [invalid_milestone, 0],
+ [invalid_milestone_with_no_iid, 1]
+ ])
+ end
+
+ let(:invalid_milestone) { build(:milestone, iid: 123, name: nil) }
+ let(:invalid_milestone_with_no_iid) { build(:milestone, iid: nil, name: nil) }
+
+ it 'logs invalid record with external identifier' do
+ restore_relations
+
+ iids_for_failures = importable.import_failures.collect { |f| [f.relation_key, f.external_identifiers] }
+ expected_iids = [
+ ["milestones", { "iid" => invalid_milestone.iid }],
+ ["milestones", {}]
+ ]
+
+ expect(iids_for_failures).to match_array(expected_iids)
+ end
+ end
+
context 'when relation object is persisted' do
before do
allow(relation_reader)
@@ -129,7 +156,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
it 'saves import failure with nested errors' do
label.priorities << [LabelPriority.new, LabelPriority.new]
- subject
+ restore_relations
failure = importable.import_failures.first
diff --git a/spec/lib/gitlab/import_export/import_failure_service_spec.rb b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
index 30d16347828..9628e9fbf4d 100644
--- a/spec/lib/gitlab/import_export/import_failure_service_spec.rb
+++ b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe Gitlab::ImportExport::ImportFailureService, feature_category: :im
relation_key: relation_key,
relation_index: relation_index,
exception: exception,
- retry_count: retry_count)
+ retry_count: retry_count,
+ external_identifiers: { iid: 1234 })
end
before do
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 14af3028a6e..d565f3f3150 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -1114,9 +1114,10 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+ let(:project_fixture) { 'with_invalid_records' }
before do
- setup_import_export_config('with_invalid_records')
+ setup_import_export_config(project_fixture)
setup_reader
subject
@@ -1142,6 +1143,21 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
expect(import_failure.correlation_id_value).not_to be_empty
expect(import_failure.created_at).to be_present
end
+
+ context 'when there are a mix of invalid milestones and issues with IIDs' do
+ let(:project_fixture) { 'with_invalid_issues_and_milestones' }
+
+ it 'tracks the relation IID if present' do
+ iids_for_failures = project.import_failures.collect { |f| [f.relation_key, f.external_identifiers] }
+ expected_iids = [
+ ["milestones", { "iid" => 1 }],
+ ["issues", { "iid" => 9 }],
+ ["issues", {}]
+ ]
+
+ expect(iids_for_failures).to match_array(expected_iids)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 3efa33d8879..73b945d4274 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -765,6 +765,7 @@ Timelog:
- created_at
- updated_at
- summary
+- timelog_category_id
ProjectAutoDevops:
- id
- enabled
@@ -1087,3 +1088,9 @@ ApprovalProjectRulesProtectedBranch:
- branch_name
WorkItems::Type:
- base_type
+TimeTracking::TimelogCategories:
+ - id
+ - name
+ - description
+ - billable
+ - billing_rate
diff --git a/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
new file mode 100644
index 00000000000..eca75d93c80
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+require 'support/helpers/rails_helpers'
+
+RSpec.describe Gitlab::Instrumentation::RedisClientMiddleware, :request_store, feature_category: :scalability do
+ using RSpec::Parameterized::TableSyntax
+ include RedisHelpers
+
+ let_it_be(:redis_store_class) { define_helper_redis_store_class }
+ let_it_be(:redis_client) { RedisClient.new(redis_store_class.redis_client_params) }
+
+ before do
+ redis_client.call("flushdb")
+ end
+
+ describe 'read and write' do
+ where(:setup, :command, :expect_write, :expect_read) do
+ # The response is 'OK', the request size is the combined size of array
+ # elements. Exercise counting of a status reply.
+ [] | [:set, 'foo', 'bar'] | (3 + 3 + 3) | 2
+
+ # The response is 1001, so 4 bytes. Exercise counting an integer reply.
+ [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | (4 + 6) | 4
+
+ # Exercise counting empty multi bulk reply. Returns an empty hash `{}`
+ [] | [:hgetall, 'foobar'] | (7 + 6) | 2
+
+ # Hgetall response length is combined length of keys and values in the
+ # hash. Exercises counting of a multi bulk reply
+ # Returns `{"field"=>"hello world"}`, 5 for field, 11 for hello world, 8 for {, }, 4 "s, =, >
+ [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | (7 + 6) | (5 + 11 + 8)
+
+ # Exercise counting of a bulk reply
+ [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | (3 + 3) | (3 * 100)
+
+ # Nested array response: [['foo', 0.0], ['bar', 1.0]]. Returns scores as float.
+ [[:zadd, 'myset', 0, 'foo'],
+ [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | (6 + 5 + 1 + 2 + 10) | (3 + 3 + 3 + 3)
+ end
+
+ with_them do
+ it 'counts bytes read and written' do
+ setup.each { |cmd| redis_client.call(*cmd) }
+ RequestStore.clear!
+ redis_client.call(*command)
+
+ expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
+ expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
+ end
+ end
+ end
+
+ describe 'counting' do
+ let(:instrumentation_class) { redis_store_class.instrumentation_class }
+
+ it 'counts successful requests' do
+ expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original
+
+ redis_client.call(:get, 'foobar')
+ end
+
+ it 'counts successful pipelined requests' do
+ expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
+ end
+ end
+
+ context 'when encountering exceptions' do
+ before do
+ allow(redis_client.instance_variable_get(:@raw_connection)).to receive(:call).and_raise(
+ RedisClient::ConnectionError, 'Connection was closed or lost')
+ end
+
+ it 'counts exception' do
+ expect(instrumentation_class).to receive(:instance_count_exception)
+ .with(instance_of(RedisClient::ConnectionError)).and_call_original
+ expect(instrumentation_class).to receive(:log_exception)
+ .with(instance_of(RedisClient::ConnectionError)).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_request).and_call_original
+
+ expect do
+ redis_client.call(:auth, 'foo', 'bar')
+ end.to raise_error(RedisClient::Error)
+ end
+ end
+
+ context 'in production environment' do
+ before do
+ stub_rails_env('production') # to avoid raising CrossSlotError
+ end
+
+ it 'counts disallowed cross-slot requests' do
+ expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ redis_client.call(:mget, 'foo', 'bar')
+ end
+
+ it 'does not count allowed cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis_client.call(:mget, 'foo', 'bar')
+ end
+ end
+
+ it 'does not count allowed non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis_client.call(:mget, 'bar')
+ end
+ end
+
+ it 'skips count for non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ redis_client.call(:mget, '{foo}bar', '{foo}baz')
+ end
+ end
+
+ context 'without active RequestStore' do
+ before do
+ ::RequestStore.end!
+ end
+
+ it 'still runs cross-slot validation' do
+ expect do
+ redis_client.call('mget', 'foo', 'bar')
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
+ end
+ end
+
+ describe 'latency' do
+ let(:instrumentation_class) { redis_store_class.instrumentation_class }
+
+ describe 'commands in the apdex' do
+ where(:command) do
+ [
+ [[:get, 'foobar']],
+ [%w[GET foobar]]
+ ]
+ end
+
+ with_them do
+ it 'measures requests we want in the apdex' do
+ expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
+ .and_call_original
+
+ redis_client.call(*command)
+ end
+ end
+
+ context 'with pipelined commands' do
+ it 'measures requests that do not have blocking commands' do
+ expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
+ .and_call_original
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
+ end
+ end
+
+ it 'raises error when keys are not from the same slot' do
+ expect do
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, 'foo')
+ pipeline.call(:get, 'bar')
+ end
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
+ end
+ end
+
+ describe 'commands not in the apdex' do
+ where(:setup, :command) do
+ [['rpush', 'foobar', 1]] | ['brpop', 'foobar', 0]
+ [['rpush', 'foobar', 1]] | ['blpop', 'foobar', 0]
+ [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
+ [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
+ [['zadd', 'foobar', 1, 'a']] | ['bzpopmin', 'foobar', 0]
+ [['zadd', 'foobar', 1, 'a']] | ['bzpopmax', 'foobar', 0]
+ [['xadd', 'mystream', 1, 'myfield', 'mydata']] | ['xread', 'block', 1, 'streams', 'mystream', '0-0']
+ [['xadd', 'foobar', 1, 'myfield', 'mydata'],
+ ['xgroup', 'create', 'foobar', 'mygroup',
+ 0]] | ['xreadgroup', 'group', 'mygroup', 'myconsumer', 'block', 1, 'streams', 'foobar', '0-0']
+ [] | ['command']
+ end
+
+ with_them do
+ it 'skips requests we do not want in the apdex' do
+ setup.each { |cmd| redis_client.call(*cmd) }
+
+ expect(instrumentation_class).not_to receive(:instance_observe_duration)
+
+ redis_client.call(*command)
+ end
+ end
+
+ context 'with pipelined commands' do
+ it 'skips requests that have blocking commands' do
+ expect(instrumentation_class).not_to receive(:instance_observe_duration)
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:rpush, '{foobar}baz', 1)
+ pipeline.call(:brpop, '{foobar}baz', 0)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
index 8d6415b8179..8b6d628833e 100644
--- a/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter, feature_category: :importers do
let_it_be(:project) { create(:project) }
let(:client) { double }
let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
@@ -76,12 +76,6 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
context 'when author is a GitLab user' do
let(:raw) { base.merge(user: octocat) }
- it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(comment.attributes.fetch(:author_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as author_id' do
gl_user = create(:user, email: octocat[:email])
@@ -89,7 +83,7 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
end
it 'returns note without created at tag line' do
- create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
+ create(:user, email: octocat[:email])
expect(comment.attributes.fetch(:note)).to eq("I'm having a problem with this.")
end
diff --git a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
index d3548fecbcd..9baf234b14b 100644
--- a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter, feature_category: :importers do
let_it_be(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:client) { double }
let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
@@ -82,12 +82,6 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
expect(issue.attributes.fetch(:assignee_ids)).to be_empty
end
- it 'returns GitLab user id associated with GitHub id as assignee_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(issue.attributes.fetch(:assignee_ids)).to eq [gl_user.id]
- end
-
it 'returns GitLab user id associated with GitHub email as assignee_id' do
gl_user = create(:user, email: octocat[:email])
@@ -117,12 +111,6 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
expect(issue.attributes.fetch(:author_id)).to eq project.creator_id
end
- it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(issue.attributes.fetch(:author_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as author_id' do
gl_user = create(:user, email: octocat[:email])
@@ -130,7 +118,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
it 'returns description without created at tag line' do
- create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
+ create(:user, email: octocat[:email])
expect(issue.attributes.fetch(:description)).to eq("I'm having a problem with this.")
end
diff --git a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
index 90469693820..1555e3e0d4c 100644
--- a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter, feature_category: :importers do
let_it_be(:project) { create(:project, :repository) }
let(:client) { double }
let(:source_sha) { create(:commit, project: project).id }
@@ -136,12 +136,6 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
expect(pull_request.attributes.fetch(:assignee_id)).to be_nil
end
- it 'returns GitLab user id associated with GitHub id as assignee_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(pull_request.attributes.fetch(:assignee_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as assignee_id' do
gl_user = create(:user, email: octocat[:email])
@@ -156,12 +150,6 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
expect(pull_request.attributes.fetch(:author_id)).to eq project.creator_id
end
- it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(pull_request.attributes.fetch(:author_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as author_id' do
gl_user = create(:user, email: octocat[:email])
@@ -169,7 +157,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
it 'returns description without created at tag line' do
- create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
+ create(:user, email: octocat[:email])
expect(pull_request.attributes.fetch(:description)).to eq('Please pull these awesome changes')
end
diff --git a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
index 0844ab7eccc..d387d79aa30 100644
--- a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::UserFormatter, feature_category: :importers do
let(:client) { double }
let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
let(:gitea_ghost) { { id: -1, login: 'Ghost', email: '' } }
@@ -15,12 +15,6 @@ RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do
end
context 'when GitHub user is a GitLab user' do
- it 'return GitLab user id when user associated their account with GitHub' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(user.gitlab_id).to eq gl_user.id
- end
-
it 'returns GitLab user id when user confirmed primary email matches GitHub email' do
gl_user = create(:user, email: octocat[:email])
diff --git a/spec/lib/gitlab/legacy_http_spec.rb b/spec/lib/gitlab/legacy_http_spec.rb
deleted file mode 100644
index 07a30b194b6..00000000000
--- a/spec/lib/gitlab/legacy_http_spec.rb
+++ /dev/null
@@ -1,448 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::LegacyHTTP, feature_category: :shared do
- include StubRequests
-
- let(:default_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
-
- context 'when allow_local_requests' do
- it 'sends the request to the correct URI' do
- stub_full_request('https://example.org:8080', ip_address: '8.8.8.8').to_return(status: 200)
-
- described_class.get('https://example.org:8080', allow_local_requests: false)
-
- expect(WebMock).to have_requested(:get, 'https://8.8.8.8:8080').once
- end
- end
-
- context 'when not allow_local_requests' do
- it 'sends the request to the correct URI' do
- stub_full_request('https://example.org:8080')
-
- described_class.get('https://example.org:8080', allow_local_requests: true)
-
- expect(WebMock).to have_requested(:get, 'https://8.8.8.9:8080').once
- end
- end
-
- context 'when reading the response is too slow' do
- before_all do
- # Override Net::HTTP to add a delay between sending each response chunk
- mocked_http = Class.new(Net::HTTP) do
- def request(*)
- super do |response|
- response.instance_eval do
- def read_body(*)
- mock_stream = @body.split(' ')
- mock_stream.each do |fragment|
- sleep 0.002.seconds
-
- yield fragment if block_given?
- end
-
- @body
- end
- end
-
- yield response if block_given?
-
- response
- end
- end
- end
-
- @original_net_http = Net.send(:remove_const, :HTTP)
- @webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get(:@webMockNetHTTP)
-
- Net.send(:const_set, :HTTP, mocked_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, mocked_http)
-
- # Reload Gitlab::NetHttpAdapter
- Gitlab.send(:remove_const, :NetHttpAdapter)
- load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
- end
-
- before do
- stub_const("Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
-
- WebMock.stub_request(:post, /.*/).to_return do
- { body: "chunk-1 chunk-2", status: 200 }
- end
- end
-
- after(:all) do
- Net.send(:remove_const, :HTTP)
- Net.send(:const_set, :HTTP, @original_net_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, @webmock_net_http)
-
- # Reload Gitlab::NetHttpAdapter
- Gitlab.send(:remove_const, :NetHttpAdapter)
- load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
- end
-
- let(:options) { {} }
-
- subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
-
- it 'raises an error' do
- expect { request_slow_responder }.to raise_error(
- Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
- end
-
- context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
- let(:options) { { timeout: 10.seconds } }
-
- it 'does not raise an error' do
- expect { request_slow_responder }.not_to raise_error
- end
- end
-
- context 'and stream_body option is truthy' do
- let(:options) { { stream_body: true } }
-
- it 'does not raise an error' do
- expect { request_slow_responder }.not_to raise_error
- end
- end
- end
-
- it 'calls a block' do
- WebMock.stub_request(:post, /.*/)
-
- expect { |b| described_class.post('http://example.org', &b) }.to yield_with_args
- end
-
- describe 'allow_local_requests_from_web_hooks_and_services is' do
- before do
- WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success')
- end
-
- context 'disabled' do
- before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
- end
-
- it 'deny requests to localhost' do
- expect { described_class.get('http://localhost:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
- end
-
- it 'deny requests to private network' do
- expect { described_class.get('http://192.168.1.2:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
- end
-
- context 'if allow_local_requests set to true' do
- it 'override the global value and allow requests to localhost or private network' do
- stub_full_request('http://localhost:3003')
-
- expect { described_class.get('http://localhost:3003', allow_local_requests: true) }.not_to raise_error
- end
- end
- end
-
- context 'enabled' do
- before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
- end
-
- it 'allow requests to localhost' do
- stub_full_request('http://localhost:3003')
-
- expect { described_class.get('http://localhost:3003') }.not_to raise_error
- end
-
- it 'allow requests to private network' do
- expect { described_class.get('http://192.168.1.2:3003') }.not_to raise_error
- end
-
- context 'if allow_local_requests set to false' do
- it 'override the global value and ban requests to localhost or private network' do
- expect { described_class.get('http://localhost:3003', allow_local_requests: false) }.to raise_error(
- Gitlab::HTTP::BlockedUrlError)
- end
- end
- end
- end
-
- describe 'handle redirect loops' do
- before do
- stub_full_request("http://example.org", method: :any).to_raise(
- HTTParty::RedirectionTooDeep.new("Redirection Too Deep"))
- end
-
- it 'handles GET requests' do
- expect { described_class.get('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles POST requests' do
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles PUT requests' do
- expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles DELETE requests' do
- expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles HEAD requests' do
- expect { described_class.head('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
- end
-
- describe 'setting default timeouts' do
- before do
- stub_full_request('http://example.org', method: :any)
- end
-
- context 'when no timeouts are set' do
- it 'sets default open and read and write timeouts' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options
- ).and_call_original
-
- described_class.get('http://example.org')
- end
- end
-
- context 'when :timeout is set' do
- it 'does not set any default timeouts' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', { timeout: 1 }
- ).and_call_original
-
- described_class.get('http://example.org', { timeout: 1 })
- end
- end
-
- context 'when :open_timeout is set' do
- it 'only sets default read and write timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options.merge(open_timeout: 1)
- ).and_call_original
-
- described_class.get('http://example.org', open_timeout: 1)
- end
- end
-
- context 'when :read_timeout is set' do
- it 'only sets default open and write timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options.merge(read_timeout: 1)
- ).and_call_original
-
- described_class.get('http://example.org', read_timeout: 1)
- end
- end
-
- context 'when :write_timeout is set' do
- it 'only sets default open and read timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Put, 'http://example.org', default_options.merge(write_timeout: 1)
- ).and_call_original
-
- described_class.put('http://example.org', write_timeout: 1)
- end
- end
- end
-
- describe '.try_get' do
- let(:path) { 'http://example.org' }
-
- let(:extra_log_info_proc) do
- proc do |error, url, options|
- { klass: error.class, url: url, options: options }
- end
- end
-
- let(:request_options) do
- default_options.merge({
- verify: false,
- basic_auth: { username: 'user', password: 'pass' }
- })
- end
-
- Gitlab::HTTP::HTTP_ERRORS.each do |exception_class|
- context "with #{exception_class}" do
- let(:klass) { exception_class }
-
- context 'with path' do
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, default_options)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, extra_log_info: { a: :b })).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { url: path, klass: klass, options: {} })
-
- expect(described_class.try_get(path, extra_log_info: extra_log_info_proc)).to be_nil
- end
- end
-
- context 'with path and options' do
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, request_options)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path, request_options)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b })).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { klass: klass, url: path, options: request_options })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc)).to be_nil
- end
- end
-
- context 'with path, options, and block' do
- let(:block) do
- proc {}
- end
-
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, request_options, &block)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path, request_options, &block)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b }, &block)).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { klass: klass, url: path, options: request_options })
-
- expect(
- described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc, &block)
- ).to be_nil
- end
- end
- end
- end
- end
-
- describe 'silent mode', feature_category: :geo_replication do
- before do
- stub_full_request("http://example.org", method: :any)
- stub_application_setting(silent_mode_enabled: silent_mode)
- end
-
- context 'when silent mode is enabled' do
- let(:silent_mode) { true }
-
- it 'allows GET requests' do
- expect { described_class.get('http://example.org') }.not_to raise_error
- end
-
- it 'allows HEAD requests' do
- expect { described_class.head('http://example.org') }.not_to raise_error
- end
-
- it 'allows OPTIONS requests' do
- expect { described_class.options('http://example.org') }.not_to raise_error
- end
-
- it 'blocks POST requests' do
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'blocks PUT requests' do
- expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'blocks DELETE requests' do
- expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'logs blocked requests' do
- expect(::Gitlab::AppJsonLogger).to receive(:info).with(
- message: "Outbound HTTP request blocked",
- outbound_http_request_method: 'Net::HTTP::Post',
- silent_mode_enabled: true
- )
-
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
- end
-
- context 'when silent mode is disabled' do
- let(:silent_mode) { false }
-
- it 'allows GET requests' do
- expect { described_class.get('http://example.org') }.not_to raise_error
- end
-
- it 'allows HEAD requests' do
- expect { described_class.head('http://example.org') }.not_to raise_error
- end
-
- it 'allows OPTIONS requests' do
- expect { described_class.options('http://example.org') }.not_to raise_error
- end
-
- it 'blocks POST requests' do
- expect { described_class.post('http://example.org') }.not_to raise_error
- end
-
- it 'blocks PUT requests' do
- expect { described_class.put('http://example.org') }.not_to raise_error
- end
-
- it 'blocks DELETE requests' do
- expect { described_class.delete('http://example.org') }.not_to raise_error
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
index 68dd784fb7e..1c62f5679d0 100644
--- a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
before do
allow(Gitlab::Metrics::System).to receive(:monotonic_time)
- .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq[:timeout] + 2)
+ .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq.default_configuration[:timeout] + 2)
allow(Process).to receive(:kill)
allow(::Sidekiq).to receive(:logger).and_return(logger)
allow(logger).to receive(:warn)
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2]
+ [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2]
]
end
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2],
+ [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2],
[:KILL, kill_pid, 'hard shut down', nil]
]
end
diff --git a/spec/lib/gitlab/namespaced_session_store_spec.rb b/spec/lib/gitlab/namespaced_session_store_spec.rb
index 2c258ce3da6..4e9b35e6859 100644
--- a/spec/lib/gitlab/namespaced_session_store_spec.rb
+++ b/spec/lib/gitlab/namespaced_session_store_spec.rb
@@ -8,19 +8,28 @@ RSpec.describe Gitlab::NamespacedSessionStore do
context 'current session' do
subject { described_class.new(key) }
- it 'stores data under the specified key' do
- Gitlab::Session.with_session({}) do
- subject[:new_data] = 123
-
- expect(Thread.current[:session_storage][key]).to eq(new_data: 123)
- end
- end
-
it 'retrieves data from the given key' do
Thread.current[:session_storage] = { key => { existing_data: 123 } }
expect(subject[:existing_data]).to eq 123
end
+
+ context 'when namespace key does not exist' do
+ before do
+ Thread.current[:session_storage] = {}
+ end
+
+ it 'does not create namespace key when reading a value' do
+ expect(subject[:non_existent_key]).to eq(nil)
+ expect(Thread.current[:session_storage]).to eq({})
+ end
+
+ it 'stores data under the specified key' do
+ subject[:new_data] = 123
+
+ expect(Thread.current[:session_storage][key]).to eq(new_data: 123)
+ end
+ end
end
context 'passed in session' do
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
index afaad48d363..326f3c6d344 100644
--- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -46,12 +46,6 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
end
end
- it 'raises error when ordering configuration cannot be automatically determined' do
- expect do
- described_class.new(scope: MergeRequestDiffCommit.order(:merge_request_diff_id, :relative_order))
- end.to raise_error /The order on the scope does not support keyset pagination/
- end
-
it 'accepts a custom batch size' do
count = 0
diff --git a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb
index 230ac01af31..16c5b3ab748 100644
--- a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::Pagination::Keyset::Paginator do
- let_it_be(:project_1) { create(:project, created_at: 10.weeks.ago) }
- let_it_be(:project_2) { create(:project, created_at: 2.weeks.ago) }
- let_it_be(:project_3) { create(:project, created_at: 3.weeks.ago) }
- let_it_be(:project_4) { create(:project, created_at: 5.weeks.ago) }
- let_it_be(:project_5) { create(:project, created_at: 2.weeks.ago) }
+ let_it_be(:project_1) { create(:project, :public, name: 'Project A', created_at: 10.weeks.ago) }
+ let_it_be(:project_2) { create(:project, :public, name: 'Project E', created_at: 2.weeks.ago) }
+ let_it_be(:project_3) { create(:project, :private, name: 'Project C', created_at: 3.weeks.ago) }
+ let_it_be(:project_4) { create(:project, :private, name: 'Project B', created_at: 5.weeks.ago) }
+ let_it_be(:project_5) { create(:project, :private, name: 'Project B', created_at: 2.weeks.ago) }
describe 'pagination' do
let(:per_page) { 10 }
@@ -98,6 +98,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Paginator do
end
end
+ context 'when the relation is ordered by more than 2 columns' do
+ let(:scope) { Project.order(visibility_level: :asc, name: :asc, id: :asc) }
+ let(:expected_order) { [project_4, project_5, project_3, project_1, project_2] }
+
+ it { expect(paginator.records).to eq(expected_order) }
+ end
+
describe 'default keyset direction parameter' do
let(:cursor_converter_class) { Gitlab::Pagination::Keyset::Paginator::Base64CursorConverter }
let(:per_page) { 2 }
@@ -110,14 +117,6 @@ RSpec.describe Gitlab::Pagination::Keyset::Paginator do
end
end
- context 'when unsupported order is given' do
- it 'raises error' do
- scope = Project.order(path: :asc, name: :asc, id: :desc) # Cannot build 3 column order automatically
-
- expect { scope.keyset_paginate }.to raise_error(/does not support keyset pagination/)
- end
- end
-
context 'when use_union_optimization option is true and ordering by two columns' do
let(:scope) { Project.order(name: :asc, id: :desc) }
diff --git a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
index e85b0354ff6..fd38fff2b81 100644
--- a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
context 'when more than 2 columns are given for the order' do
let(:scope) { Project.order(created_at: :asc, updated_at: :desc, id: :asc) }
- it { is_expected.to eq(false) }
+ it { is_expected.to eq(true) }
end
end
end
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index bb0adbc87f1..ef76b1e5fdf 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -185,21 +185,21 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
context 'at the start of content' do
it_behaves_like 'command with a single argument' do
let(:original_msg) { "/assign @joe\nworld" }
- let(:final_msg) { "\n/assign @joe\n\nworld" }
+ let(:final_msg) { "<p>/assign @joe</p>\nworld" }
end
end
context 'in the middle of content' do
it_behaves_like 'command with a single argument' do
let(:original_msg) { "hello\n/assign @joe\nworld" }
- let(:final_msg) { "hello\n\n/assign @joe\n\nworld" }
+ let(:final_msg) { "hello\n<p>/assign @joe</p>\nworld" }
end
end
context 'at the end of content' do
it_behaves_like 'command with a single argument' do
let(:original_msg) { "hello\n/assign @joe" }
- let(:final_msg) { "hello\n\n/assign @joe" }
+ let(:final_msg) { "hello\n<p>/assign @joe</p>" }
end
end
end
@@ -282,7 +282,7 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
msg, commands = extractor.extract_commands(msg)
expect(commands).to match_array [['reopen'], ['substitution', 'wow this is a thing.']]
- expect(msg).to eq "hello\nworld\n\n/reopen\n\nfoo"
+ expect(msg).to eq "hello\nworld\n<p>/reopen</p>\nfoo"
end
it 'extracts multiple commands' do
diff --git a/spec/lib/gitlab/redis/cluster_util_spec.rb b/spec/lib/gitlab/redis/cluster_util_spec.rb
index 3993004518d..f167065fd3f 100644
--- a/spec/lib/gitlab/redis/cluster_util_spec.rb
+++ b/spec/lib/gitlab/redis/cluster_util_spec.rb
@@ -29,10 +29,15 @@ RSpec.describe Gitlab::Redis::ClusterUtil, feature_category: :scalability do
with_them do
it 'returns expected value' do
- primary_store = pri_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
- secondary_store = sec_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
- multistore = Gitlab::Redis::MultiStore.new(primary_store, secondary_store, 'teststore')
- expect(described_class.cluster?(multistore)).to eq(expected_val)
+ primary_redis = pri_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ secondary_redis = sec_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ primary_pool = ConnectionPool.new { primary_redis }
+ secondary_pool = ConnectionPool.new { secondary_redis }
+ multistore = Gitlab::Redis::MultiStore.new(primary_pool, secondary_pool, 'teststore')
+
+ multistore.with_borrowed_connection do
+ expect(described_class.cluster?(multistore)).to eq(expected_val)
+ end
end
end
end
diff --git a/spec/lib/gitlab/redis/cross_slot_spec.rb b/spec/lib/gitlab/redis/cross_slot_spec.rb
index e2f5fcf7694..ccf2de1f28f 100644
--- a/spec/lib/gitlab/redis/cross_slot_spec.rb
+++ b/spec/lib/gitlab/redis/cross_slot_spec.rb
@@ -38,7 +38,9 @@ RSpec.describe Gitlab::Redis::CrossSlot, feature_category: :redis do
let_it_be(:secondary_db) { 2 }
let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
- let_it_be(:multistore) { Gitlab::Redis::MultiStore.new(primary_store, secondary_store, 'testing') }
+ let_it_be(:primary_pool) { ConnectionPool.new { primary_store } }
+ let_it_be(:secondary_pool) { ConnectionPool.new { secondary_store } }
+ let_it_be(:multistore) { Gitlab::Redis::MultiStore.new(primary_pool, secondary_pool, 'testing') }
before do
primary_store.set('a', 1)
@@ -52,9 +54,11 @@ RSpec.describe Gitlab::Redis::CrossSlot, feature_category: :redis do
expect(
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- described_class::Pipeline.new(multistore).pipelined do |p|
- p.get('a')
- p.set('b', 1)
+ multistore.with_borrowed_connection do
+ described_class::Pipeline.new(multistore).pipelined do |p|
+ p.get('a')
+ p.set('b', 1)
+ end
end
end
).to eq(%w[1 OK])
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index 6b1c0fb2e81..125bfec990c 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -10,11 +10,15 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
let_it_be(:secondary_db) { 2 }
let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
+ let_it_be(:primary_pool) { ConnectionPool.new { primary_store } }
+ let_it_be(:secondary_pool) { ConnectionPool.new { secondary_store } }
let_it_be(:instance_name) { 'TestStore' }
- let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
+ let_it_be(:multi_store) { described_class.new(primary_pool, secondary_pool, instance_name) }
subject do
- multi_store.send(name, *args)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name, *args)
+ end
end
before do
@@ -23,12 +27,12 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
after(:all) do
- primary_store.flushdb
- secondary_store.flushdb
+ primary_store.with(&:flushdb)
+ secondary_store.with(&:flushdb)
end
context 'when primary_store is nil' do
- let(:multi_store) { described_class.new(nil, secondary_store, instance_name) }
+ let(:multi_store) { described_class.new(nil, secondary_pool, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
@@ -36,7 +40,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
context 'when secondary_store is nil' do
- let(:multi_store) { described_class.new(primary_store, nil, instance_name) }
+ let(:multi_store) { described_class.new(primary_pool, nil, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
@@ -45,7 +49,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when instance_name is nil' do
let(:instance_name) { nil }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
+ let(:multi_store) { described_class.new(primary_pool, secondary_pool, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
@@ -58,7 +62,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'fails with exception' do
- expect { described_class.new(primary_store, secondary_store, instance_name) }
+ expect { described_class.new(primary_pool, secondary_pool, instance_name) }
.to raise_error(ArgumentError, /invalid primary_store/)
end
end
@@ -69,7 +73,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'fails with exception' do
- expect { described_class.new(primary_store, secondary_store, instance_name) }
+ expect { described_class.new(primary_pool, secondary_pool, instance_name) }
.to raise_error(ArgumentError, /invalid secondary_store/)
end
end
@@ -77,7 +81,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
# rubocop:disable RSpec/MultipleMemoizedHelpers
context 'with READ redis commands' do
subject do
- multi_store.send(name, *args, **kwargs)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name, *args, **kwargs)
+ end
end
let(:args) { 'args' }
@@ -117,7 +123,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when reading from default instance is raising an exception' do
before do
- allow(multi_store.default_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ end
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
@@ -132,8 +140,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
- multi_store.pipelined do |pipeline|
- pipeline.send(name, *args, **kwargs)
+ multi_store.with_borrowed_connection do
+ multi_store.pipelined do |pipeline|
+ pipeline.send(name, *args, **kwargs)
+ end
end
end
@@ -153,7 +163,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when block provided' do
subject do
- multi_store.send(name, expected_args) { nil }
+ multi_store.with_borrowed_connection do
+ multi_store.send(name, expected_args) { nil }
+ end
end
it 'only default store to execute' do
@@ -167,7 +179,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'with both primary and secondary store using same redis instance' do
let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
+ let(:primary_pool) { ConnectionPool.new { primary_store } }
+ let(:secondary_pool) { ConnectionPool.new { secondary_store } }
+ let(:multi_store) { described_class.new(primary_pool, secondary_pool, instance_name) }
it_behaves_like 'secondary store'
end
@@ -219,8 +233,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
subject do
- multi_store.mget(values) do |v|
- multi_store.sadd(skey, v)
+ multi_store.with_borrowed_connection do
+ multi_store.mget(values) do |v|
+ multi_store.sadd(skey, v)
+ end
end
end
@@ -335,19 +351,27 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when executing on the default instance is raising an exception' do
before do
- allow(multi_store.default_store).to receive(name).with(*args).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(name).with(*args).and_raise(StandardError)
+ end
+
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
it 'raises error and does not execute on non default instance', :aggregate_failures do
- expect(multi_store.non_default_store).not_to receive(name).with(*args)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).not_to receive(name).with(*args)
+ end
+
expect { subject }.to raise_error(StandardError)
end
end
context 'when executing on the non default instance is raising an exception' do
before do
- allow(multi_store.non_default_store).to receive(name).with(*args).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.non_default_store).to receive(name).with(*args).and_raise(StandardError)
+ end
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
@@ -355,7 +379,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(:multi_store_error_message,
command_name: name, instance_name: instance_name))
- expect(multi_store.default_store).to receive(name).with(*args)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.default_store).to receive(name).with(*args)
+ end
subject
end
@@ -363,8 +389,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
- multi_store.pipelined do |pipeline|
- pipeline.send(name, *args)
+ multi_store.with_borrowed_connection do
+ multi_store.pipelined do |pipeline|
+ pipeline.send(name, *args)
+ end
end
end
@@ -390,7 +418,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
it "#{store} redis store contains correct values", :aggregate_failures do
subject
- redis_store = multi_store.send(store)
+ redis_store = multi_store.with_borrowed_connection { multi_store.send(store) }
if expected_value.is_a?(Array)
# :smembers does not guarantee the order it will return the values
@@ -425,8 +453,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
subject do
- multi_store.send(name) do |redis|
- redis.set(key1, value1)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name) do |redis|
+ redis.set(key1, value1)
+ end
end
end
@@ -444,11 +474,15 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when executing on the default instance is raising an exception' do
before do
- allow(multi_store.default_store).to receive(name).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(name).and_raise(StandardError)
+ end
end
it 'raises error and does not execute on non default instance', :aggregate_failures do
- expect(multi_store.non_default_store).not_to receive(name)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).not_to receive(name)
+ end
expect { subject }.to raise_error(StandardError)
end
@@ -456,14 +490,18 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when executing on the non default instance is raising an exception' do
before do
- allow(multi_store.non_default_store).to receive(name).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.non_default_store).to receive(name).and_raise(StandardError)
+ end
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
it 'logs the exception and execute on default instance', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(:multi_store_error_message, command_name: name))
- expect(multi_store.default_store).to receive(name).and_call_original
+ multi_store.with_borrowed_connection do
+ expect(multi_store.default_store).to receive(name).and_call_original
+ end
subject
end
@@ -481,8 +519,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
subject do
- multi_store.send(name) do |redis|
- redis.get(key1)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name) do |redis|
+ redis.get(key1)
+ end
end
end
@@ -501,8 +541,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the value exists on both but differ' do
before do
- multi_store.non_default_store.set(key1, value1)
- multi_store.default_store.set(key1, value2)
+ multi_store.with_borrowed_connection do
+ multi_store.non_default_store.set(key1, value1)
+ multi_store.default_store.set(key1, value2)
+ end
end
it 'returns the value from the secondary store, logging an error' do
@@ -522,7 +564,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the value does not exist on the non-default store but it does on the default' do
before do
- multi_store.default_store.set(key1, value2)
+ multi_store.with_borrowed_connection { multi_store.default_store.set(key1, value2) }
end
it 'returns the value from the secondary store, logging an error' do
@@ -584,18 +626,22 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
before do
allow(client).to receive(:instance_of?).with(::Redis::Cluster).and_return(true)
allow(pipeline).to receive(:pipelined)
- allow(multi_store.default_store).to receive(:_client).and_return(client)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(:_client).and_return(client)
+ end
end
it 'calls cross-slot pipeline within multistore' do
if name == :pipelined
# we intentionally exclude `.and_call_original` since primary_store/secondary_store
# may not be running on a proper Redis Cluster.
- expect(Gitlab::Redis::CrossSlot::Pipeline).to receive(:new)
- .with(multi_store.default_store)
- .exactly(:once)
- .and_return(pipeline)
- expect(Gitlab::Redis::CrossSlot::Pipeline).not_to receive(:new).with(multi_store.non_default_store)
+ multi_store.with_borrowed_connection do
+ expect(Gitlab::Redis::CrossSlot::Pipeline).to receive(:new)
+ .with(multi_store.default_store)
+ .exactly(:once)
+ .and_return(pipeline)
+ expect(Gitlab::Redis::CrossSlot::Pipeline).not_to receive(:new).with(multi_store.non_default_store)
+ end
end
subject
@@ -637,7 +683,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
describe '#ping' do
- subject { multi_store.ping }
+ subject { multi_store.with_borrowed_connection { multi_store.ping } }
context 'when using both stores' do
before do
@@ -652,7 +698,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'with message' do
it 'returns the same message' do
- expect(multi_store.ping('hello world')).to eq('hello world')
+ expect(multi_store.with_borrowed_connection { multi_store.ping('hello world') }).to eq('hello world')
end
end
@@ -757,11 +803,13 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
describe '#blpop' do
let_it_be(:key) { "mylist" }
- subject { multi_store.blpop(key, timeout: 0.1) }
+ subject { multi_store.with_borrowed_connection { multi_store.blpop(key, timeout: 0.1) } }
shared_examples 'calls blpop on default_store' do
it 'calls blpop on default_store' do
- expect(multi_store.default_store).to receive(:blpop).with(key, { timeout: 0.1 })
+ multi_store.with_borrowed_connection do
+ expect(multi_store.default_store).to receive(:blpop).with(key, { timeout: 0.1 })
+ end
subject
end
@@ -769,7 +817,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
shared_examples 'does not call lpop on non_default_store' do
it 'does not call blpop on non_default_store' do
- expect(multi_store.non_default_store).not_to receive(:blpop)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).not_to receive(:blpop)
+ end
subject
end
@@ -784,11 +834,13 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context "when an element exists in the default_store" do
before do
- multi_store.default_store.lpush(key, 'abc')
+ multi_store.with_borrowed_connection { multi_store.default_store.lpush(key, 'abc') }
end
it 'calls lpop on non_default_store' do
- expect(multi_store.non_default_store).to receive(:blpop).with(key, { timeout: 1 })
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).to receive(:blpop).with(key, { timeout: 1 })
+ end
subject
end
@@ -818,7 +870,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
end
- subject { multi_store.command }
+ subject { multi_store.with_borrowed_connection { multi_store.command } }
context 'when in test environment' do
it 'raises error' do
@@ -868,7 +920,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
- subject { multi_store.info }
+ subject { multi_store.with_borrowed_connection { multi_store.info } }
it 'does not log MethodMissingError' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
@@ -907,7 +959,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
- multi_store.pipelined(&:command)
+ multi_store.with_borrowed_connection { multi_store.pipelined(&:command) }
end
it 'is executed only 1 time on each instance', :aggregate_failures do
@@ -927,7 +979,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
describe '#to_s' do
- subject { multi_store.to_s }
+ subject { multi_store.with_borrowed_connection { multi_store.to_s } }
it 'returns same value as primary_store' do
is_expected.to eq(primary_store.to_s)
@@ -936,13 +988,17 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
describe '#is_a?' do
it 'returns true for ::Redis::Store' do
- expect(multi_store.is_a?(::Redis::Store)).to be true
+ expect(multi_store.with_borrowed_connection { multi_store.is_a?(::Redis::Store) }).to be true
end
end
describe '#use_primary_and_secondary_stores?' do
+ subject(:use_both) do
+ multi_store.with_borrowed_connection { multi_store.use_primary_and_secondary_stores? }
+ end
+
it 'multi store is enabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be true
+ expect(use_both).to be true
end
context 'with empty DB' do
@@ -951,7 +1007,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(use_both).to be false
end
end
@@ -961,14 +1017,18 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(use_both).to be false
end
end
end
describe '#use_primary_store_as_default?' do
+ subject(:primary_default) do
+ multi_store.with_borrowed_connection { multi_store.use_primary_store_as_default? }
+ end
+
it 'multi store is disabled' do
- expect(multi_store.use_primary_store_as_default?).to be true
+ expect(primary_default).to be true
end
context 'with empty DB' do
@@ -977,7 +1037,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(primary_default).to be false
end
end
@@ -987,7 +1047,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(primary_default).to be false
end
end
end
@@ -1003,7 +1063,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
it 'publishes to one or more stores' do
expect(stores).to all(receive(:publish))
- multi_store.publish(channel_name, message)
+ multi_store.with_borrowed_connection { multi_store.publish(channel_name, message) }
end
end
@@ -1012,14 +1072,14 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
expect(default_store).to receive(:subscribe)
expect(non_default_store).not_to receive(:subscribe)
- multi_store.subscribe(channel_name)
+ multi_store.with_borrowed_connection { multi_store.subscribe(channel_name) }
end
it 'unsubscribes to the default store' do
expect(default_store).to receive(:unsubscribe)
expect(non_default_store).not_to receive(:unsubscribe)
- multi_store.unsubscribe
+ multi_store.with_borrowed_connection { multi_store.unsubscribe }
end
end
@@ -1106,4 +1166,30 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
expect(duplicated_commands).to be_empty, "commands #{duplicated_commands} defined more than once"
end
end
+
+ describe '.with_borrowed_connection' do
+ it 'permits nested borrows' do
+ multi_store.with_borrowed_connection do
+ expect(Thread.current[multi_store.borrow_counter]).to eq(1)
+
+ multi_store.with_borrowed_connection do
+ multi_store.ping
+
+ expect(Thread.current[multi_store.borrow_counter]).to eq(2)
+ expect(multi_store.primary_store).not_to eq(nil)
+ expect(multi_store.secondary_store).not_to eq(nil)
+ end
+
+ multi_store.ping
+
+ expect(Thread.current[multi_store.borrow_counter]).to eq(1)
+ expect(multi_store.primary_store).not_to eq(nil)
+ expect(multi_store.secondary_store).not_to eq(nil)
+ end
+
+ expect(Thread.current[multi_store.borrow_counter]).to eq(0)
+ expect(multi_store.primary_store).to eq(nil)
+ expect(multi_store.secondary_store).to eq(nil)
+ end
+ end
end
diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb
index b02864cb73d..874822e3e6a 100644
--- a/spec/lib/gitlab/redis/sessions_spec.rb
+++ b/spec/lib/gitlab/redis/sessions_spec.rb
@@ -5,27 +5,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::Sessions do
it_behaves_like "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
- describe 'redis instance used in connection pool' do
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'uses ::Redis instance' do
- described_class.pool.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Redis)
- end
- end
-
- def clear_pool
- described_class.remove_instance_variable(:@pool)
- rescue NameError
- # raised if @pool was not set; ignore
- end
- end
-
describe '#store' do
subject(:store) { described_class.store(namespace: described_class::SESSION_NAMESPACE) }
diff --git a/spec/lib/gitlab/redis/shared_state_spec.rb b/spec/lib/gitlab/redis/shared_state_spec.rb
index a5247903d50..fe8f42cf8eb 100644
--- a/spec/lib/gitlab/redis/shared_state_spec.rb
+++ b/spec/lib/gitlab/redis/shared_state_spec.rb
@@ -6,4 +6,5 @@ RSpec.describe Gitlab::Redis::SharedState do
let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
include_examples "redis_shared_examples"
+ include_examples "multi_store_wrapper_shared_examples"
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 05bcdf2fc96..bd5914c9df8 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -127,10 +127,10 @@ RSpec.describe Gitlab::Runtime, feature_category: :cloud_connector do
before do
stub_const('::Sidekiq', sidekiq_type)
allow(sidekiq_type).to receive(:server?).and_return(true)
- allow(sidekiq_type).to receive(:[]).with(:concurrency).and_return(2)
+ allow(sidekiq_type).to receive(:default_configuration).and_return({ concurrency: 2 })
end
- it_behaves_like "valid runtime", :sidekiq, 5
+ it_behaves_like "valid runtime", :sidekiq, 2
it 'identifies as an application runtime' do
expect(described_class.application?).to be true
diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb
index 9151db3c5ff..491be85584b 100644
--- a/spec/lib/gitlab/security/scan_configuration_spec.rb
+++ b/spec/lib/gitlab/security/scan_configuration_spec.rb
@@ -86,4 +86,105 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
it { is_expected.to be_falsey }
end
end
+
+ describe '#security_features' do
+ subject { scan.security_features }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:scan_type, :features_hash) do
+ :sast | { name: "Static Application Security Testing (SAST)",
+ short_name: "SAST",
+ description: "Analyze your source code for known vulnerabilities.",
+ help_path: "/help/user/application_security/sast/index",
+ configuration_help_path: "/help/user/application_security/sast/index#configuration",
+ type: "sast" }
+ :sast_iac | { name: "Infrastructure as Code (IaC) Scanning",
+ short_name: "SAST IaC",
+ description: "Analyze your infrastructure as code configuration files for known vulnerabilities.",
+ help_path: "/help/user/application_security/iac_scanning/index",
+ configuration_help_path: "/help/user/application_security/iac_scanning/index#configuration",
+ type: "sast_iac" }
+ :dast | {
+ badge: { text: "Available on demand",
+ tooltip_text: "On-demand scans run outside of the DevOps " \
+ "cycle and find vulnerabilities in your projects",
+ variant: "info" },
+ secondary: {
+ type: "dast_profiles",
+ name: "DAST profiles",
+ description: "Manage profiles for use by DAST scans.",
+ configuration_text: "Manage profiles"
+ },
+ name: "Dynamic Application Security Testing (DAST)",
+ short_name: "DAST",
+ description: "Analyze a deployed version of your web application for known " \
+ "vulnerabilities by examining it from the outside in. DAST works by simulating " \
+ "external attacks on your application while it is running.",
+ help_path: "/help/user/application_security/dast/index",
+ configuration_help_path: "/help/user/application_security/dast/index#enable-automatic-dast-run",
+ type: "dast",
+ anchor: "dast"
+ }
+ :dependency_scanning | { name: "Dependency Scanning",
+ description: "Analyze your dependencies for known vulnerabilities.",
+ help_path: "/help/user/application_security/dependency_scanning/index",
+ configuration_help_path: "/help/user/application_security/dependency_scanning/index#configuration",
+ type: "dependency_scanning",
+ anchor: "dependency-scanning" }
+ :container_scanning | { name: "Container Scanning",
+ description: "Check your Docker images for known vulnerabilities.",
+ help_path: "/help/user/application_security/container_scanning/index",
+ configuration_help_path: "/help/user/application_security/container_scanning/index#configuration",
+ type: "container_scanning" }
+ :secret_detection | { name: "Secret Detection",
+ description: "Analyze your source code and git history for secrets.",
+ help_path: "/help/user/application_security/secret_detection/index",
+ configuration_help_path: "/help/user/application_security/secret_detection/index#configuration",
+ type: "secret_detection" }
+ :api_fuzzing | { name: "API Fuzzing",
+ description: "Find bugs in your code with API fuzzing.",
+ help_path: "/help/user/application_security/api_fuzzing/index",
+ type: "api_fuzzing" }
+ :coverage_fuzzing | { name: "Coverage Fuzzing",
+ description: "Find bugs in your code with coverage-guided fuzzing.",
+ help_path: "/help/user/application_security/coverage_fuzzing/index",
+ configuration_help_path: \
+ "/help/user/application_security/coverage_fuzzing/index#enable-coverage-guided-fuzz-testing",
+ type: "coverage_fuzzing",
+ secondary: { type: "corpus_management",
+ name: "Corpus Management",
+ description: "Manage corpus files used as " \
+ "seed inputs with coverage-guided fuzzing.",
+ configuration_text: "Manage corpus" } }
+ :breach_and_attack_simulation | { anchor: "bas",
+ badge: { always_display: true,
+ text: "Incubating feature",
+ tooltip_text: "Breach and Attack Simulation is an incubating feature " \
+ "extending existing security " \
+ "testing by simulating adversary activity.",
+ variant: "info" },
+ description: "Simulate breach and attack scenarios against your running " \
+ "application by attempting to detect " \
+ "and exploit known vulnerabilities.",
+ name: "Breach and Attack Simulation (BAS)",
+ help_path: "/help/user/application_security/breach_and_attack_simulation/index",
+ secondary: { configuration_help_path: "/help/user/application_security/breach_and_attack_simulation/" \
+ "index#extend-dynamic-application-security-testing-dast",
+ description: "Enable incubating Breach and " \
+ "Attack Simulation focused features " \
+ "such as callback attacks in your DAST scans.",
+ name: "Out-of-Band Application Security Testing (OAST)" },
+ short_name: "BAS",
+ type: "breach_and_attack_simulation" }
+ :invalid | {}
+ end
+
+ with_them do
+ let(:type) { scan_type }
+ let(:configured) { true }
+
+ it { is_expected.to eq features_hash }
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 5885151ecb5..f741fd8fae9 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -186,7 +186,8 @@ RSpec.describe Gitlab::SidekiqConfig do
allow(::Gitlab::SidekiqConfig::WorkerRouter)
.to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
- allow(Sidekiq).to receive(:[]).with(:queues).and_return(%w[default background_migration])
+ allow(Sidekiq).to receive_message_chain(:default_configuration, :queues)
+ .and_return(%w[default background_migration])
mappings = described_class.current_worker_queue_mappings
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 2e07fa100e8..b1a8a9f4da3 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -492,7 +492,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'completed_at' => current_utc_time.to_i }
end
- subject { described_class.new }
+ subject { described_class.new(Sidekiq.logger) }
it 'update payload correctly' do
travel_to(current_utc_time) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 9cf9901007c..e1662903fa4 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
described_class.initialize_process_metrics
end
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
end
it 'sets the concurrency metric' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
described_class.initialize_process_metrics
end
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index bf379d9cb0d..96d4042b1e6 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:migrator) { described_class.new(mappings) }
let(:set_after) do
- Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
+ Sidekiq.redis { |c| c.call("ZRANGE", set_name, 0, -1, "WITHSCORES") }
.map { |item, score| [Gitlab::Json.load(item), score] }
end
@@ -226,8 +226,9 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:logger) { nil }
def list_queues
- queues = Sidekiq.redis do |conn|
- conn.scan_each(match: "queue:*").to_a
+ queues = []
+ Sidekiq.redis do |conn|
+ conn.scan("MATCH", "queue:*") { |key| queues << key }
end
queues.uniq.map { |queue| queue.split(':', 2).last }
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 55e3885d257..ecdab2651a2 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,
context 'when both multi-store feature flags are off' do
def with_redis(&block)
- Sidekiq.redis(&block)
+ Gitlab::Redis::Queues.with(&block)
end
before do
diff --git a/spec/lib/gitlab/ssh/commit_spec.rb b/spec/lib/gitlab/ssh/commit_spec.rb
index 3b53ed9d1db..0d2621a16c0 100644
--- a/spec/lib/gitlab/ssh/commit_spec.rb
+++ b/spec/lib/gitlab/ssh/commit_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ssh::Commit, feature_category: :source_code_management do
})
allow(Gitlab::Ssh::Signature).to receive(:new)
- .with(signature_text, signed_text, signer, commit.committer_email)
+ .with(signature_text, signed_text, signer, commit)
.and_return(verifier)
end
diff --git a/spec/lib/gitlab/ssh/signature_spec.rb b/spec/lib/gitlab/ssh/signature_spec.rb
index cb0b1ff049c..608fa9d71d0 100644
--- a/spec/lib/gitlab/ssh/signature_spec.rb
+++ b/spec/lib/gitlab/ssh/signature_spec.rb
@@ -8,7 +8,9 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
let_it_be(:public_key_text) { 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHZ8NHEnCIpC4mnot+BRxv6L+fq+TnN1CgsRrHWLmfwb' }
let_it_be_with_reload(:user) { create(:user, email: committer_email) }
let_it_be_with_reload(:key) { create(:key, usage_type: :signing, key: public_key_text, user: user) }
+ let_it_be_with_reload(:project) { create(:project, :repository, :in_group) }
+ let(:commit) { project.commit }
let(:signed_text) { 'This message was signed by an ssh key' }
let(:signer) { :SIGNER_USER }
@@ -24,12 +26,16 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
SIG
end
+ before do
+ allow(commit).to receive(:committer_email).and_return(committer_email)
+ end
+
subject(:signature) do
described_class.new(
signature_text,
signed_text,
signer,
- committer_email
+ commit
)
end
@@ -283,5 +289,30 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
it 'returns the pubkey sha256 fingerprint' do
expect(signature.key_fingerprint).to eq('dw7gPSvYtkCBU+BbTolbbckUEX3sL6NsGIJTQ4PYEnM')
end
+
+ context 'when a signature has been created with a certificate' do
+ let(:signature_text) do
+ # ssh-keygen -Y sign -n git -f id_test-cert.pub message.txt
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAAb0AAAAgc3NoLWVkMjU1MTktY2VydC12MDFAb3BlbnNzaC5jb20AAA
+ AgWbXlnjWbxTzOlRPcnSMlQQnnJTCsEv2y2ij5o7yVbcUAAAAgYAsBVqgfGrvGdSPjqY0H
+ t8yljpOS4VumZHnAh+wCvdEAAAAAAAAAAAAAAAEAAAARYWRtaW5AZXhhbXBsZS5jb20AAA
+ AAAAAAAGV9kqgAAAAAZX7kiwAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZGluZwAA
+ AAAAAAAXcGVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pdC1wb3J0LWZvcn
+ dhcmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1pdC11c2VyLXJjAAAAAAAA
+ AAAAAAAzAAAAC3NzaC1lZDI1NTE5AAAAIINudhvW7P4c36bBwlWTaxnCCOaSfMrUbXHcP7
+ 7zH6LyAAAAUwAAAAtzc2gtZWQyNTUxOQAAAEBp9J9YQhaz+tNIKtNpZe5sAxcqvMgcYlB+
+ fVaDsYNOj445Bz7TBoFqjrs95yaF6pwARK11IEQTcwtrihLGzGkNAAAAA2dpdAAAAAAAAA
+ AGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUxOQAAAECfVh7AzwqRBMbnHBApCnMpu9Y1qpGM
+ sOSL1EeV3SIOlrThNTCerUpcaizcSY9L8WwP2TXlqw2Sq1BGM+PPSN0C
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it 'returns public key fingerprint' do
+ expect(signature.key_fingerprint).to eq('3dNIFKfIAXZb/JL30KKv95cps+mZwVAuAYQhIWxAb+8')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index a41f7d927fe..a5d42945101 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -47,18 +47,4 @@ RSpec.describe Gitlab::Themes, lib: true do
expect(ids).not_to be_empty
end
end
-
- describe 'theme.css_filename' do
- described_class.each do |theme|
- next unless theme.css_filename
-
- context "for #{theme.name}" do
- it 'returns an existing CSS filename' do
- css_file_path = Rails.root.join('app/assets/stylesheets/themes', theme.css_filename + '.scss')
-
- expect(File.exist?(css_file_path)).to eq(true)
- end
- end
- end
- end
end
diff --git a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
deleted file mode 100644
index 5a5c7123971..00000000000
--- a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
+++ /dev/null
@@ -1,140 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_stub_snowplow_by_default, feature_category: :application_instrumentation do
- let(:emitter) { SnowplowTracker::Emitter.new(endpoint: 'localhost', options: { buffer_size: 1 }) }
-
- let(:tracker) do
- SnowplowTracker::Tracker
- .new(
- emitters: [emitter],
- subject: SnowplowTracker::Subject.new,
- namespace: 'namespace',
- app_id: 'app_id'
- )
- end
-
- before do
- stub_application_setting(snowplow_app_id: '_abc123_')
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'when snowplow is enabled' do
- before do
- allow(SnowplowTracker::AsyncEmitter)
- .to receive(:new)
- .with(endpoint: endpoint,
- options:
- {
- protocol: 'https',
- on_success: subject.method(:increment_successful_events_emissions),
- on_failure: subject.method(:failure_callback)
- }
- ).and_return(emitter)
-
- allow(SnowplowTracker::Tracker)
- .to receive(:new)
- .with(
- emitters: [emitter],
- subject: an_instance_of(SnowplowTracker::Subject),
- namespace: described_class::SNOWPLOW_NAMESPACE,
- app_id: '_abc123_'
- ).and_return(tracker)
- end
-
- describe '#event' do
- let(:endpoint) { 'localhost:9091' }
- let(:event_params) do
- {
- category: 'category',
- action: 'action',
- label: 'label',
- property: 'property',
- value: 1.5,
- context: nil,
- tstamp: (Time.now.to_f * 1000).to_i
- }
- end
-
- context 'when on gitlab.com environment' do
- let(:endpoint) { 'db-snowplow.trx.gitlab.net' }
-
- before do
- stub_application_setting(snowplow_database_collector_hostname: endpoint)
- end
-
- it 'sends event to tracker' do
- allow(Gitlab).to receive(:com?).and_return(true)
- allow(tracker).to receive(:track_struct_event).and_call_original
-
- subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
-
- expect(tracker).to have_received(:track_struct_event).with(event_params)
- end
- end
-
- it 'sends event to tracker' do
- allow(tracker).to receive(:track_struct_event).and_call_original
-
- subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
-
- expect(tracker).to have_received(:track_struct_event).with(event_params)
- end
-
- it 'increase total snowplow events counter' do
- counter = double
-
- expect(counter).to receive(:increment)
- expect(Gitlab::Metrics).to receive(:counter)
- .with(:gitlab_db_events_snowplow_events_total, 'Number of Snowplow events')
- .and_return(counter)
-
- subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
- end
- end
- end
-
- context 'for callbacks' do
- describe 'on success' do
- it 'increase gitlab_successful_snowplow_events_total counter' do
- counter = double
-
- expect(counter).to receive(:increment).with({}, 2)
- expect(Gitlab::Metrics).to receive(:counter)
- .with(
- :gitlab_db_events_snowplow_successful_events_total,
- 'Number of successful Snowplow events emissions').and_return(counter)
-
- subject.method(:increment_successful_events_emissions).call(2)
- end
- end
-
- describe 'on failure' do
- it 'increase gitlab_failed_snowplow_events_total counter and logs failures', :aggregate_failures do
- counter = double
- error_message = "Issue database_event_update failed to be reported to collector at localhost:9091"
- failures = [{ "e" => "se",
- "se_ca" => "Issue",
- "se_la" => "issues",
- "se_ac" => "database_event_update" }]
- allow(Gitlab::Metrics).to receive(:counter)
- .with(
- :gitlab_db_events_snowplow_successful_events_total,
- 'Number of successful Snowplow events emissions').and_call_original
-
- expect(Gitlab::AppLogger).to receive(:error).with(error_message)
- expect(counter).to receive(:increment).with({}, 1)
- expect(Gitlab::Metrics).to receive(:counter)
- .with(
- :gitlab_db_events_snowplow_failed_events_total,
- 'Number of failed Snowplow events emissions').and_return(counter)
-
- subject.method(:failure_callback).call(2, failures)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/tracking/event_definition_spec.rb b/spec/lib/gitlab/tracking/event_definition_spec.rb
index 5e41c691da8..7c5047dc0c6 100644
--- a/spec/lib/gitlab/tracking/event_definition_spec.rb
+++ b/spec/lib/gitlab/tracking/event_definition_spec.rb
@@ -31,10 +31,6 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
File.write(path, content)
end
- it 'has all definitions valid' do
- expect { described_class.definitions }.not_to raise_error
- end
-
it 'has no duplicated actions in InternalEventTracking events', :aggregate_failures do
definitions_by_action = described_class.definitions
.select { |d| d.category == 'InternalEventTracking' }
@@ -85,10 +81,8 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
attributes[attribute] = value
end
- it 'raise exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Tracking::InvalidEventError))
-
- described_class.new(path, attributes).validate!
+ it 'has validation errors' do
+ expect(described_class.new(path, attributes).validation_errors).not_to be_empty
end
end
end
diff --git a/spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb b/spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb
new file mode 100644
index 00000000000..cc2ccc511bb
--- /dev/null
+++ b/spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::EventDefinition, feature_category: :product_analytics_data_management do
+ it 'only has valid event definitions', :aggregate_failures do
+ described_class.definitions.each do |definition|
+ validation_errors = definition.validation_errors
+ expect(validation_errors).to be_empty, validation_errors.join
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 46213532071..6e4ab00d04f 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -165,38 +165,6 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
end
end
- describe '.database_event' do
- context 'when the action is not passed in as a string' do
- it 'allows symbols' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.database_event('category', :some_action)
- end
-
- it 'allows nil' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.database_event('category', nil)
- end
-
- it 'allows integers' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.database_event('category', 1)
- end
- end
-
- it_behaves_like 'rescued error raised by destination class' do
- let(:category) { 'Issue' }
- let(:action) { 'created' }
- let(:destination_class) { Gitlab::Tracking::Destinations::DatabaseEventsSnowplow }
-
- subject(:tracking_method) { described_class.database_event(category, action) }
- end
-
- it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :database_event
- end
-
describe '.event' do
context 'when the action is not passed in as a string' do
it 'allows symbols' do
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index fb46d48c1bb..caa114cb00f 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -33,6 +33,14 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
described_class.instance_variable_set(:@definitions, nil)
end
+ def expect_validation_errors
+ expect(described_class.new(path, attributes).validation_errors).not_to be_empty
+ end
+
+ def expect_no_validation_errors
+ expect(described_class.new(path, attributes).validation_errors).to be_empty
+ end
+
def write_metric(metric, path, content)
path = File.join(metric, path)
dir = File.dirname(path)
@@ -40,12 +48,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
File.write(path, content)
end
- it 'has only valid definitions' do
- described_class.all.each do |definition|
- expect { definition.validate! }.not_to raise_error
- end
- end
-
describe '.instrumentation_class' do
context 'for non internal events' do
let(:attributes) { { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', data_source: 'redis_hll' } }
@@ -197,10 +199,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[attribute] = value
end
- it 'raise exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ it 'has validation errors' do
+ expect_validation_errors
end
end
@@ -210,9 +210,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:status] = 'broken'
attributes.delete(:repair_issue_url)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ expect_validation_errors
end
end
@@ -221,20 +219,16 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:status] = 'removed'
end
- it 'raise dev exception when removed_by_url is not provided' do
+ it 'has validation errors when removed_by_url is not provided' do
attributes.delete(:removed_by_url)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ expect_validation_errors
end
- it 'raises dev exception when milestone_removed is not provided' do
+ it 'has validation errors when milestone_removed is not provided' do
attributes.delete(:milestone_removed)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ expect_validation_errors
end
end
@@ -251,18 +245,16 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
end
with_them do
- it 'raises dev exception when invalid' do
+ it 'has validation errors when invalid' do
attributes[:instrumentation_class] = instrumentation_class if instrumentation_class
attributes[:options] = options if options
attributes[:events] = events if events
if is_valid
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect_no_validation_errors
else
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
+ expect_validation_errors
end
-
- described_class.new(path, attributes).validate!
end
end
end
@@ -294,12 +286,10 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:options] = options
if is_valid
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect_no_validation_errors
else
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
+ expect_validation_errors
end
-
- described_class.new(path, attributes).validate!
end
end
end
@@ -340,12 +330,10 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:options] = options
if is_valid
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect_no_validation_errors
else
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
+ expect_validation_errors
end
-
- described_class.new(path, attributes).validate!
end
end
end
diff --git a/spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb b/spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb
new file mode 100644
index 00000000000..d6255b54068
--- /dev/null
+++ b/spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :product_analytics_data_management do
+ # rubocop:disable Rails/FindEach -- The all method invoked here is unrelated to the ActiveRecord scope all
+ it 'only has valid metric definitions', :aggregate_failures do
+ described_class.all.each do |definition|
+ validation_errors = definition.validation_errors
+ expect(validation_errors).to be_empty, validation_errors.join
+ end
+ end
+ # rubocop:enable Rails/FindEach
+end
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index a848c286fa9..09866198639 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -168,11 +168,6 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
memoized_constatns += Gitlab::UsageData::EE_MEMOIZED_VALUES if defined? Gitlab::UsageData::EE_MEMOIZED_VALUES
memoized_constatns.each { |v| Gitlab::UsageData.clear_memoization(v) }
stub_database_flavor_check('Cloud SQL for PostgreSQL')
-
- # in_product_marketing_email metrics values are extracted from a single group by query
- # to check if the queries for individual metrics return the same value as group by when the value is non-zero
- create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.current)
- create(:in_product_marketing_email, track: :verify, series: 0)
end
let(:service_ping_payload) { described_class.for(output: :all_metrics_values) }
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index 2c9506dd498..05938fa08cd 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -50,18 +50,6 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter, feature_categ
end
end
- context 'with implicit includes', :snowplow do
- let(:config_source) { :auto_devops_source }
-
- described_class.all_included_templates('Auto-DevOps.gitlab-ci.yml').each do |template_name|
- context "for #{template_name}" do
- let(:template_path) { Gitlab::Template::GitlabCiYmlTemplate.find(template_name.delete_suffix('.gitlab-ci.yml')).full_name }
-
- include_examples 'tracks template'
- end
- end
- end
-
it 'expands short template names' do
expect do
described_class.track_unique_project_event(project: project, template: 'Dependency-Scanning.gitlab-ci.yml', config_source: :repository_source, user: user)
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index a7dc0b6a060..da8098bfee1 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -168,6 +168,54 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
end
+
+ describe "redis key overrides" do
+ let(:event_name) { "g_analytics_contribution" }
+
+ before do
+ allow(File).to receive(:read).and_call_original
+ allow(File).to receive(:read).with(described_class::KEY_OVERRIDES_PATH).and_return(overrides_file_content)
+ end
+
+ after do
+ described_class.clear_memoization(:key_overrides)
+ end
+
+ context "with an empty file" do
+ let(:overrides_file_content) { "{}" }
+
+ it "tracks the events using original Redis key" do
+ expected_key = "{hll_counters}_#{event_name}-2020-23"
+ expect(Gitlab::Redis::HLL).to receive(:add).with(hash_including(key: expected_key))
+
+ described_class.track_event(event_name, values: entity1)
+ end
+ end
+
+ context "with the file including overrides" do
+ let(:overrides_file_content) { "#{event_name}1: new_key2\n#{event_name}: new_key" }
+
+ context "when the event is included in overrides file" do
+ it "tracks the events using overridden Redis key" do
+ expected_key = "{hll_counters}_new_key-2020-23"
+ expect(Gitlab::Redis::HLL).to receive(:add).with(hash_including(key: expected_key))
+
+ described_class.track_event(:g_analytics_contribution, values: entity1)
+ end
+ end
+
+ context "when the event is not included in overrides file" do
+ let(:not_overridden_name) { "g_compliance_dashboard" }
+
+ it "tracks the events using original Redis key" do
+ expected_key = "{hll_counters}_#{not_overridden_name}-2020-23"
+ expect(Gitlab::Redis::HLL).to receive(:add).with(hash_including(key: expected_key))
+
+ described_class.track_event(not_overridden_name, values: entity1)
+ end
+ end
+ end
+ end
end
describe '.unique_events' do
@@ -236,6 +284,16 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
end
+
+ describe 'key overrides file' do
+ let(:key_overrides) { YAML.safe_load(File.read(described_class::KEY_OVERRIDES_PATH)) }
+
+ it "has a valid structure", :aggregate_failures do
+ expect(key_overrides).to be_a(Hash)
+
+ expect(key_overrides.keys + key_overrides.values).to all(be_a(String))
+ end
+ end
end
describe '.keys_for_aggregation' do
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index c3a718e669a..5c03ccb0d71 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -55,14 +55,6 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:merge_request) { create(:merge_request) }
let(:target_project) { merge_request.target_project }
- it_behaves_like 'a tracked merge request unique event' do
- let(:action) { described_class::MR_USER_CREATE_ACTION }
- end
-
- it_behaves_like 'a tracked merge request unique event' do
- let(:action) { described_class::MR_CREATE_ACTION }
- end
-
it_behaves_like 'internal event tracking' do
let(:event) { described_class::MR_USER_CREATE_ACTION }
let(:project) { target_project }
diff --git a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
index 1940442d2ad..903ae64cf33 100644
--- a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
@@ -31,6 +31,14 @@ RSpec.describe Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter, :cle
end
end
+ context 'when tracking react' do
+ let(:quickaction_name) { 'react' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_award' }
+ end
+ end
+
context 'tracking assigns' do
let(:quickaction_name) { 'assign' }
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
index aaf509b6f81..0ab5dec8ecf 100644
--- a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -44,4 +44,12 @@ RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_
it_behaves_like 'work item unique counter'
end
+
+ describe '.track_work_item_todo_marked_action' do
+ subject(:track_event) { described_class.track_work_item_mark_todo_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_TODO_MARKED }
+
+ it_behaves_like 'work item unique counter'
+ end
end
diff --git a/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
index 36fa350e46f..a258518953a 100644
--- a/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
+++ b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
@@ -26,6 +26,10 @@ RSpec.describe Integrations::GoogleCloudPlatform::ArtifactRegistry::Client, feat
describe '#list_docker_images' do
let(:page_token) { nil }
+ let(:expected_url) do
+ "#{described_class::GLGO_BASE_URL}/gcp/ar/projects/#{gcp_project_id}/" \
+ "locations/#{gcp_location}/repositories/#{gcp_repository}/docker"
+ end
subject(:list) { client.list_docker_images(page_token: page_token) }
@@ -36,6 +40,13 @@ RSpec.describe Integrations::GoogleCloudPlatform::ArtifactRegistry::Client, feat
it 'calls glgo list docker images API endpoint' do
stub_list_docker_image(body: dummy_list_body)
expect(client).to receive(:encoded_jwt).with(wlif: gcp_wlif)
+ expect(::Gitlab::HTTP).to receive(:get).with(
+ expected_url,
+ headers: an_instance_of(Hash),
+ query: an_instance_of(Hash),
+ format: :plain,
+ extra_allowed_uris: [URI(described_class::GLGO_BASE_URL)]
+ ).and_call_original
expect(list).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
end
diff --git a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
index 999889a72ee..974531a2dec 100644
--- a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Sidebars::Organizations::Menus::ScopeMenu, feature_category: :navigation do
- let_it_be(:organization) { build(:organization) }
+ let_it_be(:organization_detail) { build(:organization_detail) }
+ let_it_be(:organization) { organization_detail.organization }
let_it_be(:user) { build(:user) }
let_it_be(:context) { Sidebars::Context.new(current_user: user, container: organization) }
@@ -11,7 +12,7 @@ RSpec.describe Sidebars::Organizations::Menus::ScopeMenu, feature_category: :nav
let(:menu) { described_class.new(context) }
let(:extra_attrs) do
{
- avatar: nil,
+ avatar: organization.avatar_url(size: 48),
entity_id: organization.id,
super_sidebar_parent: ::Sidebars::StaticMenu,
item_id: :organization_overview
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index 0cf95391a26..85c109615c8 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
- stub_feature_flags(ml_experiment_tracking: false)
+ stub_feature_flags(ml_experiment_tracking: false, model_registry: false)
end
context 'when Packages Registry is visible' do
@@ -195,5 +195,32 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
end
end
end
+
+ describe 'Model registry' do
+ let(:item_id) { :model_registry }
+
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_registry, project)
+ .and_return(model_registry_enabled)
+ end
+
+ context 'when user can read model registry' do
+ let(:model_registry_enabled) { true }
+
+ it 'shows the menu item' do
+ is_expected.not_to be_nil
+ end
+ end
+
+ context 'when user can not read model registry' do
+ let(:model_registry_enabled) { false }
+
+ it 'does not show the menu item' do
+ is_expected.to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
index 98d62948ac3..f23aaad71f9 100644
--- a/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
@@ -18,7 +18,8 @@ RSpec.describe Sidebars::Projects::SuperSidebarMenus::DeployMenu, feature_catego
:releases,
:feature_flags,
:packages_registry,
- :container_registry
+ :container_registry,
+ :model_registry
])
end
end